code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def add_member(self, email, fullname, membership_type='normal'):
    """
    Add a member to the board. Membership type can be normal or admin.
    Returns JSON of all members if successful or raises an Unauthorised
    exception if not.
    """
    # Collect the member attributes the API expects as query parameters.
    member_params = {
        'email': email,
        'fullName': fullname,
        'type': membership_type,
    }
    # Members are added via a PUT on the board's /members endpoint.
    return self.fetch_json(
        uri_path=self.base_uri + '/members',
        http_method='PUT',
        query_params=member_params,
    )
constant[
Add a member to the board. Membership type can be normal or admin.
Returns JSON of all members if successful or raises an Unauthorised
exception if not.
]
return[call[name[self].fetch_json, parameter[]]] | keyword[def] identifier[add_member] ( identifier[self] , identifier[email] , identifier[fullname] , identifier[membership_type] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[fetch_json] (
identifier[uri_path] = identifier[self] . identifier[base_uri] + literal[string] ,
identifier[http_method] = literal[string] ,
identifier[query_params] ={
literal[string] : identifier[email] ,
literal[string] : identifier[fullname] ,
literal[string] : identifier[membership_type]
}
) | def add_member(self, email, fullname, membership_type='normal'):
"""
Add a member to the board. Membership type can be normal or admin.
Returns JSON of all members if successful or raises an Unauthorised
exception if not.
"""
return self.fetch_json(uri_path=self.base_uri + '/members', http_method='PUT', query_params={'email': email, 'fullName': fullname, 'type': membership_type}) |
def record_move_fields(rec, tag, field_positions_local,
                       field_position_local=None):
    """
    Move some fields to the position specified by 'field_position_local'.
    :param rec: a record structure as returned by create_record()
    :param tag: the tag of the fields to be moved
    :param field_positions_local: the positions of the fields to move
    :param field_position_local: insert the field before that
                                 field_position_local. If unspecified, appends
                                 the fields
    :return: the field_position_local if the operation was successful
    """
    # Pull the fields out of the record first, then re-insert them at the
    # requested position (append when no position was given).
    removed_fields = record_delete_fields(
        rec, tag, field_positions_local=field_positions_local)
    return record_add_fields(
        rec, tag, removed_fields,
        field_position_local=field_position_local)
constant[
Move some fields to the position specified by 'field_position_local'.
:param rec: a record structure as returned by create_record()
:param tag: the tag of the fields to be moved
:param field_positions_local: the positions of the fields to move
:param field_position_local: insert the field before that
field_position_local. If unspecified, appends
the fields :return: the field_position_local
is the operation was successful
]
variable[fields] assign[=] call[name[record_delete_fields], parameter[name[rec], name[tag]]]
return[call[name[record_add_fields], parameter[name[rec], name[tag], name[fields]]]] | keyword[def] identifier[record_move_fields] ( identifier[rec] , identifier[tag] , identifier[field_positions_local] ,
identifier[field_position_local] = keyword[None] ):
literal[string]
identifier[fields] = identifier[record_delete_fields] (
identifier[rec] , identifier[tag] ,
identifier[field_positions_local] = identifier[field_positions_local] )
keyword[return] identifier[record_add_fields] (
identifier[rec] , identifier[tag] , identifier[fields] ,
identifier[field_position_local] = identifier[field_position_local] ) | def record_move_fields(rec, tag, field_positions_local, field_position_local=None):
"""
Move some fields to the position specified by 'field_position_local'.
:param rec: a record structure as returned by create_record()
:param tag: the tag of the fields to be moved
:param field_positions_local: the positions of the fields to move
:param field_position_local: insert the field before that
field_position_local. If unspecified, appends
the fields :return: the field_position_local
is the operation was successful
"""
fields = record_delete_fields(rec, tag, field_positions_local=field_positions_local)
return record_add_fields(rec, tag, fields, field_position_local=field_position_local) |
def fit(self, X, y, random_state=np.random):
    """Create constraints from labels and learn the RCA model.
    Needs num_constraints specified in constructor.
    Parameters
    ----------
    X : (n x d) data matrix
        each row corresponds to a single instance
    y : (n) data labels
    random_state : a random.seed object to fix the random_state if needed.
    """
    # Validate inputs before deriving chunk constraints from the labels.
    X, y = self._prepare_inputs(X, y, ensure_min_samples=2)
    chunk_assignments = Constraints(y).chunks(
        num_chunks=self.num_chunks,
        chunk_size=self.chunk_size,
        random_state=random_state,
    )
    # Delegate the actual metric learning to the plain RCA fit.
    return RCA.fit(self, X, chunk_assignments)
constant[Create constraints from labels and learn the RCA model.
Needs num_constraints specified in constructor.
Parameters
----------
X : (n x d) data matrix
each row corresponds to a single instance
y : (n) data labels
random_state : a random.seed object to fix the random_state if needed.
]
<ast.Tuple object at 0x7da1b1e969e0> assign[=] call[name[self]._prepare_inputs, parameter[name[X], name[y]]]
variable[chunks] assign[=] call[call[name[Constraints], parameter[name[y]]].chunks, parameter[]]
return[call[name[RCA].fit, parameter[name[self], name[X], name[chunks]]]] | keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] , identifier[random_state] = identifier[np] . identifier[random] ):
literal[string]
identifier[X] , identifier[y] = identifier[self] . identifier[_prepare_inputs] ( identifier[X] , identifier[y] , identifier[ensure_min_samples] = literal[int] )
identifier[chunks] = identifier[Constraints] ( identifier[y] ). identifier[chunks] ( identifier[num_chunks] = identifier[self] . identifier[num_chunks] ,
identifier[chunk_size] = identifier[self] . identifier[chunk_size] ,
identifier[random_state] = identifier[random_state] )
keyword[return] identifier[RCA] . identifier[fit] ( identifier[self] , identifier[X] , identifier[chunks] ) | def fit(self, X, y, random_state=np.random):
"""Create constraints from labels and learn the RCA model.
Needs num_constraints specified in constructor.
Parameters
----------
X : (n x d) data matrix
each row corresponds to a single instance
y : (n) data labels
random_state : a random.seed object to fix the random_state if needed.
"""
(X, y) = self._prepare_inputs(X, y, ensure_min_samples=2)
chunks = Constraints(y).chunks(num_chunks=self.num_chunks, chunk_size=self.chunk_size, random_state=random_state)
return RCA.fit(self, X, chunks) |
def _make_split(self):
    """Perform the split: group WFS results per satellite tile, keep only
    tiles intersecting the requested area, then split each tile's bbox.
    """
    self.tile_dict = {}
    wfs = WebFeatureService(self.area_bbox, self.time_interval,
                            data_source=self.data_source,
                            instance_id=self.instance_id)
    dates = wfs.get_dates()
    geometries = wfs.get_geometries()
    for tile_info, (date, geometry) in zip(wfs, zip(dates, geometries)):
        props = tile_info['properties']
        # The tile name is encoded in path segments 4-6 (e.g. UTM zone/row).
        tile_name = ''.join(props['path'].split('/')[4:7])
        entry = self.tile_dict.get(tile_name)
        if entry is None:
            entry = {'bbox': BBox(props['mbr'], crs=props['crs']),
                     'times': [],
                     'geometries': []}
            self.tile_dict[tile_name] = entry
        entry['times'].append(date)
        entry['geometries'].append(geometry)
    # Discard tiles whose bounding box does not intersect the target area.
    self.tile_dict = {name: props for name, props in self.tile_dict.items()
                      if self._intersects_area(props['bbox'])}
    self.bbox_list = []
    self.info_list = []
    for tile_name, tile_props in self.tile_dict.items():
        tile_bbox = tile_props['bbox']
        splitter = BBoxSplitter([tile_bbox.geometry], tile_bbox.crs,
                                split_shape=self.tile_split_shape)
        for bbox, info in zip(splitter.get_bbox_list(),
                              splitter.get_info_list()):
            if not self._intersects_area(bbox):
                continue
            info['tile'] = tile_name
            self.bbox_list.append(bbox)
            self.info_list.append(info)
constant[ This method makes the split
]
name[self].tile_dict assign[=] dictionary[[], []]
variable[wfs] assign[=] call[name[WebFeatureService], parameter[name[self].area_bbox, name[self].time_interval]]
variable[date_list] assign[=] call[name[wfs].get_dates, parameter[]]
variable[geometry_list] assign[=] call[name[wfs].get_geometries, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b18e5c90>, <ast.Tuple object at 0x7da1b18e69e0>]]] in starred[call[name[zip], parameter[name[wfs], call[name[zip], parameter[name[date_list], name[geometry_list]]]]]] begin[:]
variable[tile_name] assign[=] call[constant[].join, parameter[call[call[call[call[name[tile_info]][constant[properties]]][constant[path]].split, parameter[constant[/]]]][<ast.Slice object at 0x7da1b18e7ee0>]]]
if compare[name[tile_name] <ast.NotIn object at 0x7da2590d7190> name[self].tile_dict] begin[:]
call[name[self].tile_dict][name[tile_name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b18e5720>, <ast.Constant object at 0x7da1b18e7550>, <ast.Constant object at 0x7da1b18e77c0>], [<ast.Call object at 0x7da1b18e5360>, <ast.List object at 0x7da1b18e7e20>, <ast.List object at 0x7da1b18e7130>]]
call[call[call[name[self].tile_dict][name[tile_name]]][constant[times]].append, parameter[name[date]]]
call[call[call[name[self].tile_dict][name[tile_name]]][constant[geometries]].append, parameter[name[geometry]]]
name[self].tile_dict assign[=] <ast.DictComp object at 0x7da1b18e4c10>
name[self].bbox_list assign[=] list[[]]
name[self].info_list assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b18e4070>, <ast.Name object at 0x7da1b18e7b80>]]] in starred[call[name[self].tile_dict.items, parameter[]]] begin[:]
variable[tile_bbox] assign[=] call[name[tile_info]][constant[bbox]]
variable[bbox_splitter] assign[=] call[name[BBoxSplitter], parameter[list[[<ast.Attribute object at 0x7da1b18e4e80>]], name[tile_bbox].crs]]
for taget[tuple[[<ast.Name object at 0x7da1b18e67a0>, <ast.Name object at 0x7da1b18e7c40>]]] in starred[call[name[zip], parameter[call[name[bbox_splitter].get_bbox_list, parameter[]], call[name[bbox_splitter].get_info_list, parameter[]]]]] begin[:]
if call[name[self]._intersects_area, parameter[name[bbox]]] begin[:]
call[name[info]][constant[tile]] assign[=] name[tile_name]
call[name[self].bbox_list.append, parameter[name[bbox]]]
call[name[self].info_list.append, parameter[name[info]]] | keyword[def] identifier[_make_split] ( identifier[self] ):
literal[string]
identifier[self] . identifier[tile_dict] ={}
identifier[wfs] = identifier[WebFeatureService] ( identifier[self] . identifier[area_bbox] , identifier[self] . identifier[time_interval] , identifier[data_source] = identifier[self] . identifier[data_source] ,
identifier[instance_id] = identifier[self] . identifier[instance_id] )
identifier[date_list] = identifier[wfs] . identifier[get_dates] ()
identifier[geometry_list] = identifier[wfs] . identifier[get_geometries] ()
keyword[for] identifier[tile_info] ,( identifier[date] , identifier[geometry] ) keyword[in] identifier[zip] ( identifier[wfs] , identifier[zip] ( identifier[date_list] , identifier[geometry_list] )):
identifier[tile_name] = literal[string] . identifier[join] ( identifier[tile_info] [ literal[string] ][ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] : literal[int] ])
keyword[if] identifier[tile_name] keyword[not] keyword[in] identifier[self] . identifier[tile_dict] :
identifier[self] . identifier[tile_dict] [ identifier[tile_name] ]={ literal[string] : identifier[BBox] ( identifier[tile_info] [ literal[string] ][ literal[string] ],
identifier[crs] = identifier[tile_info] [ literal[string] ][ literal[string] ]),
literal[string] :[],
literal[string] :[]}
identifier[self] . identifier[tile_dict] [ identifier[tile_name] ][ literal[string] ]. identifier[append] ( identifier[date] )
identifier[self] . identifier[tile_dict] [ identifier[tile_name] ][ literal[string] ]. identifier[append] ( identifier[geometry] )
identifier[self] . identifier[tile_dict] ={ identifier[tile_name] : identifier[tile_props] keyword[for] identifier[tile_name] , identifier[tile_props] keyword[in] identifier[self] . identifier[tile_dict] . identifier[items] () keyword[if]
identifier[self] . identifier[_intersects_area] ( identifier[tile_props] [ literal[string] ])}
identifier[self] . identifier[bbox_list] =[]
identifier[self] . identifier[info_list] =[]
keyword[for] identifier[tile_name] , identifier[tile_info] keyword[in] identifier[self] . identifier[tile_dict] . identifier[items] ():
identifier[tile_bbox] = identifier[tile_info] [ literal[string] ]
identifier[bbox_splitter] = identifier[BBoxSplitter] ([ identifier[tile_bbox] . identifier[geometry] ], identifier[tile_bbox] . identifier[crs] ,
identifier[split_shape] = identifier[self] . identifier[tile_split_shape] )
keyword[for] identifier[bbox] , identifier[info] keyword[in] identifier[zip] ( identifier[bbox_splitter] . identifier[get_bbox_list] (), identifier[bbox_splitter] . identifier[get_info_list] ()):
keyword[if] identifier[self] . identifier[_intersects_area] ( identifier[bbox] ):
identifier[info] [ literal[string] ]= identifier[tile_name]
identifier[self] . identifier[bbox_list] . identifier[append] ( identifier[bbox] )
identifier[self] . identifier[info_list] . identifier[append] ( identifier[info] ) | def _make_split(self):
""" This method makes the split
"""
self.tile_dict = {}
wfs = WebFeatureService(self.area_bbox, self.time_interval, data_source=self.data_source, instance_id=self.instance_id)
date_list = wfs.get_dates()
geometry_list = wfs.get_geometries()
for (tile_info, (date, geometry)) in zip(wfs, zip(date_list, geometry_list)):
tile_name = ''.join(tile_info['properties']['path'].split('/')[4:7])
if tile_name not in self.tile_dict:
self.tile_dict[tile_name] = {'bbox': BBox(tile_info['properties']['mbr'], crs=tile_info['properties']['crs']), 'times': [], 'geometries': []} # depends on [control=['if'], data=['tile_name']]
self.tile_dict[tile_name]['times'].append(date)
self.tile_dict[tile_name]['geometries'].append(geometry) # depends on [control=['for'], data=[]]
self.tile_dict = {tile_name: tile_props for (tile_name, tile_props) in self.tile_dict.items() if self._intersects_area(tile_props['bbox'])}
self.bbox_list = []
self.info_list = []
for (tile_name, tile_info) in self.tile_dict.items():
tile_bbox = tile_info['bbox']
bbox_splitter = BBoxSplitter([tile_bbox.geometry], tile_bbox.crs, split_shape=self.tile_split_shape)
for (bbox, info) in zip(bbox_splitter.get_bbox_list(), bbox_splitter.get_info_list()):
if self._intersects_area(bbox):
info['tile'] = tile_name
self.bbox_list.append(bbox)
self.info_list.append(info) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] |
def read_param_file(filename, delimiter=None):
    """Unpacks a parameter file into a dictionary
    Reads a parameter file of format::
        Param1,0,1,Group1,dist1
        Param2,0,1,Group2,dist2
        Param3,0,1,Group3,dist3
    (Group and Dist columns are optional)
    Returns a dictionary containing:
        - names - the names of the parameters
        - bounds - a list of lists of lower and upper bounds
        - num_vars - a scalar indicating the number of variables
          (the length of names)
        - groups - a list of group names (strings) for each variable
        - dists - a list of distributions for the problem,
          None if not specified or all uniform
    Arguments
    ---------
    filename : str
        The path to the parameter file
    delimiter : str, default=None
        The delimiter used in the file to distinguish between columns
    """
    names = []
    bounds = []
    groups = []
    dists = []
    num_vars = 0
    fieldnames = ['name', 'lower_bound', 'upper_bound', 'group', 'dist']
    # Open with newline='' as recommended by the csv module; the old 'rU'
    # universal-newlines mode is deprecated and was removed in Python 3.11.
    with open(filename, 'r', newline='') as csvfile:
        dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=delimiter)
        csvfile.seek(0)
        reader = csv.DictReader(
            csvfile, fieldnames=fieldnames, dialect=dialect)
        for row in reader:
            if row['name'].strip().startswith('#'):
                # Comment line - skip it entirely.
                continue
            num_vars += 1
            names.append(row['name'])
            bounds.append(
                [float(row['lower_bound']), float(row['upper_bound'])])
            # If the fourth column does not contain a group name, use the
            # parameter name.  Bug fix: the original tested ``is 'NA'``,
            # an identity comparison that is unreliable for strings read
            # from a file; compare by value instead.
            if row['group'] is None or row['group'] == 'NA':
                groups.append(row['name'])
            else:
                groups.append(row['group'])
            # If the fifth column does not contain a distribution,
            # use uniform.
            if row['dist'] is None:
                dists.append('unif')
            else:
                dists.append(row['dist'])
    if groups == names:
        # Every parameter is its own group -> no group structure.
        groups = None
    elif len(set(groups)) == 1:
        raise ValueError('''Only one group defined, results will not be
            meaningful''')
    # setting dists to none if all are uniform
    # because non-uniform scaling is not needed
    if all(d == 'unif' for d in dists):
        dists = None
    return {'names': names, 'bounds': bounds, 'num_vars': num_vars,
            'groups': groups, 'dists': dists}
constant[Unpacks a parameter file into a dictionary
Reads a parameter file of format::
Param1,0,1,Group1,dist1
Param2,0,1,Group2,dist2
Param3,0,1,Group3,dist3
(Group and Dist columns are optional)
Returns a dictionary containing:
- names - the names of the parameters
- bounds - a list of lists of lower and upper bounds
- num_vars - a scalar indicating the number of variables
(the length of names)
- groups - a list of group names (strings) for each variable
- dists - a list of distributions for the problem,
None if not specified or all uniform
Arguments
---------
filename : str
The path to the parameter file
delimiter : str, default=None
The delimiter used in the file to distinguish between columns
]
variable[names] assign[=] list[[]]
variable[bounds] assign[=] list[[]]
variable[groups] assign[=] list[[]]
variable[dists] assign[=] list[[]]
variable[num_vars] assign[=] constant[0]
variable[fieldnames] assign[=] list[[<ast.Constant object at 0x7da1b189cb50>, <ast.Constant object at 0x7da1b189f310>, <ast.Constant object at 0x7da1b189f460>, <ast.Constant object at 0x7da1b189d570>, <ast.Constant object at 0x7da1b189f880>]]
with call[name[open], parameter[name[filename], constant[rU]]] begin[:]
variable[dialect] assign[=] call[call[name[csv].Sniffer, parameter[]].sniff, parameter[call[name[csvfile].read, parameter[constant[1024]]]]]
call[name[csvfile].seek, parameter[constant[0]]]
variable[reader] assign[=] call[name[csv].DictReader, parameter[name[csvfile]]]
for taget[name[row]] in starred[name[reader]] begin[:]
if call[call[call[name[row]][constant[name]].strip, parameter[]].startswith, parameter[constant[#]]] begin[:]
pass
if compare[name[groups] equal[==] name[names]] begin[:]
variable[groups] assign[=] constant[None]
if call[name[all], parameter[<ast.ListComp object at 0x7da1b18df760>]] begin[:]
variable[dists] assign[=] constant[None]
return[dictionary[[<ast.Constant object at 0x7da1b18dfc10>, <ast.Constant object at 0x7da1b18dd1b0>, <ast.Constant object at 0x7da1b18de530>, <ast.Constant object at 0x7da1b18dfd60>, <ast.Constant object at 0x7da1b18de2f0>], [<ast.Name object at 0x7da1b18de050>, <ast.Name object at 0x7da1b18df400>, <ast.Name object at 0x7da1b18ddd20>, <ast.Name object at 0x7da1b18def50>, <ast.Name object at 0x7da1b18dc400>]]] | keyword[def] identifier[read_param_file] ( identifier[filename] , identifier[delimiter] = keyword[None] ):
literal[string]
identifier[names] =[]
identifier[bounds] =[]
identifier[groups] =[]
identifier[dists] =[]
identifier[num_vars] = literal[int]
identifier[fieldnames] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[csvfile] :
identifier[dialect] = identifier[csv] . identifier[Sniffer] (). identifier[sniff] ( identifier[csvfile] . identifier[read] ( literal[int] ), identifier[delimiters] = identifier[delimiter] )
identifier[csvfile] . identifier[seek] ( literal[int] )
identifier[reader] = identifier[csv] . identifier[DictReader] (
identifier[csvfile] , identifier[fieldnames] = identifier[fieldnames] , identifier[dialect] = identifier[dialect] )
keyword[for] identifier[row] keyword[in] identifier[reader] :
keyword[if] identifier[row] [ literal[string] ]. identifier[strip] (). identifier[startswith] ( literal[string] ):
keyword[pass]
keyword[else] :
identifier[num_vars] += literal[int]
identifier[names] . identifier[append] ( identifier[row] [ literal[string] ])
identifier[bounds] . identifier[append] (
[ identifier[float] ( identifier[row] [ literal[string] ]), identifier[float] ( identifier[row] [ literal[string] ])])
keyword[if] identifier[row] [ literal[string] ] keyword[is] keyword[None] :
identifier[groups] . identifier[append] ( identifier[row] [ literal[string] ])
keyword[elif] identifier[row] [ literal[string] ] keyword[is] literal[string] :
identifier[groups] . identifier[append] ( identifier[row] [ literal[string] ])
keyword[else] :
identifier[groups] . identifier[append] ( identifier[row] [ literal[string] ])
keyword[if] identifier[row] [ literal[string] ] keyword[is] keyword[None] :
identifier[dists] . identifier[append] ( literal[string] )
keyword[else] :
identifier[dists] . identifier[append] ( identifier[row] [ literal[string] ])
keyword[if] identifier[groups] == identifier[names] :
identifier[groups] = keyword[None]
keyword[elif] identifier[len] ( identifier[set] ( identifier[groups] ))== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[all] ([ identifier[d] == literal[string] keyword[for] identifier[d] keyword[in] identifier[dists] ]):
identifier[dists] = keyword[None]
keyword[return] { literal[string] : identifier[names] , literal[string] : identifier[bounds] , literal[string] : identifier[num_vars] ,
literal[string] : identifier[groups] , literal[string] : identifier[dists] } | def read_param_file(filename, delimiter=None):
"""Unpacks a parameter file into a dictionary
Reads a parameter file of format::
Param1,0,1,Group1,dist1
Param2,0,1,Group2,dist2
Param3,0,1,Group3,dist3
(Group and Dist columns are optional)
Returns a dictionary containing:
- names - the names of the parameters
- bounds - a list of lists of lower and upper bounds
- num_vars - a scalar indicating the number of variables
(the length of names)
- groups - a list of group names (strings) for each variable
- dists - a list of distributions for the problem,
None if not specified or all uniform
Arguments
---------
filename : str
The path to the parameter file
delimiter : str, default=None
The delimiter used in the file to distinguish between columns
"""
names = []
bounds = []
groups = []
dists = []
num_vars = 0
fieldnames = ['name', 'lower_bound', 'upper_bound', 'group', 'dist']
with open(filename, 'rU') as csvfile:
dialect = csv.Sniffer().sniff(csvfile.read(1024), delimiters=delimiter)
csvfile.seek(0)
reader = csv.DictReader(csvfile, fieldnames=fieldnames, dialect=dialect)
for row in reader:
if row['name'].strip().startswith('#'):
pass # depends on [control=['if'], data=[]]
else:
num_vars += 1
names.append(row['name'])
bounds.append([float(row['lower_bound']), float(row['upper_bound'])])
# If the fourth column does not contain a group name, use
# the parameter name
if row['group'] is None:
groups.append(row['name']) # depends on [control=['if'], data=[]]
elif row['group'] is 'NA':
groups.append(row['name']) # depends on [control=['if'], data=[]]
else:
groups.append(row['group'])
# If the fifth column does not contain a distribution
# use uniform
if row['dist'] is None:
dists.append('unif') # depends on [control=['if'], data=[]]
else:
dists.append(row['dist']) # depends on [control=['for'], data=['row']] # depends on [control=['with'], data=['csvfile']]
if groups == names:
groups = None # depends on [control=['if'], data=['groups']]
elif len(set(groups)) == 1:
raise ValueError('Only one group defined, results will not be\n meaningful') # depends on [control=['if'], data=[]]
# setting dists to none if all are uniform
# because non-uniform scaling is not needed
if all([d == 'unif' for d in dists]):
dists = None # depends on [control=['if'], data=[]]
return {'names': names, 'bounds': bounds, 'num_vars': num_vars, 'groups': groups, 'dists': dists} |
def api_method(f):
    """
    Redirect pylivetrader.api.* operations to the algorithm
    in the local context.
    """
    @wraps(f)
    def dispatch(*args, **kwargs):
        # Resolve the algorithm bound to the current context and forward
        # the call to its method of the same name.
        algo = get_context()
        if algo is None:
            raise RuntimeError(
                '{} method must be called during live trading'.format(
                    f.__name__))
        method = getattr(algo, f.__name__)
        return method(*args, **kwargs)

    # Register the redirection on the public api module.
    setattr(pylivetrader.api, f.__name__, dispatch)
    pylivetrader.api.__all__.append(f.__name__)
    f.is_api_method = True
    return f
constant[
Redirect pylivetrader.api.* operations to the algorithm
in the local context.
]
def function[wrapped, parameter[]]:
variable[algorithm] assign[=] call[name[get_context], parameter[]]
if compare[name[algorithm] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b21d4730>
return[call[call[name[getattr], parameter[name[algorithm], name[f].__name__]], parameter[<ast.Starred object at 0x7da1b21d6920>]]]
call[name[setattr], parameter[name[pylivetrader].api, name[f].__name__, name[wrapped]]]
call[name[pylivetrader].api.__all__.append, parameter[name[f].__name__]]
name[f].is_api_method assign[=] constant[True]
return[name[f]] | keyword[def] identifier[api_method] ( identifier[f] ):
literal[string]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapped] (* identifier[args] ,** identifier[kwargs] ):
identifier[algorithm] = identifier[get_context] ()
keyword[if] identifier[algorithm] keyword[is] keyword[None] :
keyword[raise] identifier[RuntimeError] (
literal[string] . identifier[format] (
identifier[f] . identifier[__name__] ))
keyword[return] identifier[getattr] ( identifier[algorithm] , identifier[f] . identifier[__name__] )(* identifier[args] ,** identifier[kwargs] )
identifier[setattr] ( identifier[pylivetrader] . identifier[api] , identifier[f] . identifier[__name__] , identifier[wrapped] )
identifier[pylivetrader] . identifier[api] . identifier[__all__] . identifier[append] ( identifier[f] . identifier[__name__] )
identifier[f] . identifier[is_api_method] = keyword[True]
keyword[return] identifier[f] | def api_method(f):
"""
Redirect pylivetrader.api.* operations to the algorithm
in the local context.
"""
@wraps(f)
def wrapped(*args, **kwargs):
# Get the instance and call the method
algorithm = get_context()
if algorithm is None:
raise RuntimeError('{} method must be called during live trading'.format(f.__name__)) # depends on [control=['if'], data=[]]
return getattr(algorithm, f.__name__)(*args, **kwargs)
# register api redirection
setattr(pylivetrader.api, f.__name__, wrapped)
pylivetrader.api.__all__.append(f.__name__)
f.is_api_method = True
return f |
def to_dnf(self):
    """Return an equivalent expression in disjunctive normal form."""
    converted = self.node.to_dnf()
    # If conversion was a no-op, reuse this expression object; otherwise
    # wrap the new node in a fresh expression.
    return self if converted is self.node else _expr(converted)
constant[Return an equivalent expression in disjunctive normal form.]
variable[node] assign[=] call[name[self].node.to_dnf, parameter[]]
if compare[name[node] is name[self].node] begin[:]
return[name[self]] | keyword[def] identifier[to_dnf] ( identifier[self] ):
literal[string]
identifier[node] = identifier[self] . identifier[node] . identifier[to_dnf] ()
keyword[if] identifier[node] keyword[is] identifier[self] . identifier[node] :
keyword[return] identifier[self]
keyword[else] :
keyword[return] identifier[_expr] ( identifier[node] ) | def to_dnf(self):
"""Return an equivalent expression in disjunctive normal form."""
node = self.node.to_dnf()
if node is self.node:
return self # depends on [control=['if'], data=[]]
else:
return _expr(node) |
def get_authn_header(self, request, authn_method, **kwargs):
    """
    Construct an authorization specification to be sent in the
    HTTP header.
    :param request: The service request
    :param authn_method: Which authentication/authorization method to use
    :param kwargs: Extra keyword arguments
    :return: A set of keyword arguments to be sent in the HTTP header.
    """
    if not authn_method:
        # No client authentication requested -> nothing to add.
        return {}
    spec = self.init_authentication_method(request, authn_method, **kwargs)
    try:
        return spec['headers']
    except KeyError:
        # The chosen method produced no header material.
        return {}
constant[
Construct an authorization specification to be sent in the
HTTP header.
:param request: The service request
:param authn_method: Which authentication/authorization method to use
:param kwargs: Extra keyword arguments
:return: A set of keyword arguments to be sent in the HTTP header.
]
variable[headers] assign[=] dictionary[[], []]
if name[authn_method] begin[:]
variable[h_arg] assign[=] call[name[self].init_authentication_method, parameter[name[request], name[authn_method]]]
<ast.Try object at 0x7da1b1bdd7e0>
return[name[headers]] | keyword[def] identifier[get_authn_header] ( identifier[self] , identifier[request] , identifier[authn_method] ,** identifier[kwargs] ):
literal[string]
identifier[headers] ={}
keyword[if] identifier[authn_method] :
identifier[h_arg] = identifier[self] . identifier[init_authentication_method] ( identifier[request] , identifier[authn_method] ,
** identifier[kwargs] )
keyword[try] :
identifier[headers] = identifier[h_arg] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[return] identifier[headers] | def get_authn_header(self, request, authn_method, **kwargs):
"""
Construct an authorization specification to be sent in the
HTTP header.
:param request: The service request
:param authn_method: Which authentication/authorization method to use
:param kwargs: Extra keyword arguments
:return: A set of keyword arguments to be sent in the HTTP header.
"""
headers = {}
# If I should deal with client authentication
if authn_method:
h_arg = self.init_authentication_method(request, authn_method, **kwargs)
try:
headers = h_arg['headers'] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return headers |
def to_json(self):
    """
    Converts this object to its json representation following the Telegram API guidelines described here:
    https://core.telegram.org/bots/api#replykeyboardmarkup
    :return:
    """
    payload = {'keyboard': self.keyboard}
    # Optional flags are only serialised when enabled, per the API spec.
    for flag in ('one_time_keyboard', 'resize_keyboard', 'selective'):
        if getattr(self, flag):
            payload[flag] = True
    return json.dumps(payload)
constant[
Converts this object to its json representation following the Telegram API guidelines described here:
https://core.telegram.org/bots/api#replykeyboardmarkup
:return:
]
variable[json_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c7a380>], [<ast.Attribute object at 0x7da1b1c784f0>]]
if name[self].one_time_keyboard begin[:]
call[name[json_dict]][constant[one_time_keyboard]] assign[=] constant[True]
if name[self].resize_keyboard begin[:]
call[name[json_dict]][constant[resize_keyboard]] assign[=] constant[True]
if name[self].selective begin[:]
call[name[json_dict]][constant[selective]] assign[=] constant[True]
return[call[name[json].dumps, parameter[name[json_dict]]]] | keyword[def] identifier[to_json] ( identifier[self] ):
literal[string]
identifier[json_dict] ={ literal[string] : identifier[self] . identifier[keyboard] }
keyword[if] identifier[self] . identifier[one_time_keyboard] :
identifier[json_dict] [ literal[string] ]= keyword[True]
keyword[if] identifier[self] . identifier[resize_keyboard] :
identifier[json_dict] [ literal[string] ]= keyword[True]
keyword[if] identifier[self] . identifier[selective] :
identifier[json_dict] [ literal[string] ]= keyword[True]
keyword[return] identifier[json] . identifier[dumps] ( identifier[json_dict] ) | def to_json(self):
"""
Converts this object to its json representation following the Telegram API guidelines described here:
https://core.telegram.org/bots/api#replykeyboardmarkup
:return:
"""
json_dict = {'keyboard': self.keyboard}
if self.one_time_keyboard:
json_dict['one_time_keyboard'] = True # depends on [control=['if'], data=[]]
if self.resize_keyboard:
json_dict['resize_keyboard'] = True # depends on [control=['if'], data=[]]
if self.selective:
json_dict['selective'] = True # depends on [control=['if'], data=[]]
return json.dumps(json_dict) |
def get_executor(self, create=1):
    """Return the action executor for this node.

    A cached executor is reused when present. Otherwise, when *create*
    is true, a new one is built (a Null executor if the node has no
    builder action), cached on the node, and returned; when *create* is
    false the AttributeError from the missing cache propagates.
    """
    try:
        # Fast path: an executor was already built for this node.
        return self.executor
    except AttributeError:
        if not create:
            raise
    try:
        act = self.builder.action
    except AttributeError:
        # No builder/action available: fall back to a do-nothing executor.
        executor = SCons.Executor.Null(targets=[self])
    else:
        executor = SCons.Executor.Executor(
            act,
            self.env or self.builder.env,
            [self.builder.overrides],
            [self],
            self.sources)
    self.executor = executor
    return executor
constant[Fetch the action executor for this node. Create one if
there isn't already one, and requested to do so.]
<ast.Try object at 0x7da204622da0>
return[name[executor]] | keyword[def] identifier[get_executor] ( identifier[self] , identifier[create] = literal[int] ):
literal[string]
keyword[try] :
identifier[executor] = identifier[self] . identifier[executor]
keyword[except] identifier[AttributeError] :
keyword[if] keyword[not] identifier[create] :
keyword[raise]
keyword[try] :
identifier[act] = identifier[self] . identifier[builder] . identifier[action]
keyword[except] identifier[AttributeError] :
identifier[executor] = identifier[SCons] . identifier[Executor] . identifier[Null] ( identifier[targets] =[ identifier[self] ])
keyword[else] :
identifier[executor] = identifier[SCons] . identifier[Executor] . identifier[Executor] ( identifier[act] ,
identifier[self] . identifier[env] keyword[or] identifier[self] . identifier[builder] . identifier[env] ,
[ identifier[self] . identifier[builder] . identifier[overrides] ],
[ identifier[self] ],
identifier[self] . identifier[sources] )
identifier[self] . identifier[executor] = identifier[executor]
keyword[return] identifier[executor] | def get_executor(self, create=1):
"""Fetch the action executor for this node. Create one if
there isn't already one, and requested to do so."""
try:
executor = self.executor # depends on [control=['try'], data=[]]
except AttributeError:
if not create:
raise # depends on [control=['if'], data=[]]
try:
act = self.builder.action # depends on [control=['try'], data=[]]
except AttributeError:
executor = SCons.Executor.Null(targets=[self]) # depends on [control=['except'], data=[]]
else:
executor = SCons.Executor.Executor(act, self.env or self.builder.env, [self.builder.overrides], [self], self.sources)
self.executor = executor # depends on [control=['except'], data=[]]
return executor |
def get_shop(self, shop_id=0):
    """
    查询门店的WiFi信息
    http://mp.weixin.qq.com/wiki/15/bcfb5d4578ea818b89913472cf2bbf8f.html
    :param shop_id: 门店 ID
    :return: 返回的 JSON 数据包
    """
    # POST the shop id and unwrap the 'data' field of the response.
    return self._post(
        'shop/get',
        data={'shop_id': shop_id},
        result_processor=lambda x: x['data'],
    )
constant[
查询门店的WiFi信息
http://mp.weixin.qq.com/wiki/15/bcfb5d4578ea818b89913472cf2bbf8f.html
:param shop_id: 门店 ID
:return: 返回的 JSON 数据包
]
variable[res] assign[=] call[name[self]._post, parameter[constant[shop/get]]]
return[name[res]] | keyword[def] identifier[get_shop] ( identifier[self] , identifier[shop_id] = literal[int] ):
literal[string]
identifier[res] = identifier[self] . identifier[_post] (
literal[string] ,
identifier[data] ={
literal[string] : identifier[shop_id] ,
},
identifier[result_processor] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ]
)
keyword[return] identifier[res] | def get_shop(self, shop_id=0):
"""
查询门店的WiFi信息
http://mp.weixin.qq.com/wiki/15/bcfb5d4578ea818b89913472cf2bbf8f.html
:param shop_id: 门店 ID
:return: 返回的 JSON 数据包
"""
res = self._post('shop/get', data={'shop_id': shop_id}, result_processor=lambda x: x['data'])
return res |
def get_clients(self):
    """
    Returns the clients stored in the instance of UAA.
    """
    # Caller must hold the 'clients.read' scope; raises otherwise.
    self.assert_has_permission('clients.read')
    endpoint = self.uri + '/oauth/clients'
    auth_headers = self.get_authorization_headers()
    resp = requests.get(endpoint, headers=auth_headers)
    return resp.json()['resources']
constant[
Returns the clients stored in the instance of UAA.
]
call[name[self].assert_has_permission, parameter[constant[clients.read]]]
variable[uri] assign[=] binary_operation[name[self].uri + constant[/oauth/clients]]
variable[headers] assign[=] call[name[self].get_authorization_headers, parameter[]]
variable[response] assign[=] call[name[requests].get, parameter[name[uri]]]
return[call[call[name[response].json, parameter[]]][constant[resources]]] | keyword[def] identifier[get_clients] ( identifier[self] ):
literal[string]
identifier[self] . identifier[assert_has_permission] ( literal[string] )
identifier[uri] = identifier[self] . identifier[uri] + literal[string]
identifier[headers] = identifier[self] . identifier[get_authorization_headers] ()
identifier[response] = identifier[requests] . identifier[get] ( identifier[uri] , identifier[headers] = identifier[headers] )
keyword[return] identifier[response] . identifier[json] ()[ literal[string] ] | def get_clients(self):
"""
Returns the clients stored in the instance of UAA.
"""
self.assert_has_permission('clients.read')
uri = self.uri + '/oauth/clients'
headers = self.get_authorization_headers()
response = requests.get(uri, headers=headers)
return response.json()['resources'] |
def __collect_file(self, filename, keep_original=False):
    """
    Move or copy single file to artifacts dir
    """
    # Destination keeps only the basename inside the artifacts dir.
    dest = '/'.join((self.artifacts_dir, os.path.basename(filename)))
    logger.debug("Collecting file: %s to %s", filename, dest)
    if not filename or not os.path.exists(filename):
        logger.warning("File not found to collect: %s", filename)
        return
    if os.path.exists(dest):
        # FIXME: 3 find a way to store artifacts anyway
        logger.warning("File already exists: %s", dest)
        return
    # Copy preserves the original; move relocates it.
    transfer = shutil.copy if keep_original else shutil.move
    transfer(filename, self.artifacts_dir)
    # Make the collected artifact world-readable.
    os.chmod(dest, 0o644)
constant[
Move or copy single file to artifacts dir
]
variable[dest] assign[=] binary_operation[binary_operation[name[self].artifacts_dir + constant[/]] + call[name[os].path.basename, parameter[name[filename]]]]
call[name[logger].debug, parameter[constant[Collecting file: %s to %s], name[filename], name[dest]]]
if <ast.BoolOp object at 0x7da1b03179a0> begin[:]
call[name[logger].warning, parameter[constant[File not found to collect: %s], name[filename]]]
return[None]
if call[name[os].path.exists, parameter[name[dest]]] begin[:]
call[name[logger].warning, parameter[constant[File already exists: %s], name[dest]]]
return[None]
if name[keep_original] begin[:]
call[name[shutil].copy, parameter[name[filename], name[self].artifacts_dir]]
call[name[os].chmod, parameter[name[dest], constant[420]]] | keyword[def] identifier[__collect_file] ( identifier[self] , identifier[filename] , identifier[keep_original] = keyword[False] ):
literal[string]
identifier[dest] = identifier[self] . identifier[artifacts_dir] + literal[string] + identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[filename] , identifier[dest] )
keyword[if] keyword[not] identifier[filename] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
identifier[logger] . identifier[warning] ( literal[string] , identifier[filename] )
keyword[return]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[dest] ):
identifier[logger] . identifier[warning] ( literal[string] , identifier[dest] )
keyword[return]
keyword[if] identifier[keep_original] :
identifier[shutil] . identifier[copy] ( identifier[filename] , identifier[self] . identifier[artifacts_dir] )
keyword[else] :
identifier[shutil] . identifier[move] ( identifier[filename] , identifier[self] . identifier[artifacts_dir] )
identifier[os] . identifier[chmod] ( identifier[dest] , literal[int] ) | def __collect_file(self, filename, keep_original=False):
"""
Move or copy single file to artifacts dir
"""
dest = self.artifacts_dir + '/' + os.path.basename(filename)
logger.debug('Collecting file: %s to %s', filename, dest)
if not filename or not os.path.exists(filename):
logger.warning('File not found to collect: %s', filename)
return # depends on [control=['if'], data=[]]
if os.path.exists(dest):
# FIXME: 3 find a way to store artifacts anyway
logger.warning('File already exists: %s', dest)
return # depends on [control=['if'], data=[]]
if keep_original:
shutil.copy(filename, self.artifacts_dir) # depends on [control=['if'], data=[]]
else:
shutil.move(filename, self.artifacts_dir)
os.chmod(dest, 420) |
def get_request_handler(self):
    """Return the Endpoint's configured :attr:`Endpoint.request_handler`.

    Instantiates the handler class with this endpoint and the keyword
    arguments from :meth:`get_request_handler_params`.

    :returns: An instance of the Endpoint-specified :class:`RequestHandler`.
    :rtype: :class:`RequestHandler`
    :raises AssertionError: If no ``request_handler`` is defined.
    """
    # Raise explicitly instead of using `assert`, which is stripped
    # when Python runs with -O; AssertionError keeps the exception
    # type callers may already catch.
    if self.request_handler is None:
        raise AssertionError(
            'Please define a request_handler '
            'for Endpoint: %s' % self.__class__.__name__)
    return self.request_handler(self, **self.get_request_handler_params())
constant[Return the Endpoints defined :attr:`Endpoint.request_handler`.
:returns: A instance of the Endpoint specified :class:`RequestHandler`.
:rtype: :class:`RequestHandler`
]
assert[compare[name[self].request_handler is_not constant[None]]]
return[call[name[self].request_handler, parameter[name[self]]]] | keyword[def] identifier[get_request_handler] ( identifier[self] ):
literal[string]
keyword[assert] identifier[self] . identifier[request_handler] keyword[is] keyword[not] keyword[None] , literal[string] literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
keyword[return] identifier[self] . identifier[request_handler] ( identifier[self] ,** identifier[self] . identifier[get_request_handler_params] ()) | def get_request_handler(self):
"""Return the Endpoints defined :attr:`Endpoint.request_handler`.
:returns: A instance of the Endpoint specified :class:`RequestHandler`.
:rtype: :class:`RequestHandler`
"""
assert self.request_handler is not None, 'Please define a request_handler for Endpoint: %s' % self.__class__.__name__
return self.request_handler(self, **self.get_request_handler_params()) |
def findOrDie(s):
    """
    Look up an amino acid.
    @param s: A C{str} amino acid specifier. This may be a full name,
    a 3-letter abbreviation or a 1-letter abbreviation. Case is ignored.
    @return: An C{AminoAcid} instance, if one can be found. Else exit.
    """
    aa = find(s)
    if not aa:
        # Unknown specifier: report on stderr and terminate.
        print('Unknown amino acid or codon: %s' % s, file=sys.stderr)
        print('Valid arguments are: %s.' % list(CODONS.keys()),
              file=sys.stderr)
        sys.exit(1)
    return aa
constant[
Look up an amino acid.
@param s: A C{str} amino acid specifier. This may be a full name,
a 3-letter abbreviation or a 1-letter abbreviation. Case is ignored.
@return: An C{AminoAcid} instance, if one can be found. Else exit.
]
variable[aa] assign[=] call[name[find], parameter[name[s]]]
if name[aa] begin[:]
return[name[aa]] | keyword[def] identifier[findOrDie] ( identifier[s] ):
literal[string]
identifier[aa] = identifier[find] ( identifier[s] )
keyword[if] identifier[aa] :
keyword[return] identifier[aa]
keyword[else] :
identifier[print] ( literal[string] % identifier[s] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( literal[string] % identifier[list] ( identifier[CODONS] . identifier[keys] ()),
identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] ) | def findOrDie(s):
"""
Look up an amino acid.
@param s: A C{str} amino acid specifier. This may be a full name,
a 3-letter abbreviation or a 1-letter abbreviation. Case is ignored.
@return: An C{AminoAcid} instance, if one can be found. Else exit.
"""
aa = find(s)
if aa:
return aa # depends on [control=['if'], data=[]]
else:
print('Unknown amino acid or codon: %s' % s, file=sys.stderr)
print('Valid arguments are: %s.' % list(CODONS.keys()), file=sys.stderr)
sys.exit(1) |
def getWorkDirs():
    """get input/output dirs (same input/output layout as for package)"""
    # Identify the module that called this function via the stack.
    caller_file = inspect.stack()[1][1]
    module_path = os.path.splitext(os.path.relpath(caller_file))[0]
    # First path component is replaced by the data directory name.
    parts = module_path.split('/')
    parts[0] = 'data'  # TODO de-hardcode
    # Output dir mirrors the module layout under 'output/'; create on demand.
    out_dir = os.path.join('output', *parts[1:])
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    # Input dir must already exist; bail out otherwise.
    in_dir = os.path.join(*(parts + ['input']))
    if not os.path.exists(in_dir):
        logging.critical('create input dir %s to continue!' % in_dir)
        sys.exit(1)
    return in_dir, out_dir
constant[get input/output dirs (same input/output layout as for package)]
variable[caller_fullurl] assign[=] call[call[call[name[inspect].stack, parameter[]]][constant[1]]][constant[1]]
variable[caller_relurl] assign[=] call[name[os].path.relpath, parameter[name[caller_fullurl]]]
variable[caller_modurl] assign[=] call[call[name[os].path.splitext, parameter[name[caller_relurl]]]][constant[0]]
variable[dirs] assign[=] call[name[caller_modurl].split, parameter[constant[/]]]
call[name[dirs]][constant[0]] assign[=] constant[data]
variable[outDir] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da20e749540>]]
if <ast.UnaryOp object at 0x7da20e74b220> begin[:]
call[name[os].makedirs, parameter[name[outDir]]]
call[name[dirs].append, parameter[constant[input]]]
variable[inDir] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da1b15e60e0>]]
if <ast.UnaryOp object at 0x7da1b15e77c0> begin[:]
call[name[logging].critical, parameter[binary_operation[constant[create input dir %s to continue!] <ast.Mod object at 0x7da2590d6920> name[inDir]]]]
call[name[sys].exit, parameter[constant[1]]]
return[tuple[[<ast.Name object at 0x7da1b15e5900>, <ast.Name object at 0x7da1b15e6d70>]]] | keyword[def] identifier[getWorkDirs] ():
literal[string]
identifier[caller_fullurl] = identifier[inspect] . identifier[stack] ()[ literal[int] ][ literal[int] ]
identifier[caller_relurl] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[caller_fullurl] )
identifier[caller_modurl] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[caller_relurl] )[ literal[int] ]
identifier[dirs] = identifier[caller_modurl] . identifier[split] ( literal[string] )
identifier[dirs] [ literal[int] ]= literal[string]
identifier[outDir] = identifier[os] . identifier[path] . identifier[join] (*([ literal[string] ]+ identifier[dirs] [ literal[int] :]))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[outDir] ): identifier[os] . identifier[makedirs] ( identifier[outDir] )
identifier[dirs] . identifier[append] ( literal[string] )
identifier[inDir] = identifier[os] . identifier[path] . identifier[join] (* identifier[dirs] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[inDir] ):
identifier[logging] . identifier[critical] ( literal[string] % identifier[inDir] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[inDir] , identifier[outDir] | def getWorkDirs():
"""get input/output dirs (same input/output layout as for package)"""
# get caller module
caller_fullurl = inspect.stack()[1][1]
caller_relurl = os.path.relpath(caller_fullurl)
caller_modurl = os.path.splitext(caller_relurl)[0]
# split caller_url & append 'Dir' to package name
dirs = caller_modurl.split('/')
dirs[0] = 'data' # TODO de-hardcode
# get, check and create outdir
outDir = os.path.join(*['output'] + dirs[1:])
if not os.path.exists(outDir):
os.makedirs(outDir) # depends on [control=['if'], data=[]]
# get and check indir
dirs.append('input')
inDir = os.path.join(*dirs)
if not os.path.exists(inDir):
logging.critical('create input dir %s to continue!' % inDir)
sys.exit(1) # depends on [control=['if'], data=[]]
return (inDir, outDir) |
def labels(self, include_missing=False, include_transforms_for_dims=False):
    """Gets labels for each cube's dimension.
    Args
        include_missing (bool): Include labels for missing values
    Returns
        labels (list of lists): Labels for each dimension
    """
    all_labels = []
    for dimension in self.dimensions:
        all_labels.append(
            dimension.labels(include_missing, include_transforms_for_dims))
    return all_labels
constant[Gets labels for each cube's dimension.
Args
include_missing (bool): Include labels for missing values
Returns
labels (list of lists): Labels for each dimension
]
return[<ast.ListComp object at 0x7da18dc99c00>] | keyword[def] identifier[labels] ( identifier[self] , identifier[include_missing] = keyword[False] , identifier[include_transforms_for_dims] = keyword[False] ):
literal[string]
keyword[return] [
identifier[dim] . identifier[labels] ( identifier[include_missing] , identifier[include_transforms_for_dims] )
keyword[for] identifier[dim] keyword[in] identifier[self] . identifier[dimensions]
] | def labels(self, include_missing=False, include_transforms_for_dims=False):
"""Gets labels for each cube's dimension.
Args
include_missing (bool): Include labels for missing values
Returns
labels (list of lists): Labels for each dimension
"""
return [dim.labels(include_missing, include_transforms_for_dims) for dim in self.dimensions] |
def snap_counts(self):
    """Gets the snap counts for both teams' players and returns them in a
    DataFrame. Note: only goes back to 2012.
    :returns: DataFrame of snap count data
    """
    # TODO: combine duplicate players, see 201312150mia - ThomDa03
    doc = self.get_doc()
    teams = (self.away(), self.home())
    frames = []
    for idx, table_id in enumerate(('vis_snap_counts', 'home_snap_counts')):
        table = sportsref.utils.parse_table(doc('table#{}'.format(table_id)))
        frames.append(table.assign(
            is_home=bool(idx),
            team=teams[idx],
            opp=teams[idx * -1 + 1],  # the other team
        ))
    df = pd.concat(frames)
    if df.empty:
        return df
    return df.set_index('player_id')
constant[Gets the snap counts for both teams' players and returns them in a
DataFrame. Note: only goes back to 2012.
:returns: DataFrame of snap count data
]
variable[doc] assign[=] call[name[self].get_doc, parameter[]]
variable[table_ids] assign[=] tuple[[<ast.Constant object at 0x7da20e9549d0>, <ast.Constant object at 0x7da20e957b20>]]
variable[tms] assign[=] tuple[[<ast.Call object at 0x7da20e957c70>, <ast.Call object at 0x7da20e954c10>]]
variable[df] assign[=] call[name[pd].concat, parameter[<ast.ListComp object at 0x7da20e955db0>]]
if name[df].empty begin[:]
return[name[df]]
return[call[name[df].set_index, parameter[constant[player_id]]]] | keyword[def] identifier[snap_counts] ( identifier[self] ):
literal[string]
identifier[doc] = identifier[self] . identifier[get_doc] ()
identifier[table_ids] =( literal[string] , literal[string] )
identifier[tms] =( identifier[self] . identifier[away] (), identifier[self] . identifier[home] ())
identifier[df] = identifier[pd] . identifier[concat] ([
identifier[sportsref] . identifier[utils] . identifier[parse_table] ( identifier[doc] ( literal[string] . identifier[format] ( identifier[table_id] )))
. identifier[assign] ( identifier[is_home] = identifier[bool] ( identifier[i] ), identifier[team] = identifier[tms] [ identifier[i] ], identifier[opp] = identifier[tms] [ identifier[i] *- literal[int] + literal[int] ])
keyword[for] identifier[i] , identifier[table_id] keyword[in] identifier[enumerate] ( identifier[table_ids] )
])
keyword[if] identifier[df] . identifier[empty] :
keyword[return] identifier[df]
keyword[return] identifier[df] . identifier[set_index] ( literal[string] ) | def snap_counts(self):
"""Gets the snap counts for both teams' players and returns them in a
DataFrame. Note: only goes back to 2012.
:returns: DataFrame of snap count data
"""
# TODO: combine duplicate players, see 201312150mia - ThomDa03
doc = self.get_doc()
table_ids = ('vis_snap_counts', 'home_snap_counts')
tms = (self.away(), self.home())
df = pd.concat([sportsref.utils.parse_table(doc('table#{}'.format(table_id))).assign(is_home=bool(i), team=tms[i], opp=tms[i * -1 + 1]) for (i, table_id) in enumerate(table_ids)])
if df.empty:
return df # depends on [control=['if'], data=[]]
return df.set_index('player_id') |
def get_original_before_save(sender, instance, created):
    """Event listener to get the original instance before it's saved."""
    # Only fetch for event-ready models on updates (not on creation).
    if instance._meta.event_ready and not created:
        instance.get_original()
constant[Event listener to get the original instance before it's saved.]
if <ast.BoolOp object at 0x7da18f723670> begin[:]
return[None]
call[name[instance].get_original, parameter[]] | keyword[def] identifier[get_original_before_save] ( identifier[sender] , identifier[instance] , identifier[created] ):
literal[string]
keyword[if] keyword[not] identifier[instance] . identifier[_meta] . identifier[event_ready] keyword[or] identifier[created] :
keyword[return]
identifier[instance] . identifier[get_original] () | def get_original_before_save(sender, instance, created):
"""Event listener to get the original instance before it's saved."""
if not instance._meta.event_ready or created:
return # depends on [control=['if'], data=[]]
instance.get_original() |
def load_api_folder(api_folder_path):
    """ load api definitions from api folder.

    Each file under ``api_folder_path`` may contain either a list of
    single-key ``{"api": {...}}`` items (keyed in the result by the api's
    ``id``/``def``/``name``) or a plain dict (keyed by the file path).

    Args:
        api_folder_path (str): api files folder.

    Returns:
        dict: mapping of api identifier -> api definition dict.

    Raises:
        exceptions.ParamsError: on a malformed or duplicated API definition.
    """
    definitions = {}
    folder_content = load_folder_content(api_folder_path)
    for file_path, content in folder_content.items():
        # TODO: add JSON schema validation
        if isinstance(content, list):
            for entry in content:
                key, api_dict = entry.popitem()
                # Identifier precedence: id, then def, then name.
                api_id = (api_dict.get("id") or api_dict.get("def")
                          or api_dict.get("name"))
                if key != "api" or not api_id:
                    raise exceptions.ParamsError(
                        "Invalid API defined in {}".format(file_path))
                if api_id in definitions:
                    raise exceptions.ParamsError(
                        "Duplicated API ({}) defined in {}".format(
                            api_id, file_path))
                definitions[api_id] = api_dict
        elif isinstance(content, dict):
            if file_path in definitions:
                raise exceptions.ParamsError(
                    "Duplicated API defined: {}".format(file_path))
            definitions[file_path] = content
    return definitions
constant[ load api definitions from api folder.
Args:
api_folder_path (str): api files folder.
api file should be in the following format:
[
{
"api": {
"def": "api_login",
"request": {},
"validate": []
}
},
{
"api": {
"def": "api_logout",
"request": {},
"validate": []
}
}
]
Returns:
dict: api definition mapping.
{
"api_login": {
"function_meta": {"func_name": "api_login", "args": [], "kwargs": {}}
"request": {}
},
"api_logout": {
"function_meta": {"func_name": "api_logout", "args": [], "kwargs": {}}
"request": {}
}
}
]
variable[api_definition_mapping] assign[=] dictionary[[], []]
variable[api_items_mapping] assign[=] call[name[load_folder_content], parameter[name[api_folder_path]]]
for taget[tuple[[<ast.Name object at 0x7da18ede5570>, <ast.Name object at 0x7da18ede5f90>]]] in starred[call[name[api_items_mapping].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[api_items], name[list]]] begin[:]
for taget[name[api_item]] in starred[name[api_items]] begin[:]
<ast.Tuple object at 0x7da18ede4610> assign[=] call[name[api_item].popitem, parameter[]]
variable[api_id] assign[=] <ast.BoolOp object at 0x7da18ede4160>
if <ast.BoolOp object at 0x7da18ede6290> begin[:]
<ast.Raise object at 0x7da18ede7b50>
if compare[name[api_id] in name[api_definition_mapping]] begin[:]
<ast.Raise object at 0x7da18ede5cf0>
return[name[api_definition_mapping]] | keyword[def] identifier[load_api_folder] ( identifier[api_folder_path] ):
literal[string]
identifier[api_definition_mapping] ={}
identifier[api_items_mapping] = identifier[load_folder_content] ( identifier[api_folder_path] )
keyword[for] identifier[api_file_path] , identifier[api_items] keyword[in] identifier[api_items_mapping] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[api_items] , identifier[list] ):
keyword[for] identifier[api_item] keyword[in] identifier[api_items] :
identifier[key] , identifier[api_dict] = identifier[api_item] . identifier[popitem] ()
identifier[api_id] = identifier[api_dict] . identifier[get] ( literal[string] ) keyword[or] identifier[api_dict] . identifier[get] ( literal[string] ) keyword[or] identifier[api_dict] . identifier[get] ( literal[string] )
keyword[if] identifier[key] != literal[string] keyword[or] keyword[not] identifier[api_id] :
keyword[raise] identifier[exceptions] . identifier[ParamsError] (
literal[string] . identifier[format] ( identifier[api_file_path] ))
keyword[if] identifier[api_id] keyword[in] identifier[api_definition_mapping] :
keyword[raise] identifier[exceptions] . identifier[ParamsError] (
literal[string] . identifier[format] ( identifier[api_id] , identifier[api_file_path] ))
keyword[else] :
identifier[api_definition_mapping] [ identifier[api_id] ]= identifier[api_dict]
keyword[elif] identifier[isinstance] ( identifier[api_items] , identifier[dict] ):
keyword[if] identifier[api_file_path] keyword[in] identifier[api_definition_mapping] :
keyword[raise] identifier[exceptions] . identifier[ParamsError] (
literal[string] . identifier[format] ( identifier[api_file_path] ))
keyword[else] :
identifier[api_definition_mapping] [ identifier[api_file_path] ]= identifier[api_items]
keyword[return] identifier[api_definition_mapping] | def load_api_folder(api_folder_path):
""" load api definitions from api folder.
Args:
api_folder_path (str): api files folder.
api file should be in the following format:
[
{
"api": {
"def": "api_login",
"request": {},
"validate": []
}
},
{
"api": {
"def": "api_logout",
"request": {},
"validate": []
}
}
]
Returns:
dict: api definition mapping.
{
"api_login": {
"function_meta": {"func_name": "api_login", "args": [], "kwargs": {}}
"request": {}
},
"api_logout": {
"function_meta": {"func_name": "api_logout", "args": [], "kwargs": {}}
"request": {}
}
}
"""
api_definition_mapping = {}
api_items_mapping = load_folder_content(api_folder_path)
for (api_file_path, api_items) in api_items_mapping.items():
# TODO: add JSON schema validation
if isinstance(api_items, list):
for api_item in api_items:
(key, api_dict) = api_item.popitem()
api_id = api_dict.get('id') or api_dict.get('def') or api_dict.get('name')
if key != 'api' or not api_id:
raise exceptions.ParamsError('Invalid API defined in {}'.format(api_file_path)) # depends on [control=['if'], data=[]]
if api_id in api_definition_mapping:
raise exceptions.ParamsError('Duplicated API ({}) defined in {}'.format(api_id, api_file_path)) # depends on [control=['if'], data=['api_id']]
else:
api_definition_mapping[api_id] = api_dict # depends on [control=['for'], data=['api_item']] # depends on [control=['if'], data=[]]
elif isinstance(api_items, dict):
if api_file_path in api_definition_mapping:
raise exceptions.ParamsError('Duplicated API defined: {}'.format(api_file_path)) # depends on [control=['if'], data=['api_file_path']]
else:
api_definition_mapping[api_file_path] = api_items # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return api_definition_mapping |
def apt_sources(attrs=None, where=None):
    '''
    Return apt_sources information from osquery
    CLI Example:
    .. code-block:: bash
        salt '*' osquery.apt_sources
    '''
    # apt sources only exist on Debian-derived distributions.
    if __grains__['os_family'] != 'Debian':
        return {'result': False,
                'comment': 'Only available on Debian based systems.'}
    return _osquery_cmd(table='apt_sources', attrs=attrs, where=where)
constant[
Return apt_sources information from osquery
CLI Example:
.. code-block:: bash
salt '*' osquery.apt_sources
]
if compare[call[name[__grains__]][constant[os_family]] equal[==] constant[Debian]] begin[:]
return[call[name[_osquery_cmd], parameter[]]]
return[dictionary[[<ast.Constant object at 0x7da1b2098250>, <ast.Constant object at 0x7da1b2099570>], [<ast.Constant object at 0x7da1b2099180>, <ast.Constant object at 0x7da1b2099c90>]]] | keyword[def] identifier[apt_sources] ( identifier[attrs] = keyword[None] , identifier[where] = keyword[None] ):
literal[string]
keyword[if] identifier[__grains__] [ literal[string] ]== literal[string] :
keyword[return] identifier[_osquery_cmd] ( identifier[table] = literal[string] , identifier[attrs] = identifier[attrs] , identifier[where] = identifier[where] )
keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] } | def apt_sources(attrs=None, where=None):
"""
Return apt_sources information from osquery
CLI Example:
.. code-block:: bash
salt '*' osquery.apt_sources
"""
if __grains__['os_family'] == 'Debian':
return _osquery_cmd(table='apt_sources', attrs=attrs, where=where) # depends on [control=['if'], data=[]]
return {'result': False, 'comment': 'Only available on Debian based systems.'} |
def get_view_description(view_cls, html=False):
    """
    Given a view class, return a textual description to represent the view.
    This name is used in the browsable API, and in OPTIONS responses.
    This function is the default for the `VIEW_DESCRIPTION_FUNCTION` setting.
    """
    # The view's docstring is the description source; dedent for display.
    raw = view_cls.__doc__ or ''
    description = formatting.dedent(smart_text(raw))
    if html:
        return formatting.markup_description(description)
    return description
constant[
Given a view class, return a textual description to represent the view.
This name is used in the browsable API, and in OPTIONS responses.
This function is the default for the `VIEW_DESCRIPTION_FUNCTION` setting.
]
variable[description] assign[=] <ast.BoolOp object at 0x7da1b1300b80>
variable[description] assign[=] call[name[formatting].dedent, parameter[call[name[smart_text], parameter[name[description]]]]]
if name[html] begin[:]
return[call[name[formatting].markup_description, parameter[name[description]]]]
return[name[description]] | keyword[def] identifier[get_view_description] ( identifier[view_cls] , identifier[html] = keyword[False] ):
literal[string]
identifier[description] = identifier[view_cls] . identifier[__doc__] keyword[or] literal[string]
identifier[description] = identifier[formatting] . identifier[dedent] ( identifier[smart_text] ( identifier[description] ))
keyword[if] identifier[html] :
keyword[return] identifier[formatting] . identifier[markup_description] ( identifier[description] )
keyword[return] identifier[description] | def get_view_description(view_cls, html=False):
"""
Given a view class, return a textual description to represent the view.
This name is used in the browsable API, and in OPTIONS responses.
This function is the default for the `VIEW_DESCRIPTION_FUNCTION` setting.
"""
description = view_cls.__doc__ or ''
description = formatting.dedent(smart_text(description))
if html:
return formatting.markup_description(description) # depends on [control=['if'], data=[]]
return description |
def disambiguate_fname(self, fname):
    """Generate a file name without ambiguation."""
    # Drop empty/None entries before delegating to the helper.
    non_empty_paths = list(filter(None, self.filenames))
    return sourcecode.disambiguate_fname(non_empty_paths, fname)
constant[Generate a file name without ambiguation.]
variable[files_path_list] assign[=] <ast.ListComp object at 0x7da2043451e0>
return[call[name[sourcecode].disambiguate_fname, parameter[name[files_path_list], name[fname]]]] | keyword[def] identifier[disambiguate_fname] ( identifier[self] , identifier[fname] ):
literal[string]
identifier[files_path_list] =[ identifier[filename] keyword[for] identifier[filename] keyword[in] identifier[self] . identifier[filenames]
keyword[if] identifier[filename] ]
keyword[return] identifier[sourcecode] . identifier[disambiguate_fname] ( identifier[files_path_list] , identifier[fname] ) | def disambiguate_fname(self, fname):
"""Generate a file name without ambiguation."""
files_path_list = [filename for filename in self.filenames if filename]
return sourcecode.disambiguate_fname(files_path_list, fname) |
def copyFilepath( self ):
"""
Copies the current filepath contents to the current clipboard.
"""
clipboard = QApplication.instance().clipboard()
clipboard.setText(self.filepath())
clipboard.setText(self.filepath(), clipboard.Selection) | def function[copyFilepath, parameter[self]]:
constant[
Copies the current filepath contents to the current clipboard.
]
variable[clipboard] assign[=] call[call[name[QApplication].instance, parameter[]].clipboard, parameter[]]
call[name[clipboard].setText, parameter[call[name[self].filepath, parameter[]]]]
call[name[clipboard].setText, parameter[call[name[self].filepath, parameter[]], name[clipboard].Selection]] | keyword[def] identifier[copyFilepath] ( identifier[self] ):
literal[string]
identifier[clipboard] = identifier[QApplication] . identifier[instance] (). identifier[clipboard] ()
identifier[clipboard] . identifier[setText] ( identifier[self] . identifier[filepath] ())
identifier[clipboard] . identifier[setText] ( identifier[self] . identifier[filepath] (), identifier[clipboard] . identifier[Selection] ) | def copyFilepath(self):
"""
Copies the current filepath contents to the current clipboard.
"""
clipboard = QApplication.instance().clipboard()
clipboard.setText(self.filepath())
clipboard.setText(self.filepath(), clipboard.Selection) |
def synchronize(self, verbose=False):
"""
Synchronizes the Repository information with the directory.
All registered but missing files and directories in the directory,
will be automatically removed from the Repository.
:parameters:
#. verbose (boolean): Whether to be warn and inform about any abnormalities.
"""
if self.__path is None:
return
# walk directories
for dirPath in sorted(list(self.walk_directories_relative_path())):
realPath = os.path.join(self.__path, dirPath)
# if directory exist
if os.path.isdir(realPath):
continue
if verbose: warnings.warn("%s directory is missing"%realPath)
# loop to get dirInfoDict
keys = dirPath.split(os.sep)
dirInfoDict = self
for idx in range(len(keys)-1):
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is None: break
dirInfoDict = dict.get(dirs, keys[idx], None)
if dirInfoDict is None: break
# remove dirInfoDict directory if existing
if dirInfoDict is not None:
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is not None:
dict.pop( dirs, keys[-1], None )
# walk files
for filePath in sorted(list(self.walk_files_relative_path())):
realPath = os.path.join(self.__path, filePath)
# if file exists
if os.path.isfile( realPath ):
continue
if verbose: warnings.warn("%s file is missing"%realPath)
# loop to get dirInfoDict
keys = filePath.split(os.sep)
dirInfoDict = self
for idx in range(len(keys)-1):
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is None: break
dirInfoDict = dict.get(dirs, keys[idx], None)
if dirInfoDict is None: break
# remove dirInfoDict file if existing
if dirInfoDict is not None:
files = dict.get(dirInfoDict, 'files', None)
if files is not None:
dict.pop( files, keys[-1], None ) | def function[synchronize, parameter[self, verbose]]:
constant[
Synchronizes the Repository information with the directory.
All registered but missing files and directories in the directory,
will be automatically removed from the Repository.
:parameters:
#. verbose (boolean): Whether to be warn and inform about any abnormalities.
]
if compare[name[self].__path is constant[None]] begin[:]
return[None]
for taget[name[dirPath]] in starred[call[name[sorted], parameter[call[name[list], parameter[call[name[self].walk_directories_relative_path, parameter[]]]]]]] begin[:]
variable[realPath] assign[=] call[name[os].path.join, parameter[name[self].__path, name[dirPath]]]
if call[name[os].path.isdir, parameter[name[realPath]]] begin[:]
continue
if name[verbose] begin[:]
call[name[warnings].warn, parameter[binary_operation[constant[%s directory is missing] <ast.Mod object at 0x7da2590d6920> name[realPath]]]]
variable[keys] assign[=] call[name[dirPath].split, parameter[name[os].sep]]
variable[dirInfoDict] assign[=] name[self]
for taget[name[idx]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[keys]]] - constant[1]]]]] begin[:]
variable[dirs] assign[=] call[name[dict].get, parameter[name[dirInfoDict], constant[directories], constant[None]]]
if compare[name[dirs] is constant[None]] begin[:]
break
variable[dirInfoDict] assign[=] call[name[dict].get, parameter[name[dirs], call[name[keys]][name[idx]], constant[None]]]
if compare[name[dirInfoDict] is constant[None]] begin[:]
break
if compare[name[dirInfoDict] is_not constant[None]] begin[:]
variable[dirs] assign[=] call[name[dict].get, parameter[name[dirInfoDict], constant[directories], constant[None]]]
if compare[name[dirs] is_not constant[None]] begin[:]
call[name[dict].pop, parameter[name[dirs], call[name[keys]][<ast.UnaryOp object at 0x7da2047e8af0>], constant[None]]]
for taget[name[filePath]] in starred[call[name[sorted], parameter[call[name[list], parameter[call[name[self].walk_files_relative_path, parameter[]]]]]]] begin[:]
variable[realPath] assign[=] call[name[os].path.join, parameter[name[self].__path, name[filePath]]]
if call[name[os].path.isfile, parameter[name[realPath]]] begin[:]
continue
if name[verbose] begin[:]
call[name[warnings].warn, parameter[binary_operation[constant[%s file is missing] <ast.Mod object at 0x7da2590d6920> name[realPath]]]]
variable[keys] assign[=] call[name[filePath].split, parameter[name[os].sep]]
variable[dirInfoDict] assign[=] name[self]
for taget[name[idx]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[keys]]] - constant[1]]]]] begin[:]
variable[dirs] assign[=] call[name[dict].get, parameter[name[dirInfoDict], constant[directories], constant[None]]]
if compare[name[dirs] is constant[None]] begin[:]
break
variable[dirInfoDict] assign[=] call[name[dict].get, parameter[name[dirs], call[name[keys]][name[idx]], constant[None]]]
if compare[name[dirInfoDict] is constant[None]] begin[:]
break
if compare[name[dirInfoDict] is_not constant[None]] begin[:]
variable[files] assign[=] call[name[dict].get, parameter[name[dirInfoDict], constant[files], constant[None]]]
if compare[name[files] is_not constant[None]] begin[:]
call[name[dict].pop, parameter[name[files], call[name[keys]][<ast.UnaryOp object at 0x7da2044c2140>], constant[None]]] | keyword[def] identifier[synchronize] ( identifier[self] , identifier[verbose] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[__path] keyword[is] keyword[None] :
keyword[return]
keyword[for] identifier[dirPath] keyword[in] identifier[sorted] ( identifier[list] ( identifier[self] . identifier[walk_directories_relative_path] ())):
identifier[realPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[__path] , identifier[dirPath] )
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[realPath] ):
keyword[continue]
keyword[if] identifier[verbose] : identifier[warnings] . identifier[warn] ( literal[string] % identifier[realPath] )
identifier[keys] = identifier[dirPath] . identifier[split] ( identifier[os] . identifier[sep] )
identifier[dirInfoDict] = identifier[self]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[keys] )- literal[int] ):
identifier[dirs] = identifier[dict] . identifier[get] ( identifier[dirInfoDict] , literal[string] , keyword[None] )
keyword[if] identifier[dirs] keyword[is] keyword[None] : keyword[break]
identifier[dirInfoDict] = identifier[dict] . identifier[get] ( identifier[dirs] , identifier[keys] [ identifier[idx] ], keyword[None] )
keyword[if] identifier[dirInfoDict] keyword[is] keyword[None] : keyword[break]
keyword[if] identifier[dirInfoDict] keyword[is] keyword[not] keyword[None] :
identifier[dirs] = identifier[dict] . identifier[get] ( identifier[dirInfoDict] , literal[string] , keyword[None] )
keyword[if] identifier[dirs] keyword[is] keyword[not] keyword[None] :
identifier[dict] . identifier[pop] ( identifier[dirs] , identifier[keys] [- literal[int] ], keyword[None] )
keyword[for] identifier[filePath] keyword[in] identifier[sorted] ( identifier[list] ( identifier[self] . identifier[walk_files_relative_path] ())):
identifier[realPath] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[__path] , identifier[filePath] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[realPath] ):
keyword[continue]
keyword[if] identifier[verbose] : identifier[warnings] . identifier[warn] ( literal[string] % identifier[realPath] )
identifier[keys] = identifier[filePath] . identifier[split] ( identifier[os] . identifier[sep] )
identifier[dirInfoDict] = identifier[self]
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[len] ( identifier[keys] )- literal[int] ):
identifier[dirs] = identifier[dict] . identifier[get] ( identifier[dirInfoDict] , literal[string] , keyword[None] )
keyword[if] identifier[dirs] keyword[is] keyword[None] : keyword[break]
identifier[dirInfoDict] = identifier[dict] . identifier[get] ( identifier[dirs] , identifier[keys] [ identifier[idx] ], keyword[None] )
keyword[if] identifier[dirInfoDict] keyword[is] keyword[None] : keyword[break]
keyword[if] identifier[dirInfoDict] keyword[is] keyword[not] keyword[None] :
identifier[files] = identifier[dict] . identifier[get] ( identifier[dirInfoDict] , literal[string] , keyword[None] )
keyword[if] identifier[files] keyword[is] keyword[not] keyword[None] :
identifier[dict] . identifier[pop] ( identifier[files] , identifier[keys] [- literal[int] ], keyword[None] ) | def synchronize(self, verbose=False):
"""
Synchronizes the Repository information with the directory.
All registered but missing files and directories in the directory,
will be automatically removed from the Repository.
:parameters:
#. verbose (boolean): Whether to be warn and inform about any abnormalities.
"""
if self.__path is None:
return # depends on [control=['if'], data=[]]
# walk directories
for dirPath in sorted(list(self.walk_directories_relative_path())):
realPath = os.path.join(self.__path, dirPath)
# if directory exist
if os.path.isdir(realPath):
continue # depends on [control=['if'], data=[]]
if verbose:
warnings.warn('%s directory is missing' % realPath) # depends on [control=['if'], data=[]]
# loop to get dirInfoDict
keys = dirPath.split(os.sep)
dirInfoDict = self
for idx in range(len(keys) - 1):
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is None:
break # depends on [control=['if'], data=[]]
dirInfoDict = dict.get(dirs, keys[idx], None)
if dirInfoDict is None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
# remove dirInfoDict directory if existing
if dirInfoDict is not None:
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is not None:
dict.pop(dirs, keys[-1], None) # depends on [control=['if'], data=['dirs']] # depends on [control=['if'], data=['dirInfoDict']] # depends on [control=['for'], data=['dirPath']]
# walk files
for filePath in sorted(list(self.walk_files_relative_path())):
realPath = os.path.join(self.__path, filePath)
# if file exists
if os.path.isfile(realPath):
continue # depends on [control=['if'], data=[]]
if verbose:
warnings.warn('%s file is missing' % realPath) # depends on [control=['if'], data=[]]
# loop to get dirInfoDict
keys = filePath.split(os.sep)
dirInfoDict = self
for idx in range(len(keys) - 1):
dirs = dict.get(dirInfoDict, 'directories', None)
if dirs is None:
break # depends on [control=['if'], data=[]]
dirInfoDict = dict.get(dirs, keys[idx], None)
if dirInfoDict is None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['idx']]
# remove dirInfoDict file if existing
if dirInfoDict is not None:
files = dict.get(dirInfoDict, 'files', None)
if files is not None:
dict.pop(files, keys[-1], None) # depends on [control=['if'], data=['files']] # depends on [control=['if'], data=['dirInfoDict']] # depends on [control=['for'], data=['filePath']] |
def replace_tags(string, from_tag='i', to_tag='italic'):
"""
Replace tags such as <i> to <italic>
<sup> and <sub> are allowed and do not need to be replaced
This does not validate markup
"""
string = string.replace('<' + from_tag + '>', '<' + to_tag + '>')
string = string.replace('</' + from_tag + '>', '</' + to_tag + '>')
return string | def function[replace_tags, parameter[string, from_tag, to_tag]]:
constant[
Replace tags such as <i> to <italic>
<sup> and <sub> are allowed and do not need to be replaced
This does not validate markup
]
variable[string] assign[=] call[name[string].replace, parameter[binary_operation[binary_operation[constant[<] + name[from_tag]] + constant[>]], binary_operation[binary_operation[constant[<] + name[to_tag]] + constant[>]]]]
variable[string] assign[=] call[name[string].replace, parameter[binary_operation[binary_operation[constant[</] + name[from_tag]] + constant[>]], binary_operation[binary_operation[constant[</] + name[to_tag]] + constant[>]]]]
return[name[string]] | keyword[def] identifier[replace_tags] ( identifier[string] , identifier[from_tag] = literal[string] , identifier[to_tag] = literal[string] ):
literal[string]
identifier[string] = identifier[string] . identifier[replace] ( literal[string] + identifier[from_tag] + literal[string] , literal[string] + identifier[to_tag] + literal[string] )
identifier[string] = identifier[string] . identifier[replace] ( literal[string] + identifier[from_tag] + literal[string] , literal[string] + identifier[to_tag] + literal[string] )
keyword[return] identifier[string] | def replace_tags(string, from_tag='i', to_tag='italic'):
"""
Replace tags such as <i> to <italic>
<sup> and <sub> are allowed and do not need to be replaced
This does not validate markup
"""
string = string.replace('<' + from_tag + '>', '<' + to_tag + '>')
string = string.replace('</' + from_tag + '>', '</' + to_tag + '>')
return string |
def linear_get_coefficients(coef, rhouv, s, i, j, k, u, v,
unfolding, matrix_form):
r"""Get the indices mu, nu, and term coefficients for linear terms.
We determine mu and nu, the indices labeling the density matrix components
d rho[mu] /dt = sum_nu A[mu, nu]*rho[nu]
for this complex and rho_u,v.
>>> from fast.symbolic import define_density_matrix
>>> Ne = 2
>>> coef = 1+2j
>>> rhouv = define_density_matrix(Ne)[1, 1]
>>> s, i, j, k, u, v = (1, 1, 0, 1, 1, 1)
>>> unfolding = Unfolding(Ne, real=True, normalized=True)
>>> linear_get_coefficients(coef, rhouv, s, i, j, k, u, v,
... unfolding, False)
[[1, 0, -2.00000000000000, False, False]]
"""
Ne = unfolding.Ne
Mu = unfolding.Mu
# We determine mu, the index labeling the equation.
mu = Mu(s, i, j)
if unfolding.normalized and u == 0 and v == 0:
# We find the nu and coefficients for a term of the form.
# coef*rho_{00} = coef*(1-sum_{i=1}^{Ne-1} rho_{ii})
if unfolding.real:
ss = 1
else:
ss = 0
mu11 = Mu(ss, 1, 1)
muNeNe = Mu(ss, Ne-1, Ne-1)
rhouv_isconjugated = False
if s == 1:
coef_list = [[mu, nu, im(coef), matrix_form, rhouv_isconjugated]
for nu in range(mu11, muNeNe+1)]
elif s == -1:
coef_list = [[mu, nu, -re(coef), matrix_form, rhouv_isconjugated]
for nu in range(mu11, muNeNe+1)]
elif s == 0:
coef_list = [[mu, nu, -coef, matrix_form, rhouv_isconjugated]
for nu in range(mu11, muNeNe+1)]
return coef_list
#####################################################################
if (unfolding.lower_triangular and
isinstance(rhouv, sympy.conjugate)):
u, v = (v, u)
rhouv_isconjugated = True
else:
rhouv_isconjugated = False
# If the unfolding is real, there are two terms for this
# component rhouv of equation mu.
if unfolding.real:
nur = Mu(1, u, v)
nui = Mu(-1, u, v)
else:
nu = Mu(0, u, v)
#####################################################################
# We determine the coefficients for each term.
if unfolding.real:
# There are two sets of forumas for the coefficients depending
# on whether rhouv_isconjugated.
# re(I*x*conjugate(y)) = -im(x)*re(y) + re(x)*im(y)
# re(I*x*y) = -im(x)*re(y) - re(x)*im(y)
# im(I*x*conjugate(y)) = +re(x)*re(y) + im(x)*im(y)
# im(I*x*y) = +re(x)*re(y) - im(x)*im(y)
if s == 1:
# The real part
if rhouv_isconjugated:
coef_rerhouv = -im(coef)
coef_imrhouv = re(coef)
else:
coef_rerhouv = -im(coef)
coef_imrhouv = -re(coef)
elif s == -1:
if rhouv_isconjugated:
coef_rerhouv = re(coef)
coef_imrhouv = im(coef)
else:
coef_rerhouv = re(coef)
coef_imrhouv = -im(coef)
coef_list = [[mu, nur, coef_rerhouv, matrix_form, rhouv_isconjugated]]
if nui is not None:
coef_list += [[mu, nui, coef_imrhouv,
matrix_form, rhouv_isconjugated]]
else:
coef_list = [[mu, nu, coef, matrix_form, rhouv_isconjugated]]
return coef_list | def function[linear_get_coefficients, parameter[coef, rhouv, s, i, j, k, u, v, unfolding, matrix_form]]:
constant[Get the indices mu, nu, and term coefficients for linear terms.
We determine mu and nu, the indices labeling the density matrix components
d rho[mu] /dt = sum_nu A[mu, nu]*rho[nu]
for this complex and rho_u,v.
>>> from fast.symbolic import define_density_matrix
>>> Ne = 2
>>> coef = 1+2j
>>> rhouv = define_density_matrix(Ne)[1, 1]
>>> s, i, j, k, u, v = (1, 1, 0, 1, 1, 1)
>>> unfolding = Unfolding(Ne, real=True, normalized=True)
>>> linear_get_coefficients(coef, rhouv, s, i, j, k, u, v,
... unfolding, False)
[[1, 0, -2.00000000000000, False, False]]
]
variable[Ne] assign[=] name[unfolding].Ne
variable[Mu] assign[=] name[unfolding].Mu
variable[mu] assign[=] call[name[Mu], parameter[name[s], name[i], name[j]]]
if <ast.BoolOp object at 0x7da1b19ae770> begin[:]
if name[unfolding].real begin[:]
variable[ss] assign[=] constant[1]
variable[mu11] assign[=] call[name[Mu], parameter[name[ss], constant[1], constant[1]]]
variable[muNeNe] assign[=] call[name[Mu], parameter[name[ss], binary_operation[name[Ne] - constant[1]], binary_operation[name[Ne] - constant[1]]]]
variable[rhouv_isconjugated] assign[=] constant[False]
if compare[name[s] equal[==] constant[1]] begin[:]
variable[coef_list] assign[=] <ast.ListComp object at 0x7da1b19adcc0>
return[name[coef_list]]
if <ast.BoolOp object at 0x7da1b19aedd0> begin[:]
<ast.Tuple object at 0x7da1b19aef80> assign[=] tuple[[<ast.Name object at 0x7da1b19af040>, <ast.Name object at 0x7da1b19af070>]]
variable[rhouv_isconjugated] assign[=] constant[True]
if name[unfolding].real begin[:]
variable[nur] assign[=] call[name[Mu], parameter[constant[1], name[u], name[v]]]
variable[nui] assign[=] call[name[Mu], parameter[<ast.UnaryOp object at 0x7da1b196dfc0>, name[u], name[v]]]
if name[unfolding].real begin[:]
if compare[name[s] equal[==] constant[1]] begin[:]
if name[rhouv_isconjugated] begin[:]
variable[coef_rerhouv] assign[=] <ast.UnaryOp object at 0x7da1b196f190>
variable[coef_imrhouv] assign[=] call[name[re], parameter[name[coef]]]
variable[coef_list] assign[=] list[[<ast.List object at 0x7da1b19adae0>]]
if compare[name[nui] is_not constant[None]] begin[:]
<ast.AugAssign object at 0x7da1b19ad8d0>
return[name[coef_list]] | keyword[def] identifier[linear_get_coefficients] ( identifier[coef] , identifier[rhouv] , identifier[s] , identifier[i] , identifier[j] , identifier[k] , identifier[u] , identifier[v] ,
identifier[unfolding] , identifier[matrix_form] ):
literal[string]
identifier[Ne] = identifier[unfolding] . identifier[Ne]
identifier[Mu] = identifier[unfolding] . identifier[Mu]
identifier[mu] = identifier[Mu] ( identifier[s] , identifier[i] , identifier[j] )
keyword[if] identifier[unfolding] . identifier[normalized] keyword[and] identifier[u] == literal[int] keyword[and] identifier[v] == literal[int] :
keyword[if] identifier[unfolding] . identifier[real] :
identifier[ss] = literal[int]
keyword[else] :
identifier[ss] = literal[int]
identifier[mu11] = identifier[Mu] ( identifier[ss] , literal[int] , literal[int] )
identifier[muNeNe] = identifier[Mu] ( identifier[ss] , identifier[Ne] - literal[int] , identifier[Ne] - literal[int] )
identifier[rhouv_isconjugated] = keyword[False]
keyword[if] identifier[s] == literal[int] :
identifier[coef_list] =[[ identifier[mu] , identifier[nu] , identifier[im] ( identifier[coef] ), identifier[matrix_form] , identifier[rhouv_isconjugated] ]
keyword[for] identifier[nu] keyword[in] identifier[range] ( identifier[mu11] , identifier[muNeNe] + literal[int] )]
keyword[elif] identifier[s] ==- literal[int] :
identifier[coef_list] =[[ identifier[mu] , identifier[nu] ,- identifier[re] ( identifier[coef] ), identifier[matrix_form] , identifier[rhouv_isconjugated] ]
keyword[for] identifier[nu] keyword[in] identifier[range] ( identifier[mu11] , identifier[muNeNe] + literal[int] )]
keyword[elif] identifier[s] == literal[int] :
identifier[coef_list] =[[ identifier[mu] , identifier[nu] ,- identifier[coef] , identifier[matrix_form] , identifier[rhouv_isconjugated] ]
keyword[for] identifier[nu] keyword[in] identifier[range] ( identifier[mu11] , identifier[muNeNe] + literal[int] )]
keyword[return] identifier[coef_list]
keyword[if] ( identifier[unfolding] . identifier[lower_triangular] keyword[and]
identifier[isinstance] ( identifier[rhouv] , identifier[sympy] . identifier[conjugate] )):
identifier[u] , identifier[v] =( identifier[v] , identifier[u] )
identifier[rhouv_isconjugated] = keyword[True]
keyword[else] :
identifier[rhouv_isconjugated] = keyword[False]
keyword[if] identifier[unfolding] . identifier[real] :
identifier[nur] = identifier[Mu] ( literal[int] , identifier[u] , identifier[v] )
identifier[nui] = identifier[Mu] (- literal[int] , identifier[u] , identifier[v] )
keyword[else] :
identifier[nu] = identifier[Mu] ( literal[int] , identifier[u] , identifier[v] )
keyword[if] identifier[unfolding] . identifier[real] :
keyword[if] identifier[s] == literal[int] :
keyword[if] identifier[rhouv_isconjugated] :
identifier[coef_rerhouv] =- identifier[im] ( identifier[coef] )
identifier[coef_imrhouv] = identifier[re] ( identifier[coef] )
keyword[else] :
identifier[coef_rerhouv] =- identifier[im] ( identifier[coef] )
identifier[coef_imrhouv] =- identifier[re] ( identifier[coef] )
keyword[elif] identifier[s] ==- literal[int] :
keyword[if] identifier[rhouv_isconjugated] :
identifier[coef_rerhouv] = identifier[re] ( identifier[coef] )
identifier[coef_imrhouv] = identifier[im] ( identifier[coef] )
keyword[else] :
identifier[coef_rerhouv] = identifier[re] ( identifier[coef] )
identifier[coef_imrhouv] =- identifier[im] ( identifier[coef] )
identifier[coef_list] =[[ identifier[mu] , identifier[nur] , identifier[coef_rerhouv] , identifier[matrix_form] , identifier[rhouv_isconjugated] ]]
keyword[if] identifier[nui] keyword[is] keyword[not] keyword[None] :
identifier[coef_list] +=[[ identifier[mu] , identifier[nui] , identifier[coef_imrhouv] ,
identifier[matrix_form] , identifier[rhouv_isconjugated] ]]
keyword[else] :
identifier[coef_list] =[[ identifier[mu] , identifier[nu] , identifier[coef] , identifier[matrix_form] , identifier[rhouv_isconjugated] ]]
keyword[return] identifier[coef_list] | def linear_get_coefficients(coef, rhouv, s, i, j, k, u, v, unfolding, matrix_form):
"""Get the indices mu, nu, and term coefficients for linear terms.
We determine mu and nu, the indices labeling the density matrix components
d rho[mu] /dt = sum_nu A[mu, nu]*rho[nu]
for this complex and rho_u,v.
>>> from fast.symbolic import define_density_matrix
>>> Ne = 2
>>> coef = 1+2j
>>> rhouv = define_density_matrix(Ne)[1, 1]
>>> s, i, j, k, u, v = (1, 1, 0, 1, 1, 1)
>>> unfolding = Unfolding(Ne, real=True, normalized=True)
>>> linear_get_coefficients(coef, rhouv, s, i, j, k, u, v,
... unfolding, False)
[[1, 0, -2.00000000000000, False, False]]
"""
Ne = unfolding.Ne
Mu = unfolding.Mu
# We determine mu, the index labeling the equation.
mu = Mu(s, i, j)
if unfolding.normalized and u == 0 and (v == 0):
# We find the nu and coefficients for a term of the form.
# coef*rho_{00} = coef*(1-sum_{i=1}^{Ne-1} rho_{ii})
if unfolding.real:
ss = 1 # depends on [control=['if'], data=[]]
else:
ss = 0
mu11 = Mu(ss, 1, 1)
muNeNe = Mu(ss, Ne - 1, Ne - 1)
rhouv_isconjugated = False
if s == 1:
coef_list = [[mu, nu, im(coef), matrix_form, rhouv_isconjugated] for nu in range(mu11, muNeNe + 1)] # depends on [control=['if'], data=[]]
elif s == -1:
coef_list = [[mu, nu, -re(coef), matrix_form, rhouv_isconjugated] for nu in range(mu11, muNeNe + 1)] # depends on [control=['if'], data=[]]
elif s == 0:
coef_list = [[mu, nu, -coef, matrix_form, rhouv_isconjugated] for nu in range(mu11, muNeNe + 1)] # depends on [control=['if'], data=[]]
return coef_list # depends on [control=['if'], data=[]]
#####################################################################
if unfolding.lower_triangular and isinstance(rhouv, sympy.conjugate):
(u, v) = (v, u)
rhouv_isconjugated = True # depends on [control=['if'], data=[]]
else:
rhouv_isconjugated = False
# If the unfolding is real, there are two terms for this
# component rhouv of equation mu.
if unfolding.real:
nur = Mu(1, u, v)
nui = Mu(-1, u, v) # depends on [control=['if'], data=[]]
else:
nu = Mu(0, u, v)
#####################################################################
# We determine the coefficients for each term.
if unfolding.real:
# There are two sets of forumas for the coefficients depending
# on whether rhouv_isconjugated.
# re(I*x*conjugate(y)) = -im(x)*re(y) + re(x)*im(y)
# re(I*x*y) = -im(x)*re(y) - re(x)*im(y)
# im(I*x*conjugate(y)) = +re(x)*re(y) + im(x)*im(y)
# im(I*x*y) = +re(x)*re(y) - im(x)*im(y)
if s == 1:
# The real part
if rhouv_isconjugated:
coef_rerhouv = -im(coef)
coef_imrhouv = re(coef) # depends on [control=['if'], data=[]]
else:
coef_rerhouv = -im(coef)
coef_imrhouv = -re(coef) # depends on [control=['if'], data=[]]
elif s == -1:
if rhouv_isconjugated:
coef_rerhouv = re(coef)
coef_imrhouv = im(coef) # depends on [control=['if'], data=[]]
else:
coef_rerhouv = re(coef)
coef_imrhouv = -im(coef) # depends on [control=['if'], data=[]]
coef_list = [[mu, nur, coef_rerhouv, matrix_form, rhouv_isconjugated]]
if nui is not None:
coef_list += [[mu, nui, coef_imrhouv, matrix_form, rhouv_isconjugated]] # depends on [control=['if'], data=['nui']] # depends on [control=['if'], data=[]]
else:
coef_list = [[mu, nu, coef, matrix_form, rhouv_isconjugated]]
return coef_list |
def sample_statements(stmts, seed=None):
"""Return statements sampled according to belief.
Statements are sampled independently according to their
belief scores. For instance, a Staement with a belief
score of 0.7 will end up in the returned Statement list
with probability 0.7.
Parameters
----------
stmts : list[indra.statements.Statement]
A list of INDRA Statements to sample.
seed : Optional[int]
A seed for the random number generator used for sampling.
Returns
-------
new_stmts : list[indra.statements.Statement]
A list of INDRA Statements that were chosen by random sampling
according to their respective belief scores.
"""
if seed:
numpy.random.seed(seed)
new_stmts = []
r = numpy.random.rand(len(stmts))
for i, stmt in enumerate(stmts):
if r[i] < stmt.belief:
new_stmts.append(stmt)
return new_stmts | def function[sample_statements, parameter[stmts, seed]]:
constant[Return statements sampled according to belief.
Statements are sampled independently according to their
belief scores. For instance, a Staement with a belief
score of 0.7 will end up in the returned Statement list
with probability 0.7.
Parameters
----------
stmts : list[indra.statements.Statement]
A list of INDRA Statements to sample.
seed : Optional[int]
A seed for the random number generator used for sampling.
Returns
-------
new_stmts : list[indra.statements.Statement]
A list of INDRA Statements that were chosen by random sampling
according to their respective belief scores.
]
if name[seed] begin[:]
call[name[numpy].random.seed, parameter[name[seed]]]
variable[new_stmts] assign[=] list[[]]
variable[r] assign[=] call[name[numpy].random.rand, parameter[call[name[len], parameter[name[stmts]]]]]
for taget[tuple[[<ast.Name object at 0x7da20c76d480>, <ast.Name object at 0x7da20c76f310>]]] in starred[call[name[enumerate], parameter[name[stmts]]]] begin[:]
if compare[call[name[r]][name[i]] less[<] name[stmt].belief] begin[:]
call[name[new_stmts].append, parameter[name[stmt]]]
return[name[new_stmts]] | keyword[def] identifier[sample_statements] ( identifier[stmts] , identifier[seed] = keyword[None] ):
literal[string]
keyword[if] identifier[seed] :
identifier[numpy] . identifier[random] . identifier[seed] ( identifier[seed] )
identifier[new_stmts] =[]
identifier[r] = identifier[numpy] . identifier[random] . identifier[rand] ( identifier[len] ( identifier[stmts] ))
keyword[for] identifier[i] , identifier[stmt] keyword[in] identifier[enumerate] ( identifier[stmts] ):
keyword[if] identifier[r] [ identifier[i] ]< identifier[stmt] . identifier[belief] :
identifier[new_stmts] . identifier[append] ( identifier[stmt] )
keyword[return] identifier[new_stmts] | def sample_statements(stmts, seed=None):
"""Return statements sampled according to belief.
Statements are sampled independently according to their
belief scores. For instance, a Staement with a belief
score of 0.7 will end up in the returned Statement list
with probability 0.7.
Parameters
----------
stmts : list[indra.statements.Statement]
A list of INDRA Statements to sample.
seed : Optional[int]
A seed for the random number generator used for sampling.
Returns
-------
new_stmts : list[indra.statements.Statement]
A list of INDRA Statements that were chosen by random sampling
according to their respective belief scores.
"""
if seed:
numpy.random.seed(seed) # depends on [control=['if'], data=[]]
new_stmts = []
r = numpy.random.rand(len(stmts))
for (i, stmt) in enumerate(stmts):
if r[i] < stmt.belief:
new_stmts.append(stmt) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return new_stmts |
def main():
"""
NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available plots.
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png
"""
dirlist = ['./']
dir_path = os.getcwd()
names = os.listdir(dir_path)
for n in names:
if 'Location' in n:
dirlist.append(n)
if '-fmt' in sys.argv:
ind = sys.argv.index("-fmt")
fmt = sys.argv[ind+1]
else:
fmt = 'png'
if '-f' in sys.argv:
ind = sys.argv.index("-f")
filelist = [sys.argv[ind+1]]
else:
filelist = os.listdir(dir_path)
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
for loc in dirlist:
print('working on: ', loc)
os.chdir(loc) # change working directories to each location
crd = 's'
if 'er_samples.txt' in filelist: # find coordinate systems
samps, file_type = pmag.magic_read(
'er_samples.txt') # read in data
# get all none blank sample orientations
Srecs = pmag.get_dictitem(samps, 'sample_azimuth', '', 'F')
if len(Srecs) > 0:
crd = 'g'
if 'magic_measurements.txt' in filelist: # start with measurement data
print('working on measurements data')
data, file_type = pmag.magic_read(
'magic_measurements.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T')
# looking for zeq_magic possibilities
# get all none blank method codes
AFZrecs = pmag.get_dictitem(
data, 'magic_method_codes', 'LT-AF-Z', 'has')
# get all none blank method codes
TZrecs = pmag.get_dictitem(
data, 'magic_method_codes', 'LT-T-Z', 'has')
# get all none blank method codes
MZrecs = pmag.get_dictitem(
data, 'magic_method_codes', 'LT-M-Z', 'has')
# get all dec measurements
Drecs = pmag.get_dictitem(data, 'measurement_dec', '', 'F')
# get all dec measurements
Irecs = pmag.get_dictitem(data, 'measurement_inc', '', 'F')
Mkeys = ['measurement_magnitude', 'measurement_magn_moment',
'measurement_magn_volume', 'measurement_magn_mass']
for key in Mkeys:
Mrecs = pmag.get_dictitem(
data, key, '', 'F') # get intensity data
if len(Mrecs) > 0:
break
# potential for stepwise demag curves
if len(AFZrecs) > 0 or len(TZrecs) > 0 or len(MZrecs) > 0 and len(Drecs) > 0 and len(Irecs) > 0 and len(Mrecs) > 0:
print('zeq_magic.py -fsp pmag_specimens.txt -sav -fmt ' +
fmt+' -crd '+crd)
os.system('zeq_magic.py -sav -fmt '+fmt+' -crd '+crd)
# looking for thellier_magic possibilities
if len(pmag.get_dictitem(data, 'magic_method_codes', 'LP-PI-TRM', 'has')) > 0:
print('thellier_magic.py -fsp pmag_specimens.txt -sav -fmt '+fmt)
os.system('thellier_magic.py -sav -fmt '+fmt)
# looking for hysteresis possibilities
if len(pmag.get_dictitem(data, 'magic_method_codes', 'LP-HYS', 'has')) > 0: # find hyst experiments
print('quick_hyst.py -sav -fmt '+fmt)
os.system('quick_hyst.py -sav -fmt '+fmt)
if 'pmag_results.txt' in filelist: # start with measurement data
data, file_type = pmag.magic_read(
'pmag_results.txt') # read in data
print('number of datapoints: ', len(data))
if loc == './':
# get all the concatenated location names from data file
data = pmag.get_dictitem(data, 'er_location_names', ':', 'has')
print('number of datapoints: ', len(data), loc)
print('working on pmag_results directions')
SiteDIs = pmag.get_dictitem(
data, 'average_dec', "", 'F') # find decs
print('number of directions: ', len(SiteDIs))
SiteDIs = pmag.get_dictitem(
SiteDIs, 'average_inc', "", 'F') # find decs and incs
print('number of directions: ', len(SiteDIs))
# only individual results - not poles
SiteDIs = pmag.get_dictitem(SiteDIs, 'data_type', 'i', 'has')
print('number of directions: ', len(SiteDIs))
# tilt corrected coordinates
SiteDIs_t = pmag.get_dictitem(
SiteDIs, 'tilt_correction', '100', 'T')
print('number of directions: ', len(SiteDIs))
if len(SiteDIs_t) > 0:
print('eqarea_magic.py -sav -crd t -fmt '+fmt)
os.system('eqarea_magic.py -sav -crd t -fmt '+fmt)
elif len(SiteDIs) > 0 and 'tilt_correction' not in SiteDIs[0].keys():
print('eqarea_magic.py -sav -fmt '+fmt)
os.system('eqarea_magic.py -sav -fmt '+fmt)
else:
SiteDIs_g = pmag.get_dictitem(
SiteDIs, 'tilt_correction', '0', 'T') # geographic coordinates
if len(SiteDIs_g) > 0:
print('eqarea_magic.py -sav -crd g -fmt '+fmt)
os.system('eqarea_magic.py -sav -crd g -fmt '+fmt)
else:
SiteDIs_s = pmag.get_dictitem(
SiteDIs, 'tilt_correction', '-1', 'T') # sample coordinates
if len(SiteDIs_s) > 0:
print('eqarea_magic.py -sav -crd s -fmt '+fmt)
os.system('eqarea_magic.py -sav -crd s -fmt '+fmt)
else:
SiteDIs_x = pmag.get_dictitem(
SiteDIs, 'tilt_correction', '', 'T') # no coordinates
if len(SiteDIs_x) > 0:
print('eqarea_magic.py -sav -fmt '+fmt)
os.system('eqarea_magic.py -sav -fmt '+fmt)
print('working on pmag_results VGP map')
VGPs = pmag.get_dictitem(
SiteDIs, 'vgp_lat', "", 'F') # are there any VGPs?
if len(VGPs) > 0: # YES!
os.system(
'vgpmap_magic.py -prj moll -res c -sym ro 5 -sav -fmt png')
print('working on pmag_results intensities')
os.system(
'magic_select.py -f pmag_results.txt -key data_type i T -F tmp.txt')
os.system(
'magic_select.py -f tmp.txt -key average_int 0. has -F tmp1.txt')
os.system(
"grab_magic_key.py -f tmp1.txt -key average_int | awk '{print $1*1e6}' >tmp2.txt")
data, file_type = pmag.magic_read('tmp1.txt') # read in data
locations = pmag.get_dictkey(data, 'er_location_names', "")
histfile = 'LO:_'+locations[0]+'_intensities_histogram:_.'+fmt
os.system(
"histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F " + histfile)
print(
"histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F " + histfile)
os.system('rm tmp*.txt')
if 'rmag_hysteresis.txt' in filelist: # start with measurement data
print('working on rmag_hysteresis')
data, file_type = pmag.magic_read(
'rmag_hysteresis.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T')
hdata = pmag.get_dictitem(data, 'hysteresis_bcr', '', 'F')
hdata = pmag.get_dictitem(hdata, 'hysteresis_mr_moment', '', 'F')
hdata = pmag.get_dictitem(hdata, 'hysteresis_ms_moment', '', 'F')
# there are data for a dayplot
hdata = pmag.get_dictitem(hdata, 'hysteresis_bc', '', 'F')
if len(hdata) > 0:
print('dayplot_magic.py -sav -fmt '+fmt)
os.system('dayplot_magic.py -sav -fmt '+fmt)
# if 'er_sites.txt' in filelist: # start with measurement data
# print 'working on er_sites'
#os.system('basemap_magic.py -sav -fmt '+fmt)
if 'rmag_anisotropy.txt' in filelist: # do anisotropy plots if possible
print('working on rmag_anisotropy')
data, file_type = pmag.magic_read(
'rmag_anisotropy.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T')
# get specimen coordinates
sdata = pmag.get_dictitem(
data, 'anisotropy_tilt_correction', '-1', 'T')
# get specimen coordinates
gdata = pmag.get_dictitem(
data, 'anisotropy_tilt_correction', '0', 'T')
# get specimen coordinates
tdata = pmag.get_dictitem(
data, 'anisotropy_tilt_correction', '100', 'T')
if len(sdata) > 3:
print('aniso_magic.py -x -B -crd s -sav -fmt '+fmt)
os.system('aniso_magic.py -x -B -crd s -sav -fmt '+fmt)
if len(gdata) > 3:
os.system('aniso_magic.py -x -B -crd g -sav -fmt '+fmt)
if len(tdata) > 3:
os.system('aniso_magic.py -x -B -crd t -sav -fmt '+fmt)
if loc != './':
os.chdir('..') | def function[main, parameter[]]:
constant[
NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available plots.
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png
]
variable[dirlist] assign[=] list[[<ast.Constant object at 0x7da1b05275e0>]]
variable[dir_path] assign[=] call[name[os].getcwd, parameter[]]
variable[names] assign[=] call[name[os].listdir, parameter[name[dir_path]]]
for taget[name[n]] in starred[name[names]] begin[:]
if compare[constant[Location] in name[n]] begin[:]
call[name[dirlist].append, parameter[name[n]]]
if compare[constant[-fmt] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-fmt]]]
variable[fmt] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]]
if compare[constant[-f] in name[sys].argv] begin[:]
variable[ind] assign[=] call[name[sys].argv.index, parameter[constant[-f]]]
variable[filelist] assign[=] list[[<ast.Subscript object at 0x7da1b0526a10>]]
if compare[constant[-h] in name[sys].argv] begin[:]
call[name[print], parameter[name[main].__doc__]]
call[name[sys].exit, parameter[]]
for taget[name[loc]] in starred[name[dirlist]] begin[:]
call[name[print], parameter[constant[working on: ], name[loc]]]
call[name[os].chdir, parameter[name[loc]]]
variable[crd] assign[=] constant[s]
if compare[constant[er_samples.txt] in name[filelist]] begin[:]
<ast.Tuple object at 0x7da1b0526080> assign[=] call[name[pmag].magic_read, parameter[constant[er_samples.txt]]]
variable[Srecs] assign[=] call[name[pmag].get_dictitem, parameter[name[samps], constant[sample_azimuth], constant[], constant[F]]]
if compare[call[name[len], parameter[name[Srecs]]] greater[>] constant[0]] begin[:]
variable[crd] assign[=] constant[g]
if compare[constant[magic_measurements.txt] in name[filelist]] begin[:]
call[name[print], parameter[constant[working on measurements data]]]
<ast.Tuple object at 0x7da1b05259f0> assign[=] call[name[pmag].magic_read, parameter[constant[magic_measurements.txt]]]
if compare[name[loc] equal[==] constant[./]] begin[:]
variable[data] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[er_location_name], constant[], constant[T]]]
variable[AFZrecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[magic_method_codes], constant[LT-AF-Z], constant[has]]]
variable[TZrecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[magic_method_codes], constant[LT-T-Z], constant[has]]]
variable[MZrecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[magic_method_codes], constant[LT-M-Z], constant[has]]]
variable[Drecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[measurement_dec], constant[], constant[F]]]
variable[Irecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[measurement_inc], constant[], constant[F]]]
variable[Mkeys] assign[=] list[[<ast.Constant object at 0x7da1b0524d30>, <ast.Constant object at 0x7da1b0524d00>, <ast.Constant object at 0x7da1b0524cd0>, <ast.Constant object at 0x7da1b0524ca0>]]
for taget[name[key]] in starred[name[Mkeys]] begin[:]
variable[Mrecs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], name[key], constant[], constant[F]]]
if compare[call[name[len], parameter[name[Mrecs]]] greater[>] constant[0]] begin[:]
break
if <ast.BoolOp object at 0x7da1b0524880> begin[:]
call[name[print], parameter[binary_operation[binary_operation[binary_operation[constant[zeq_magic.py -fsp pmag_specimens.txt -sav -fmt ] + name[fmt]] + constant[ -crd ]] + name[crd]]]]
call[name[os].system, parameter[binary_operation[binary_operation[binary_operation[constant[zeq_magic.py -sav -fmt ] + name[fmt]] + constant[ -crd ]] + name[crd]]]]
if compare[call[name[len], parameter[call[name[pmag].get_dictitem, parameter[name[data], constant[magic_method_codes], constant[LP-PI-TRM], constant[has]]]]] greater[>] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[thellier_magic.py -fsp pmag_specimens.txt -sav -fmt ] + name[fmt]]]]
call[name[os].system, parameter[binary_operation[constant[thellier_magic.py -sav -fmt ] + name[fmt]]]]
if compare[call[name[len], parameter[call[name[pmag].get_dictitem, parameter[name[data], constant[magic_method_codes], constant[LP-HYS], constant[has]]]]] greater[>] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[quick_hyst.py -sav -fmt ] + name[fmt]]]]
call[name[os].system, parameter[binary_operation[constant[quick_hyst.py -sav -fmt ] + name[fmt]]]]
if compare[constant[pmag_results.txt] in name[filelist]] begin[:]
<ast.Tuple object at 0x7da1b05c5150> assign[=] call[name[pmag].magic_read, parameter[constant[pmag_results.txt]]]
call[name[print], parameter[constant[number of datapoints: ], call[name[len], parameter[name[data]]]]]
if compare[name[loc] equal[==] constant[./]] begin[:]
variable[data] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[er_location_names], constant[:], constant[has]]]
call[name[print], parameter[constant[number of datapoints: ], call[name[len], parameter[name[data]]], name[loc]]]
call[name[print], parameter[constant[working on pmag_results directions]]]
variable[SiteDIs] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[average_dec], constant[], constant[F]]]
call[name[print], parameter[constant[number of directions: ], call[name[len], parameter[name[SiteDIs]]]]]
variable[SiteDIs] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteDIs], constant[average_inc], constant[], constant[F]]]
call[name[print], parameter[constant[number of directions: ], call[name[len], parameter[name[SiteDIs]]]]]
variable[SiteDIs] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteDIs], constant[data_type], constant[i], constant[has]]]
call[name[print], parameter[constant[number of directions: ], call[name[len], parameter[name[SiteDIs]]]]]
variable[SiteDIs_t] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteDIs], constant[tilt_correction], constant[100], constant[T]]]
call[name[print], parameter[constant[number of directions: ], call[name[len], parameter[name[SiteDIs]]]]]
if compare[call[name[len], parameter[name[SiteDIs_t]]] greater[>] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[eqarea_magic.py -sav -crd t -fmt ] + name[fmt]]]]
call[name[os].system, parameter[binary_operation[constant[eqarea_magic.py -sav -crd t -fmt ] + name[fmt]]]]
call[name[print], parameter[constant[working on pmag_results VGP map]]]
variable[VGPs] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteDIs], constant[vgp_lat], constant[], constant[F]]]
if compare[call[name[len], parameter[name[VGPs]]] greater[>] constant[0]] begin[:]
call[name[os].system, parameter[constant[vgpmap_magic.py -prj moll -res c -sym ro 5 -sav -fmt png]]]
call[name[print], parameter[constant[working on pmag_results intensities]]]
call[name[os].system, parameter[constant[magic_select.py -f pmag_results.txt -key data_type i T -F tmp.txt]]]
call[name[os].system, parameter[constant[magic_select.py -f tmp.txt -key average_int 0. has -F tmp1.txt]]]
call[name[os].system, parameter[constant[grab_magic_key.py -f tmp1.txt -key average_int | awk '{print $1*1e6}' >tmp2.txt]]]
<ast.Tuple object at 0x7da1b0594160> assign[=] call[name[pmag].magic_read, parameter[constant[tmp1.txt]]]
variable[locations] assign[=] call[name[pmag].get_dictkey, parameter[name[data], constant[er_location_names], constant[]]]
variable[histfile] assign[=] binary_operation[binary_operation[binary_operation[constant[LO:_] + call[name[locations]][constant[0]]] + constant[_intensities_histogram:_.]] + name[fmt]]
call[name[os].system, parameter[binary_operation[constant[histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F ] + name[histfile]]]]
call[name[print], parameter[binary_operation[constant[histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F ] + name[histfile]]]]
call[name[os].system, parameter[constant[rm tmp*.txt]]]
if compare[constant[rmag_hysteresis.txt] in name[filelist]] begin[:]
call[name[print], parameter[constant[working on rmag_hysteresis]]]
<ast.Tuple object at 0x7da1b0531c30> assign[=] call[name[pmag].magic_read, parameter[constant[rmag_hysteresis.txt]]]
if compare[name[loc] equal[==] constant[./]] begin[:]
variable[data] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[er_location_name], constant[], constant[T]]]
variable[hdata] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[hysteresis_bcr], constant[], constant[F]]]
variable[hdata] assign[=] call[name[pmag].get_dictitem, parameter[name[hdata], constant[hysteresis_mr_moment], constant[], constant[F]]]
variable[hdata] assign[=] call[name[pmag].get_dictitem, parameter[name[hdata], constant[hysteresis_ms_moment], constant[], constant[F]]]
variable[hdata] assign[=] call[name[pmag].get_dictitem, parameter[name[hdata], constant[hysteresis_bc], constant[], constant[F]]]
if compare[call[name[len], parameter[name[hdata]]] greater[>] constant[0]] begin[:]
call[name[print], parameter[binary_operation[constant[dayplot_magic.py -sav -fmt ] + name[fmt]]]]
call[name[os].system, parameter[binary_operation[constant[dayplot_magic.py -sav -fmt ] + name[fmt]]]]
if compare[constant[rmag_anisotropy.txt] in name[filelist]] begin[:]
call[name[print], parameter[constant[working on rmag_anisotropy]]]
<ast.Tuple object at 0x7da1b0591a20> assign[=] call[name[pmag].magic_read, parameter[constant[rmag_anisotropy.txt]]]
if compare[name[loc] equal[==] constant[./]] begin[:]
variable[data] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[er_location_name], constant[], constant[T]]]
variable[sdata] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[anisotropy_tilt_correction], constant[-1], constant[T]]]
variable[gdata] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[anisotropy_tilt_correction], constant[0], constant[T]]]
variable[tdata] assign[=] call[name[pmag].get_dictitem, parameter[name[data], constant[anisotropy_tilt_correction], constant[100], constant[T]]]
if compare[call[name[len], parameter[name[sdata]]] greater[>] constant[3]] begin[:]
call[name[print], parameter[binary_operation[constant[aniso_magic.py -x -B -crd s -sav -fmt ] + name[fmt]]]]
call[name[os].system, parameter[binary_operation[constant[aniso_magic.py -x -B -crd s -sav -fmt ] + name[fmt]]]]
if compare[call[name[len], parameter[name[gdata]]] greater[>] constant[3]] begin[:]
call[name[os].system, parameter[binary_operation[constant[aniso_magic.py -x -B -crd g -sav -fmt ] + name[fmt]]]]
if compare[call[name[len], parameter[name[tdata]]] greater[>] constant[3]] begin[:]
call[name[os].system, parameter[binary_operation[constant[aniso_magic.py -x -B -crd t -sav -fmt ] + name[fmt]]]]
if compare[name[loc] not_equal[!=] constant[./]] begin[:]
call[name[os].chdir, parameter[constant[..]]] | keyword[def] identifier[main] ():
literal[string]
identifier[dirlist] =[ literal[string] ]
identifier[dir_path] = identifier[os] . identifier[getcwd] ()
identifier[names] = identifier[os] . identifier[listdir] ( identifier[dir_path] )
keyword[for] identifier[n] keyword[in] identifier[names] :
keyword[if] literal[string] keyword[in] identifier[n] :
identifier[dirlist] . identifier[append] ( identifier[n] )
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[fmt] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]
keyword[else] :
identifier[fmt] = literal[string]
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[ind] = identifier[sys] . identifier[argv] . identifier[index] ( literal[string] )
identifier[filelist] =[ identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ]]
keyword[else] :
identifier[filelist] = identifier[os] . identifier[listdir] ( identifier[dir_path] )
keyword[if] literal[string] keyword[in] identifier[sys] . identifier[argv] :
identifier[print] ( identifier[main] . identifier[__doc__] )
identifier[sys] . identifier[exit] ()
keyword[for] identifier[loc] keyword[in] identifier[dirlist] :
identifier[print] ( literal[string] , identifier[loc] )
identifier[os] . identifier[chdir] ( identifier[loc] )
identifier[crd] = literal[string]
keyword[if] literal[string] keyword[in] identifier[filelist] :
identifier[samps] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
literal[string] )
identifier[Srecs] = identifier[pmag] . identifier[get_dictitem] ( identifier[samps] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[Srecs] )> literal[int] :
identifier[crd] = literal[string]
keyword[if] literal[string] keyword[in] identifier[filelist] :
identifier[print] ( literal[string] )
identifier[data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
literal[string] )
keyword[if] identifier[loc] == literal[string] :
identifier[data] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[AFZrecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[TZrecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[MZrecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[Drecs] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[Irecs] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[Mkeys] =[ literal[string] , literal[string] ,
literal[string] , literal[string] ]
keyword[for] identifier[key] keyword[in] identifier[Mkeys] :
identifier[Mrecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , identifier[key] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[Mrecs] )> literal[int] :
keyword[break]
keyword[if] identifier[len] ( identifier[AFZrecs] )> literal[int] keyword[or] identifier[len] ( identifier[TZrecs] )> literal[int] keyword[or] identifier[len] ( identifier[MZrecs] )> literal[int] keyword[and] identifier[len] ( identifier[Drecs] )> literal[int] keyword[and] identifier[len] ( identifier[Irecs] )> literal[int] keyword[and] identifier[len] ( identifier[Mrecs] )> literal[int] :
identifier[print] ( literal[string] +
identifier[fmt] + literal[string] + identifier[crd] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] + literal[string] + identifier[crd] )
keyword[if] identifier[len] ( identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] ))> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] identifier[len] ( identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] ))> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] literal[string] keyword[in] identifier[filelist] :
identifier[data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[data] ))
keyword[if] identifier[loc] == literal[string] :
identifier[data] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[data] ), identifier[loc] )
identifier[print] ( literal[string] )
identifier[SiteDIs] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[SiteDIs] ))
identifier[SiteDIs] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[SiteDIs] ))
identifier[SiteDIs] = identifier[pmag] . identifier[get_dictitem] ( identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[SiteDIs] ))
identifier[SiteDIs_t] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
identifier[print] ( literal[string] , identifier[len] ( identifier[SiteDIs] ))
keyword[if] identifier[len] ( identifier[SiteDIs_t] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[elif] identifier[len] ( identifier[SiteDIs] )> literal[int] keyword[and] literal[string] keyword[not] keyword[in] identifier[SiteDIs] [ literal[int] ]. identifier[keys] ():
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[else] :
identifier[SiteDIs_g] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[SiteDIs_g] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[else] :
identifier[SiteDIs_s] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[SiteDIs_s] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[else] :
identifier[SiteDIs_x] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[SiteDIs_x] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
identifier[print] ( literal[string] )
identifier[VGPs] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteDIs] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[VGPs] )> literal[int] :
identifier[os] . identifier[system] (
literal[string] )
identifier[print] ( literal[string] )
identifier[os] . identifier[system] (
literal[string] )
identifier[os] . identifier[system] (
literal[string] )
identifier[os] . identifier[system] (
literal[string] )
identifier[data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( literal[string] )
identifier[locations] = identifier[pmag] . identifier[get_dictkey] ( identifier[data] , literal[string] , literal[string] )
identifier[histfile] = literal[string] + identifier[locations] [ literal[int] ]+ literal[string] + identifier[fmt]
identifier[os] . identifier[system] (
literal[string] + identifier[histfile] )
identifier[print] (
literal[string] + identifier[histfile] )
identifier[os] . identifier[system] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[filelist] :
identifier[print] ( literal[string] )
identifier[data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
literal[string] )
keyword[if] identifier[loc] == literal[string] :
identifier[data] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[hdata] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[hdata] = identifier[pmag] . identifier[get_dictitem] ( identifier[hdata] , literal[string] , literal[string] , literal[string] )
identifier[hdata] = identifier[pmag] . identifier[get_dictitem] ( identifier[hdata] , literal[string] , literal[string] , literal[string] )
identifier[hdata] = identifier[pmag] . identifier[get_dictitem] ( identifier[hdata] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[hdata] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] literal[string] keyword[in] identifier[filelist] :
identifier[print] ( literal[string] )
identifier[data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
literal[string] )
keyword[if] identifier[loc] == literal[string] :
identifier[data] = identifier[pmag] . identifier[get_dictitem] ( identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[sdata] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[gdata] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
identifier[tdata] = identifier[pmag] . identifier[get_dictitem] (
identifier[data] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[sdata] )> literal[int] :
identifier[print] ( literal[string] + identifier[fmt] )
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] identifier[len] ( identifier[gdata] )> literal[int] :
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] identifier[len] ( identifier[tdata] )> literal[int] :
identifier[os] . identifier[system] ( literal[string] + identifier[fmt] )
keyword[if] identifier[loc] != literal[string] :
identifier[os] . identifier[chdir] ( literal[string] ) | def main():
"""
NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available plots.
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png
"""
dirlist = ['./']
dir_path = os.getcwd()
names = os.listdir(dir_path)
for n in names:
if 'Location' in n:
dirlist.append(n) # depends on [control=['if'], data=['n']] # depends on [control=['for'], data=['n']]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind + 1] # depends on [control=['if'], data=[]]
else:
fmt = 'png'
if '-f' in sys.argv:
ind = sys.argv.index('-f')
filelist = [sys.argv[ind + 1]] # depends on [control=['if'], data=[]]
else:
filelist = os.listdir(dir_path)
if '-h' in sys.argv:
print(main.__doc__)
sys.exit() # depends on [control=['if'], data=[]]
for loc in dirlist:
print('working on: ', loc)
os.chdir(loc) # change working directories to each location
crd = 's'
if 'er_samples.txt' in filelist: # find coordinate systems
(samps, file_type) = pmag.magic_read('er_samples.txt') # read in data
# get all none blank sample orientations
Srecs = pmag.get_dictitem(samps, 'sample_azimuth', '', 'F')
if len(Srecs) > 0:
crd = 'g' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 'magic_measurements.txt' in filelist: # start with measurement data
print('working on measurements data')
(data, file_type) = pmag.magic_read('magic_measurements.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T') # depends on [control=['if'], data=[]]
# looking for zeq_magic possibilities
# get all none blank method codes
AFZrecs = pmag.get_dictitem(data, 'magic_method_codes', 'LT-AF-Z', 'has')
# get all none blank method codes
TZrecs = pmag.get_dictitem(data, 'magic_method_codes', 'LT-T-Z', 'has')
# get all none blank method codes
MZrecs = pmag.get_dictitem(data, 'magic_method_codes', 'LT-M-Z', 'has')
# get all dec measurements
Drecs = pmag.get_dictitem(data, 'measurement_dec', '', 'F')
# get all dec measurements
Irecs = pmag.get_dictitem(data, 'measurement_inc', '', 'F')
Mkeys = ['measurement_magnitude', 'measurement_magn_moment', 'measurement_magn_volume', 'measurement_magn_mass']
for key in Mkeys:
Mrecs = pmag.get_dictitem(data, key, '', 'F') # get intensity data
if len(Mrecs) > 0:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
# potential for stepwise demag curves
if len(AFZrecs) > 0 or len(TZrecs) > 0 or (len(MZrecs) > 0 and len(Drecs) > 0 and (len(Irecs) > 0) and (len(Mrecs) > 0)):
print('zeq_magic.py -fsp pmag_specimens.txt -sav -fmt ' + fmt + ' -crd ' + crd)
os.system('zeq_magic.py -sav -fmt ' + fmt + ' -crd ' + crd) # depends on [control=['if'], data=[]]
# looking for thellier_magic possibilities
if len(pmag.get_dictitem(data, 'magic_method_codes', 'LP-PI-TRM', 'has')) > 0:
print('thellier_magic.py -fsp pmag_specimens.txt -sav -fmt ' + fmt)
os.system('thellier_magic.py -sav -fmt ' + fmt) # depends on [control=['if'], data=[]]
# looking for hysteresis possibilities
if len(pmag.get_dictitem(data, 'magic_method_codes', 'LP-HYS', 'has')) > 0: # find hyst experiments
print('quick_hyst.py -sav -fmt ' + fmt)
os.system('quick_hyst.py -sav -fmt ' + fmt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 'pmag_results.txt' in filelist: # start with measurement data
(data, file_type) = pmag.magic_read('pmag_results.txt') # read in data
print('number of datapoints: ', len(data))
if loc == './':
# get all the concatenated location names from data file
data = pmag.get_dictitem(data, 'er_location_names', ':', 'has') # depends on [control=['if'], data=[]]
print('number of datapoints: ', len(data), loc)
print('working on pmag_results directions')
SiteDIs = pmag.get_dictitem(data, 'average_dec', '', 'F') # find decs
print('number of directions: ', len(SiteDIs))
SiteDIs = pmag.get_dictitem(SiteDIs, 'average_inc', '', 'F') # find decs and incs
print('number of directions: ', len(SiteDIs))
# only individual results - not poles
SiteDIs = pmag.get_dictitem(SiteDIs, 'data_type', 'i', 'has')
print('number of directions: ', len(SiteDIs))
# tilt corrected coordinates
SiteDIs_t = pmag.get_dictitem(SiteDIs, 'tilt_correction', '100', 'T')
print('number of directions: ', len(SiteDIs))
if len(SiteDIs_t) > 0:
print('eqarea_magic.py -sav -crd t -fmt ' + fmt)
os.system('eqarea_magic.py -sav -crd t -fmt ' + fmt) # depends on [control=['if'], data=[]]
elif len(SiteDIs) > 0 and 'tilt_correction' not in SiteDIs[0].keys():
print('eqarea_magic.py -sav -fmt ' + fmt)
os.system('eqarea_magic.py -sav -fmt ' + fmt) # depends on [control=['if'], data=[]]
else:
SiteDIs_g = pmag.get_dictitem(SiteDIs, 'tilt_correction', '0', 'T') # geographic coordinates
if len(SiteDIs_g) > 0:
print('eqarea_magic.py -sav -crd g -fmt ' + fmt)
os.system('eqarea_magic.py -sav -crd g -fmt ' + fmt) # depends on [control=['if'], data=[]]
else:
SiteDIs_s = pmag.get_dictitem(SiteDIs, 'tilt_correction', '-1', 'T') # sample coordinates
if len(SiteDIs_s) > 0:
print('eqarea_magic.py -sav -crd s -fmt ' + fmt)
os.system('eqarea_magic.py -sav -crd s -fmt ' + fmt) # depends on [control=['if'], data=[]]
else:
SiteDIs_x = pmag.get_dictitem(SiteDIs, 'tilt_correction', '', 'T') # no coordinates
if len(SiteDIs_x) > 0:
print('eqarea_magic.py -sav -fmt ' + fmt)
os.system('eqarea_magic.py -sav -fmt ' + fmt) # depends on [control=['if'], data=[]]
print('working on pmag_results VGP map')
VGPs = pmag.get_dictitem(SiteDIs, 'vgp_lat', '', 'F') # are there any VGPs?
if len(VGPs) > 0: # YES!
os.system('vgpmap_magic.py -prj moll -res c -sym ro 5 -sav -fmt png') # depends on [control=['if'], data=[]]
print('working on pmag_results intensities')
os.system('magic_select.py -f pmag_results.txt -key data_type i T -F tmp.txt')
os.system('magic_select.py -f tmp.txt -key average_int 0. has -F tmp1.txt')
os.system("grab_magic_key.py -f tmp1.txt -key average_int | awk '{print $1*1e6}' >tmp2.txt")
(data, file_type) = pmag.magic_read('tmp1.txt') # read in data
locations = pmag.get_dictkey(data, 'er_location_names', '')
histfile = 'LO:_' + locations[0] + '_intensities_histogram:_.' + fmt
os.system("histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F " + histfile)
print("histplot.py -b 1 -xlab 'Intensity (uT)' -sav -f tmp2.txt -F " + histfile)
os.system('rm tmp*.txt') # depends on [control=['if'], data=[]]
if 'rmag_hysteresis.txt' in filelist: # start with measurement data
print('working on rmag_hysteresis')
(data, file_type) = pmag.magic_read('rmag_hysteresis.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T') # depends on [control=['if'], data=[]]
hdata = pmag.get_dictitem(data, 'hysteresis_bcr', '', 'F')
hdata = pmag.get_dictitem(hdata, 'hysteresis_mr_moment', '', 'F')
hdata = pmag.get_dictitem(hdata, 'hysteresis_ms_moment', '', 'F')
# there are data for a dayplot
hdata = pmag.get_dictitem(hdata, 'hysteresis_bc', '', 'F')
if len(hdata) > 0:
print('dayplot_magic.py -sav -fmt ' + fmt)
os.system('dayplot_magic.py -sav -fmt ' + fmt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# if 'er_sites.txt' in filelist: # start with measurement data
# print 'working on er_sites'
#os.system('basemap_magic.py -sav -fmt '+fmt)
if 'rmag_anisotropy.txt' in filelist: # do anisotropy plots if possible
print('working on rmag_anisotropy')
(data, file_type) = pmag.magic_read('rmag_anisotropy.txt') # read in data
if loc == './':
# get all the blank location names from data file
data = pmag.get_dictitem(data, 'er_location_name', '', 'T') # depends on [control=['if'], data=[]]
# get specimen coordinates
sdata = pmag.get_dictitem(data, 'anisotropy_tilt_correction', '-1', 'T')
# get specimen coordinates
gdata = pmag.get_dictitem(data, 'anisotropy_tilt_correction', '0', 'T')
# get specimen coordinates
tdata = pmag.get_dictitem(data, 'anisotropy_tilt_correction', '100', 'T')
if len(sdata) > 3:
print('aniso_magic.py -x -B -crd s -sav -fmt ' + fmt)
os.system('aniso_magic.py -x -B -crd s -sav -fmt ' + fmt) # depends on [control=['if'], data=[]]
if len(gdata) > 3:
os.system('aniso_magic.py -x -B -crd g -sav -fmt ' + fmt) # depends on [control=['if'], data=[]]
if len(tdata) > 3:
os.system('aniso_magic.py -x -B -crd t -sav -fmt ' + fmt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if loc != './':
os.chdir('..') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['loc']] |
def restore_text_setting(self, key, line_edit):
    """Populate a line edit widget with the stored value of a setting.

    :param key: Key to retrieve setting value.
    :type key: str

    :param line_edit: Line edit for user to edit the setting
    :type line_edit: PyQt5.QtWidgets.QLineEdit.QLineEdit
    """
    # Look the value up as a string in this dialog's QSettings store,
    # then push it into the widget for editing.
    stored_value = setting(key, expected_type=str, qsettings=self.settings)
    line_edit.setText(stored_value)
constant[Set line_edit text according to setting of key.
:param key: Key to retrieve setting value.
:type key: str
:param line_edit: Line edit for user to edit the setting
:type line_edit: PyQt5.QtWidgets.QLineEdit.QLineEdit
]
variable[value] assign[=] call[name[setting], parameter[name[key]]]
call[name[line_edit].setText, parameter[name[value]]] | keyword[def] identifier[restore_text_setting] ( identifier[self] , identifier[key] , identifier[line_edit] ):
literal[string]
identifier[value] = identifier[setting] ( identifier[key] , identifier[expected_type] = identifier[str] , identifier[qsettings] = identifier[self] . identifier[settings] )
identifier[line_edit] . identifier[setText] ( identifier[value] ) | def restore_text_setting(self, key, line_edit):
"""Set line_edit text according to setting of key.
:param key: Key to retrieve setting value.
:type key: str
:param line_edit: Line edit for user to edit the setting
:type line_edit: PyQt5.QtWidgets.QLineEdit.QLineEdit
"""
value = setting(key, expected_type=str, qsettings=self.settings)
line_edit.setText(value) |
def get_uservar(self, user, name):
    """Retrieve a stored variable for a user.

    :param str user: The user ID to look up a variable for.
    :param str name: The name of the variable to get.

    :return: The user variable, or ``None`` or ``"undefined"``:

        * If the user has no data at all, this returns ``None``.
        * If the user doesn't have this variable set, this returns the
          string ``"undefined"``.
        * Otherwise this returns the string value of the variable.
    """
    # `__lastmatch__` is special-cased because it can't receive the
    # "undefined" value; serve it via last_match() instead of the session.
    if name == '__lastmatch__':
        return self.last_match(user)
    return self._session.get(user, name)
constant[Get a variable about a user.
:param str user: The user ID to look up a variable for.
:param str name: The name of the variable to get.
:return: The user variable, or ``None`` or ``"undefined"``:
* If the user has no data at all, this returns ``None``.
* If the user doesn't have this variable set, this returns the
string ``"undefined"``.
* Otherwise this returns the string value of the variable.
]
if compare[name[name] equal[==] constant[__lastmatch__]] begin[:]
return[call[name[self].last_match, parameter[name[user]]]] | keyword[def] identifier[get_uservar] ( identifier[self] , identifier[user] , identifier[name] ):
literal[string]
keyword[if] identifier[name] == literal[string] :
keyword[return] identifier[self] . identifier[last_match] ( identifier[user] )
keyword[else] :
keyword[return] identifier[self] . identifier[_session] . identifier[get] ( identifier[user] , identifier[name] ) | def get_uservar(self, user, name):
"""Get a variable about a user.
:param str user: The user ID to look up a variable for.
:param str name: The name of the variable to get.
:return: The user variable, or ``None`` or ``"undefined"``:
* If the user has no data at all, this returns ``None``.
* If the user doesn't have this variable set, this returns the
string ``"undefined"``.
* Otherwise this returns the string value of the variable.
"""
if name == '__lastmatch__': # Treat var `__lastmatch__` since it can't receive "undefined" value
return self.last_match(user) # depends on [control=['if'], data=[]]
else:
return self._session.get(user, name) |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'global_') and self.global_ is not None:
_dict['global'] = self.global_._to_dict()
if hasattr(self, 'skills') and self.skills is not None:
_dict['skills'] = self.skills._to_dict()
return _dict | def function[_to_dict, parameter[self]]:
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da18dc05f30> begin[:]
call[name[_dict]][constant[global]] assign[=] call[name[self].global_._to_dict, parameter[]]
if <ast.BoolOp object at 0x7da1b1b44820> begin[:]
call[name[_dict]][constant[skills]] assign[=] call[name[self].skills._to_dict, parameter[]]
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[global_] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[global_] . identifier[_to_dict] ()
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[self] . identifier[skills] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[skills] . identifier[_to_dict] ()
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'global_') and self.global_ is not None:
_dict['global'] = self.global_._to_dict() # depends on [control=['if'], data=[]]
if hasattr(self, 'skills') and self.skills is not None:
_dict['skills'] = self.skills._to_dict() # depends on [control=['if'], data=[]]
return _dict |
def has_property(self, property_name):
    """
    Check if schema has property

    The name is looked up across all three containers that can hold a
    property: plain properties, entities and collections.

    :param property_name: str, name to check
    :return: bool
    """
    # A single membership chain replaces the original
    # `if ...: return True / else: return False` ladder.
    return (property_name in self.properties
            or property_name in self.entities
            or property_name in self.collections)
constant[
Check if schema has property
:param property_name: str, name to check
:return: bool
]
if compare[name[property_name] in name[self].properties] begin[:]
return[constant[True]] | keyword[def] identifier[has_property] ( identifier[self] , identifier[property_name] ):
literal[string]
keyword[if] identifier[property_name] keyword[in] identifier[self] . identifier[properties] :
keyword[return] keyword[True]
keyword[elif] identifier[property_name] keyword[in] identifier[self] . identifier[entities] :
keyword[return] keyword[True]
keyword[elif] identifier[property_name] keyword[in] identifier[self] . identifier[collections] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def has_property(self, property_name):
"""
Check if schema has property
:param property_name: str, name to check
:return: bool
"""
if property_name in self.properties:
return True # depends on [control=['if'], data=[]]
elif property_name in self.entities:
return True # depends on [control=['if'], data=[]]
elif property_name in self.collections:
return True # depends on [control=['if'], data=[]]
else:
return False |
def module_entry(yfile):
    """Add entry for one file containing YANG module text.

    Parses the top-level (sub)module statement, collects its revision,
    features and (for modules) namespace/includes, and registers the
    result in the module-level `submodmap` or `modmap` registry.

    Args:
        yfile (file): File containing a YANG module or submodule.
    """
    ytxt = yfile.read()
    mp = ModuleParser(ytxt)
    # Top-level statement: keyword is either "module" or "submodule".
    mst = mp.statement()
    submod = mst.keyword == "submodule"
    import_only = True  # flips to False once any data-defining keyword is seen
    rev = ""
    features = []
    includes = []
    rec = {}
    # NOTE: this is an elif chain, so statement order and the first-match
    # branch matter; each substatement contributes to at most one bucket.
    for sst in mst.substatements:
        if not rev and sst.keyword == "revision":
            # First revision statement wins (statements are in document order).
            rev = sst.argument
        elif import_only and sst.keyword in data_kws:
            # assumes `data_kws` lists keywords that define data nodes,
            # whose presence makes the module more than import-only — TODO confirm
            import_only = False
        elif sst.keyword == "feature":
            features.append(sst.argument)
        elif submod:
            # Submodules only contribute revision/features; skip the rest.
            continue
        elif sst.keyword == "namespace":
            rec["namespace"] = sst.argument
        elif sst.keyword == "include":
            # Record the included submodule name with its optional revision-date.
            rd = sst.find1("revision-date")
            includes.append((sst.argument, rd.argument if rd else None))
    rec["import-only"] = import_only
    rec["features"] = features
    if submod:
        # Submodules are keyed by name only; revision is stored in the record.
        rec["revision"] = rev
        submodmap[mst.argument] = rec
    else:
        # Full modules are keyed by (name, revision) and track their includes.
        rec["includes"] = includes
        modmap[(mst.argument, rev)] = rec
constant[Add entry for one file containing YANG module text.
Args:
yfile (file): File containing a YANG module or submodule.
]
variable[ytxt] assign[=] call[name[yfile].read, parameter[]]
variable[mp] assign[=] call[name[ModuleParser], parameter[name[ytxt]]]
variable[mst] assign[=] call[name[mp].statement, parameter[]]
variable[submod] assign[=] compare[name[mst].keyword equal[==] constant[submodule]]
variable[import_only] assign[=] constant[True]
variable[rev] assign[=] constant[]
variable[features] assign[=] list[[]]
variable[includes] assign[=] list[[]]
variable[rec] assign[=] dictionary[[], []]
for taget[name[sst]] in starred[name[mst].substatements] begin[:]
if <ast.BoolOp object at 0x7da1b02e4490> begin[:]
variable[rev] assign[=] name[sst].argument
call[name[rec]][constant[import-only]] assign[=] name[import_only]
call[name[rec]][constant[features]] assign[=] name[features]
if name[submod] begin[:]
call[name[rec]][constant[revision]] assign[=] name[rev]
call[name[submodmap]][name[mst].argument] assign[=] name[rec] | keyword[def] identifier[module_entry] ( identifier[yfile] ):
literal[string]
identifier[ytxt] = identifier[yfile] . identifier[read] ()
identifier[mp] = identifier[ModuleParser] ( identifier[ytxt] )
identifier[mst] = identifier[mp] . identifier[statement] ()
identifier[submod] = identifier[mst] . identifier[keyword] == literal[string]
identifier[import_only] = keyword[True]
identifier[rev] = literal[string]
identifier[features] =[]
identifier[includes] =[]
identifier[rec] ={}
keyword[for] identifier[sst] keyword[in] identifier[mst] . identifier[substatements] :
keyword[if] keyword[not] identifier[rev] keyword[and] identifier[sst] . identifier[keyword] == literal[string] :
identifier[rev] = identifier[sst] . identifier[argument]
keyword[elif] identifier[import_only] keyword[and] identifier[sst] . identifier[keyword] keyword[in] identifier[data_kws] :
identifier[import_only] = keyword[False]
keyword[elif] identifier[sst] . identifier[keyword] == literal[string] :
identifier[features] . identifier[append] ( identifier[sst] . identifier[argument] )
keyword[elif] identifier[submod] :
keyword[continue]
keyword[elif] identifier[sst] . identifier[keyword] == literal[string] :
identifier[rec] [ literal[string] ]= identifier[sst] . identifier[argument]
keyword[elif] identifier[sst] . identifier[keyword] == literal[string] :
identifier[rd] = identifier[sst] . identifier[find1] ( literal[string] )
identifier[includes] . identifier[append] (( identifier[sst] . identifier[argument] , identifier[rd] . identifier[argument] keyword[if] identifier[rd] keyword[else] keyword[None] ))
identifier[rec] [ literal[string] ]= identifier[import_only]
identifier[rec] [ literal[string] ]= identifier[features]
keyword[if] identifier[submod] :
identifier[rec] [ literal[string] ]= identifier[rev]
identifier[submodmap] [ identifier[mst] . identifier[argument] ]= identifier[rec]
keyword[else] :
identifier[rec] [ literal[string] ]= identifier[includes]
identifier[modmap] [( identifier[mst] . identifier[argument] , identifier[rev] )]= identifier[rec] | def module_entry(yfile):
"""Add entry for one file containing YANG module text.
Args:
yfile (file): File containing a YANG module or submodule.
"""
ytxt = yfile.read()
mp = ModuleParser(ytxt)
mst = mp.statement()
submod = mst.keyword == 'submodule'
import_only = True
rev = ''
features = []
includes = []
rec = {}
for sst in mst.substatements:
if not rev and sst.keyword == 'revision':
rev = sst.argument # depends on [control=['if'], data=[]]
elif import_only and sst.keyword in data_kws:
import_only = False # depends on [control=['if'], data=[]]
elif sst.keyword == 'feature':
features.append(sst.argument) # depends on [control=['if'], data=[]]
elif submod:
continue # depends on [control=['if'], data=[]]
elif sst.keyword == 'namespace':
rec['namespace'] = sst.argument # depends on [control=['if'], data=[]]
elif sst.keyword == 'include':
rd = sst.find1('revision-date')
includes.append((sst.argument, rd.argument if rd else None)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sst']]
rec['import-only'] = import_only
rec['features'] = features
if submod:
rec['revision'] = rev
submodmap[mst.argument] = rec # depends on [control=['if'], data=[]]
else:
rec['includes'] = includes
modmap[mst.argument, rev] = rec |
def update_handler(Model, name=None, **kwds):
    """
    This factory returns an action handler that updates an existing instance
    of the specified model when an update action is received, assuming the
    action follows nautilus conventions.

    Args:
        Model (nautilus.BaseModel): The model to update when the action
            is received.
        name (str): Optional override for the model name used when
            computing the expected action type.

    Returns:
        function(type, payload): The action handler for this model
    """
    async def action_handler(service, action_type, payload, props, notify=True, **kwds):
        # only react to update actions addressed to this model
        if action_type == get_crud_action('update', name or Model):
            # Build the reply props BEFORE the try block so the error
            # handler below can always reference them safely.
            message_props = {}
            # if there was a correlation id in the request,
            # make sure it ends up in the reply
            if 'correlation_id' in props:
                message_props['correlation_id'] = props['correlation_id']
            try:
                # grab the name of the primary key for the model
                pk_field = Model.primary_key()
                # make sure there is a primary key to id the model
                if pk_field.name not in payload:
                    # yell loudly
                    raise ValueError("Must specify the pk of the model when updating")
                # grab the matching model
                model = Model.select().where(pk_field == payload[pk_field.name]).get()
                # remove the key from the payload so it is never overwritten
                payload.pop(pk_field.name, None)
                # apply every remaining key/value pair to the record
                for key, value in payload.items():
                    # TODO: add protection for certain fields from being
                    # changed by the api
                    setattr(model, key, value)
                # save the updates
                model.save()
                # if we need to tell someone about what happened
                if notify:
                    # publish the success event
                    await service.event_broker.send(
                        payload=ModelSerializer().serialize(model),
                        action_type=change_action_status(action_type, success_status()),
                        **message_props
                    )
            # if something goes wrong
            except Exception as err:
                # if we need to tell someone about what happened
                if notify:
                    # publish the error as an event
                    await service.event_broker.send(
                        payload=str(err),
                        action_type=change_action_status(action_type, error_status()),
                        **message_props
                    )
                # otherwise we aren't supposed to notify
                else:
                    # raise the exception normally
                    raise err

    # return the handler
    return action_handler
constant[
This factory returns an action handler that updates a new instance of
the specified model when a update action is recieved, assuming the
action follows nautilus convetions.
Args:
Model (nautilus.BaseModel): The model to update when the action
received.
Returns:
function(type, payload): The action handler for this model
]
<ast.AsyncFunctionDef object at 0x7da1b0fde740>
return[name[action_handler]] | keyword[def] identifier[update_handler] ( identifier[Model] , identifier[name] = keyword[None] ,** identifier[kwds] ):
literal[string]
keyword[async] keyword[def] identifier[action_handler] ( identifier[service] , identifier[action_type] , identifier[payload] , identifier[props] , identifier[notify] = keyword[True] ,** identifier[kwds] ):
keyword[if] identifier[action_type] == identifier[get_crud_action] ( literal[string] , identifier[name] keyword[or] identifier[Model] ):
keyword[try] :
identifier[message_props] ={}
keyword[if] literal[string] keyword[in] identifier[props] :
identifier[message_props] [ literal[string] ]= identifier[props] [ literal[string] ]
identifier[pk_field] = identifier[Model] . identifier[primary_key] ()
keyword[if] keyword[not] identifier[pk_field] . identifier[name] keyword[in] identifier[payload] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[model] = identifier[Model] . identifier[select] (). identifier[where] ( identifier[pk_field] == identifier[payload] [ identifier[pk_field] . identifier[name] ]). identifier[get] ()
identifier[payload] . identifier[pop] ( identifier[pk_field] . identifier[name] , keyword[None] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[payload] . identifier[items] ():
identifier[setattr] ( identifier[model] , identifier[key] , identifier[value] )
identifier[model] . identifier[save] ()
keyword[if] identifier[notify] :
keyword[await] identifier[service] . identifier[event_broker] . identifier[send] (
identifier[payload] = identifier[ModelSerializer] (). identifier[serialize] ( identifier[model] ),
identifier[action_type] = identifier[change_action_status] ( identifier[action_type] , identifier[success_status] ()),
** identifier[message_props]
)
keyword[except] identifier[Exception] keyword[as] identifier[err] :
keyword[if] identifier[notify] :
keyword[await] identifier[service] . identifier[event_broker] . identifier[send] (
identifier[payload] = identifier[str] ( identifier[err] ),
identifier[action_type] = identifier[change_action_status] ( identifier[action_type] , identifier[error_status] ()),
** identifier[message_props]
)
keyword[else] :
keyword[raise] identifier[err]
keyword[return] identifier[action_handler] | def update_handler(Model, name=None, **kwds):
"""
This factory returns an action handler that updates a new instance of
the specified model when a update action is recieved, assuming the
action follows nautilus convetions.
Args:
Model (nautilus.BaseModel): The model to update when the action
received.
Returns:
function(type, payload): The action handler for this model
"""
async def action_handler(service, action_type, payload, props, notify=True, **kwds):
# if the payload represents a new instance of `Model`
if action_type == get_crud_action('update', name or Model):
try:
# the props of the message
message_props = {}
# if there was a correlation id in the request
if 'correlation_id' in props:
# make sure it ends up in the reply
message_props['correlation_id'] = props['correlation_id'] # depends on [control=['if'], data=['props']]
# grab the nam eof the primary key for the model
pk_field = Model.primary_key()
# make sure there is a primary key to id the model
if not pk_field.name in payload:
# yell loudly
raise ValueError('Must specify the pk of the model when updating') # depends on [control=['if'], data=[]]
# grab the matching model
model = Model.select().where(pk_field == payload[pk_field.name]).get()
# remove the key from the payload
payload.pop(pk_field.name, None)
# for every key,value pair
for (key, value) in payload.items():
# TODO: add protection for certain fields from being
# changed by the api
setattr(model, key, value) # depends on [control=['for'], data=[]]
# save the updates
model.save()
# if we need to tell someone about what happened
if notify:
# publish the scucess event
await service.event_broker.send(payload=ModelSerializer().serialize(model), action_type=change_action_status(action_type, success_status()), **message_props) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
# if something goes wrong
except Exception as err:
# if we need to tell someone about what happened
if notify:
# publish the error as an event
await service.event_broker.send(payload=str(err), action_type=change_action_status(action_type, error_status()), **message_props) # depends on [control=['if'], data=[]]
else:
# otherwise we aren't supposed to notify
# raise the exception normally
raise err # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=['action_type']]
# return the handler
return action_handler |
def from_pubsec_file(cls: Type[SigningKeyType], path: str) -> SigningKeyType:
    """
    Build a SigningKey instance from a Duniter PubSec v1 file.

    :param path: Path to WIF file
    """
    with open(path, 'r') as handle:
        content = handle.read()

    # Patterns for the two expected lines of a PubSec v1 file
    pubkey_pattern = compile("pub: ([1-9A-HJ-NP-Za-km-z]{43,44})", MULTILINE)
    seckey_pattern = compile("sec: ([1-9A-HJ-NP-Za-km-z]{88,90})", MULTILINE)

    # the public key line must be present, although its value is unused here
    if pubkey_pattern.search(content) is None:
        raise Exception('Error: Bad format PubSec v1 file, missing public key')

    seckey_match = seckey_pattern.search(content)
    if seckey_match is None:
        raise Exception('Error: Bad format PubSec v1 file, missing sec key')

    # The seed is the first 32 bytes of the base58-decoded secret key.
    seed = bytes(Base58Encoder.decode(seckey_match.group(1))[0:32])
    return cls(seed)
constant[
Return SigningKey instance from Duniter WIF file
:param path: Path to WIF file
]
with call[name[open], parameter[name[path], constant[r]]] begin[:]
variable[pubsec_content] assign[=] call[name[fh].read, parameter[]]
variable[regex_pubkey] assign[=] call[name[compile], parameter[constant[pub: ([1-9A-HJ-NP-Za-km-z]{43,44})], name[MULTILINE]]]
variable[regex_signkey] assign[=] call[name[compile], parameter[constant[sec: ([1-9A-HJ-NP-Za-km-z]{88,90})], name[MULTILINE]]]
variable[match] assign[=] call[name[search], parameter[name[regex_pubkey], name[pubsec_content]]]
if <ast.UnaryOp object at 0x7da20c6e4e20> begin[:]
<ast.Raise object at 0x7da20c6e4b50>
variable[match] assign[=] call[name[search], parameter[name[regex_signkey], name[pubsec_content]]]
if <ast.UnaryOp object at 0x7da18f09ee30> begin[:]
<ast.Raise object at 0x7da18f09f910>
variable[signkey_hex] assign[=] call[call[name[match].groups, parameter[]]][constant[0]]
variable[seed] assign[=] call[name[bytes], parameter[call[call[name[Base58Encoder].decode, parameter[name[signkey_hex]]]][<ast.Slice object at 0x7da18f09e170>]]]
return[call[name[cls], parameter[name[seed]]]] | keyword[def] identifier[from_pubsec_file] ( identifier[cls] : identifier[Type] [ identifier[SigningKeyType] ], identifier[path] : identifier[str] )-> identifier[SigningKeyType] :
literal[string]
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[fh] :
identifier[pubsec_content] = identifier[fh] . identifier[read] ()
identifier[regex_pubkey] = identifier[compile] ( literal[string] , identifier[MULTILINE] )
identifier[regex_signkey] = identifier[compile] ( literal[string] , identifier[MULTILINE] )
identifier[match] = identifier[search] ( identifier[regex_pubkey] , identifier[pubsec_content] )
keyword[if] keyword[not] identifier[match] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[match] = identifier[search] ( identifier[regex_signkey] , identifier[pubsec_content] )
keyword[if] keyword[not] identifier[match] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[signkey_hex] = identifier[match] . identifier[groups] ()[ literal[int] ]
identifier[seed] = identifier[bytes] ( identifier[Base58Encoder] . identifier[decode] ( identifier[signkey_hex] )[ literal[int] : literal[int] ])
keyword[return] identifier[cls] ( identifier[seed] ) | def from_pubsec_file(cls: Type[SigningKeyType], path: str) -> SigningKeyType:
"""
Return SigningKey instance from Duniter WIF file
:param path: Path to WIF file
"""
with open(path, 'r') as fh:
pubsec_content = fh.read() # depends on [control=['with'], data=['fh']]
# line patterns
regex_pubkey = compile('pub: ([1-9A-HJ-NP-Za-km-z]{43,44})', MULTILINE)
regex_signkey = compile('sec: ([1-9A-HJ-NP-Za-km-z]{88,90})', MULTILINE)
# check public key field
match = search(regex_pubkey, pubsec_content)
if not match:
raise Exception('Error: Bad format PubSec v1 file, missing public key') # depends on [control=['if'], data=[]]
# check signkey field
match = search(regex_signkey, pubsec_content)
if not match:
raise Exception('Error: Bad format PubSec v1 file, missing sec key') # depends on [control=['if'], data=[]]
# capture signkey
signkey_hex = match.groups()[0]
# extract seed from signkey
seed = bytes(Base58Encoder.decode(signkey_hex)[0:32])
return cls(seed) |
def get_assets_by_genus_type(self, asset_genus_type):
    """Gets an ``AssetList`` corresponding to the given asset genus ``Type`` which does not include assets of types derived from the specified ``Type``.

    In plenary mode, the returned list contains all known assets or
    an error results. Otherwise, the returned list may contain only
    those assets that are accessible through this session.

    arg:    asset_genus_type (osid.type.Type): an asset genus type
    return: (osid.repository.AssetList) - the returned ``Asset
            list``
    raise:  NullArgument - ``asset_genus_type`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.resource.ResourceLookupSession.get_resources_by_genus_type
    # NOTE: This implementation currently ignores plenary view
    asset_collection = JSONClientValidated('repository',
                                           collection='Asset',
                                           runtime=self._runtime)
    # Combine the genus-type filter with the session's view filter.
    query = {'genusTypeId': str(asset_genus_type)}
    query.update(self._view_filter())
    cursor = asset_collection.find(query).sort('_id', DESCENDING)
    return objects.AssetList(cursor, runtime=self._runtime, proxy=self._proxy)
constant[Gets an ``AssetList`` corresponding to the given asset genus ``Type`` which does not include assets of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[repository]]]
variable[result] assign[=] call[call[name[collection].find, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da18dc07b80>], [<ast.Call object at 0x7da18dc060b0>]]]]]].sort, parameter[constant[_id], name[DESCENDING]]]
return[call[name[objects].AssetList, parameter[name[result]]]] | keyword[def] identifier[get_assets_by_genus_type] ( identifier[self] , identifier[asset_genus_type] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
identifier[result] = identifier[collection] . identifier[find] (
identifier[dict] ({ literal[string] : identifier[str] ( identifier[asset_genus_type] )},
** identifier[self] . identifier[_view_filter] ())). identifier[sort] ( literal[string] , identifier[DESCENDING] )
keyword[return] identifier[objects] . identifier[AssetList] ( identifier[result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ) | def get_assets_by_genus_type(self, asset_genus_type):
"""Gets an ``AssetList`` corresponding to the given asset genus ``Type`` which does not include assets of types derived from the specified ``Type``.
In plenary mode, the returned list contains all known assets or
an error results. Otherwise, the returned list may contain only
those assets that are accessible through this session.
arg: asset_genus_type (osid.type.Type): an asset genus type
return: (osid.repository.AssetList) - the returned ``Asset
list``
raise: NullArgument - ``asset_genus_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_genus_type
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('repository', collection='Asset', runtime=self._runtime)
result = collection.find(dict({'genusTypeId': str(asset_genus_type)}, **self._view_filter())).sort('_id', DESCENDING)
return objects.AssetList(result, runtime=self._runtime, proxy=self._proxy) |
def timesince_or_never(dt, default=None):
    """Format *dt* with Django's ``timesince`` filter, or fall back.

    Any value that is not a ``datetime.date`` (including its subclass
    ``datetime.datetime``) yields *default* instead.  When *default* is
    None, the translated string "Never" is used.
    """
    if default is None:
        default = _("Never")
    return timesince(dt) if isinstance(dt, datetime.date) else default
constant[Call the Django ``timesince`` filter or a given default string.
It returns the string *default* if *dt* is not a valid ``date``
or ``datetime`` object.
When *default* is None, "Never" is returned.
]
if compare[name[default] is constant[None]] begin[:]
variable[default] assign[=] call[name[_], parameter[constant[Never]]]
if call[name[isinstance], parameter[name[dt], name[datetime].date]] begin[:]
return[call[name[timesince], parameter[name[dt]]]] | keyword[def] identifier[timesince_or_never] ( identifier[dt] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[default] keyword[is] keyword[None] :
identifier[default] = identifier[_] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[dt] , identifier[datetime] . identifier[date] ):
keyword[return] identifier[timesince] ( identifier[dt] )
keyword[else] :
keyword[return] identifier[default] | def timesince_or_never(dt, default=None):
"""Call the Django ``timesince`` filter or a given default string.
It returns the string *default* if *dt* is not a valid ``date``
or ``datetime`` object.
When *default* is None, "Never" is returned.
"""
if default is None:
default = _('Never') # depends on [control=['if'], data=['default']]
if isinstance(dt, datetime.date):
return timesince(dt) # depends on [control=['if'], data=[]]
else:
return default |
def Cov(self):
    """
    Calculate the covariance matrix of the tips, assuming variance
    has accumulated along the branches of the tree according to the
    instance's branch-variance function.

    Returns
    -------
    M : (np.array)
        covariance matrix with tips arranged in standard transversal order.
    """
    # accumulate the covariance matrix by adding 'squares'
    M = np.zeros((self.N, self.N))  # N: number of tips (matrix is N x N)
    for n in self.tree.find_clades():
        if n == self.tree.root:
            # the root has no branch above it, so it contributes nothing
            continue
        # n._ii presumably lists the tip indices under clade n, so every
        # pair of tips below this branch shares its variance -- TODO confirm.
        # NOTE(review): indexing with np.meshgrid(...) passes a *list* of
        # index arrays; np.ix_(n._ii, n._ii) is the conventional way to
        # address the sub-block, and list-style (non-tuple) indexing is
        # deprecated in newer NumPy versions -- verify intended semantics.
        M[np.meshgrid(n._ii, n._ii)] += self.branch_variance(n)
    return M
return M | def function[Cov, parameter[self]]:
constant[
calculate the covariance matrix of the tips assuming variance
has accumulated along branches of the tree accoriding to the
the provided
Returns
-------
M : (np.array)
covariance matrix with tips arranged standard transersal order.
]
variable[M] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Attribute object at 0x7da1b0287c10>, <ast.Attribute object at 0x7da1b0286b60>]]]]
for taget[name[n]] in starred[call[name[self].tree.find_clades, parameter[]]] begin[:]
if compare[name[n] equal[==] name[self].tree.root] begin[:]
continue
<ast.AugAssign object at 0x7da1b02858d0>
return[name[M]] | keyword[def] identifier[Cov] ( identifier[self] ):
literal[string]
identifier[M] = identifier[np] . identifier[zeros] (( identifier[self] . identifier[N] , identifier[self] . identifier[N] ))
keyword[for] identifier[n] keyword[in] identifier[self] . identifier[tree] . identifier[find_clades] ():
keyword[if] identifier[n] == identifier[self] . identifier[tree] . identifier[root] :
keyword[continue]
identifier[M] [ identifier[np] . identifier[meshgrid] ( identifier[n] . identifier[_ii] , identifier[n] . identifier[_ii] )]+= identifier[self] . identifier[branch_variance] ( identifier[n] )
keyword[return] identifier[M] | def Cov(self):
"""
calculate the covariance matrix of the tips assuming variance
has accumulated along branches of the tree accoriding to the
the provided
Returns
-------
M : (np.array)
covariance matrix with tips arranged standard transersal order.
"""
# accumulate the covariance matrix by adding 'squares'
M = np.zeros((self.N, self.N))
for n in self.tree.find_clades():
if n == self.tree.root:
continue # depends on [control=['if'], data=[]]
M[np.meshgrid(n._ii, n._ii)] += self.branch_variance(n) # depends on [control=['for'], data=['n']]
return M |
def ConnectDevice(self, port_path=None, serial=None, default_timeout_ms=None, **kwargs):
    """Set up a transport handle for an adb device, then connect to it.

    The transport is chosen in this order:
    * an explicit ``handle`` keyword argument (instance of
      common.TcpHandle or common.UsbHandle; used by tests),
    * a TCP handle, when *serial* looks like ``address:port``,
    * otherwise a USB handle located via *port_path* / *serial*.

    Args:
      port_path: The filename of the usb port to use.
      serial: The serial number of the device to use.
      default_timeout_ms: The default timeout in milliseconds to use.
      kwargs: Forwarded to the connection step; recognized keys include
        ``banner`` (connection banner for the remote device),
        ``rsa_keys`` (AuthSigner instances for authentication) and
        ``auth_timeout_ms`` (how long to wait for the on-device
        public-key confirmation dialog; defaults are tuned for
        automation, i.e. low).

    Returns:
      self, with ``_handle`` set and the connection established.
    """
    if 'handle' in kwargs:
        # Explicit transport override (used by tests).
        self._handle = kwargs.pop('handle')
    else:
        # Normalize the serial to a unicode string when given as bytes.
        if isinstance(serial, (bytes, bytearray)):
            serial = serial.decode('utf-8')
        use_tcp = bool(serial) and ':' in serial
        if use_tcp:
            self._handle = common.TcpHandle(serial, timeout_ms=default_timeout_ms)
        else:
            self._handle = common.UsbHandle.FindAndOpen(
                DeviceIsAvailable, port_path=port_path, serial=serial,
                timeout_ms=default_timeout_ms)
    self._Connect(**kwargs)
    return self
constant[Convenience function to setup a transport handle for the adb device from
usb path or serial then connect to it.
Args:
port_path: The filename of usb port to use.
serial: The serial number of the device to use.
default_timeout_ms: The default timeout in milliseconds to use.
kwargs: handle: Device handle to use (instance of common.TcpHandle or common.UsbHandle)
banner: Connection banner to pass to the remote device
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
If serial specifies a TCP address:port, then a TCP connection is
used instead of a USB connection.
]
if compare[constant[handle] in name[kwargs]] begin[:]
name[self]._handle assign[=] call[name[kwargs].pop, parameter[constant[handle]]]
call[name[self]._Connect, parameter[]]
return[name[self]] | keyword[def] identifier[ConnectDevice] ( identifier[self] , identifier[port_path] = keyword[None] , identifier[serial] = keyword[None] , identifier[default_timeout_ms] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[self] . identifier[_handle] = identifier[kwargs] . identifier[pop] ( literal[string] )
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[serial] ,( identifier[bytes] , identifier[bytearray] )):
identifier[serial] = identifier[serial] . identifier[decode] ( literal[string] )
keyword[if] identifier[serial] keyword[and] literal[string] keyword[in] identifier[serial] :
identifier[self] . identifier[_handle] = identifier[common] . identifier[TcpHandle] ( identifier[serial] , identifier[timeout_ms] = identifier[default_timeout_ms] )
keyword[else] :
identifier[self] . identifier[_handle] = identifier[common] . identifier[UsbHandle] . identifier[FindAndOpen] (
identifier[DeviceIsAvailable] , identifier[port_path] = identifier[port_path] , identifier[serial] = identifier[serial] ,
identifier[timeout_ms] = identifier[default_timeout_ms] )
identifier[self] . identifier[_Connect] (** identifier[kwargs] )
keyword[return] identifier[self] | def ConnectDevice(self, port_path=None, serial=None, default_timeout_ms=None, **kwargs):
"""Convenience function to setup a transport handle for the adb device from
usb path or serial then connect to it.
Args:
port_path: The filename of usb port to use.
serial: The serial number of the device to use.
default_timeout_ms: The default timeout in milliseconds to use.
kwargs: handle: Device handle to use (instance of common.TcpHandle or common.UsbHandle)
banner: Connection banner to pass to the remote device
rsa_keys: List of AuthSigner subclass instances to be used for
authentication. The device can either accept one of these via the Sign
method, or we will send the result of GetPublicKey from the first one
if the device doesn't accept any of them.
auth_timeout_ms: Timeout to wait for when sending a new public key. This
is only relevant when we send a new public key. The device shows a
dialog and this timeout is how long to wait for that dialog. If used
in automation, this should be low to catch such a case as a failure
quickly; while in interactive settings it should be high to allow
users to accept the dialog. We default to automation here, so it's low
by default.
If serial specifies a TCP address:port, then a TCP connection is
used instead of a USB connection.
"""
# If there isnt a handle override (used by tests), build one here
if 'handle' in kwargs:
self._handle = kwargs.pop('handle') # depends on [control=['if'], data=['kwargs']]
else:
# if necessary, convert serial to a unicode string
if isinstance(serial, (bytes, bytearray)):
serial = serial.decode('utf-8') # depends on [control=['if'], data=[]]
if serial and ':' in serial:
self._handle = common.TcpHandle(serial, timeout_ms=default_timeout_ms) # depends on [control=['if'], data=[]]
else:
self._handle = common.UsbHandle.FindAndOpen(DeviceIsAvailable, port_path=port_path, serial=serial, timeout_ms=default_timeout_ms)
self._Connect(**kwargs)
return self |
def post(self, path, data=None, **kwargs):
    """
    HTTP post on the node
    """
    # Build the compute-API URL once; the node path is appended verbatim.
    url = "/projects/{}/{}/nodes/{}{}".format(
        self._project.id, self._node_type, self._id, path)
    if data:
        response = yield from self._compute.post(url, data=data, **kwargs)
    else:
        # Omit the data keyword entirely when no payload was given.
        response = yield from self._compute.post(url, **kwargs)
    return response
constant[
HTTP post on the node
]
if name[data] begin[:]
return[<ast.YieldFrom object at 0x7da18dc05c00>] | keyword[def] identifier[post] ( identifier[self] , identifier[path] , identifier[data] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[data] :
keyword[return] ( keyword[yield] keyword[from] identifier[self] . identifier[_compute] . identifier[post] ( literal[string] . identifier[format] ( identifier[self] . identifier[_project] . identifier[id] , identifier[self] . identifier[_node_type] , identifier[self] . identifier[_id] , identifier[path] ), identifier[data] = identifier[data] ,** identifier[kwargs] ))
keyword[else] :
keyword[return] ( keyword[yield] keyword[from] identifier[self] . identifier[_compute] . identifier[post] ( literal[string] . identifier[format] ( identifier[self] . identifier[_project] . identifier[id] , identifier[self] . identifier[_node_type] , identifier[self] . identifier[_id] , identifier[path] ),** identifier[kwargs] )) | def post(self, path, data=None, **kwargs):
"""
HTTP post on the node
"""
if data:
return (yield from self._compute.post('/projects/{}/{}/nodes/{}{}'.format(self._project.id, self._node_type, self._id, path), data=data, **kwargs)) # depends on [control=['if'], data=[]]
else:
return (yield from self._compute.post('/projects/{}/{}/nodes/{}{}'.format(self._project.id, self._node_type, self._id, path), **kwargs)) |
def category_axis(self):
    """
    The category axis of this chart. In the case of an XY or Bubble
    chart, this is the X axis. Raises |ValueError| if no category
    axis is defined (as is the case for a pie chart, for example).
    """
    # Probe the chart space for each possible axis element, in priority
    # order; the first non-empty list wins.
    chartSpace = self._chartSpace
    if chartSpace.catAx_lst:
        return CategoryAxis(chartSpace.catAx_lst[0])
    if chartSpace.dateAx_lst:
        return DateAxis(chartSpace.dateAx_lst[0])
    if chartSpace.valAx_lst:
        return ValueAxis(chartSpace.valAx_lst[0])
    raise ValueError('chart has no category axis')
constant[
The category axis of this chart. In the case of an XY or Bubble
chart, this is the X axis. Raises |ValueError| if no category
axis is defined (as is the case for a pie chart, for example).
]
variable[catAx_lst] assign[=] name[self]._chartSpace.catAx_lst
if name[catAx_lst] begin[:]
return[call[name[CategoryAxis], parameter[call[name[catAx_lst]][constant[0]]]]]
variable[dateAx_lst] assign[=] name[self]._chartSpace.dateAx_lst
if name[dateAx_lst] begin[:]
return[call[name[DateAxis], parameter[call[name[dateAx_lst]][constant[0]]]]]
variable[valAx_lst] assign[=] name[self]._chartSpace.valAx_lst
if name[valAx_lst] begin[:]
return[call[name[ValueAxis], parameter[call[name[valAx_lst]][constant[0]]]]]
<ast.Raise object at 0x7da20c6ab940> | keyword[def] identifier[category_axis] ( identifier[self] ):
literal[string]
identifier[catAx_lst] = identifier[self] . identifier[_chartSpace] . identifier[catAx_lst]
keyword[if] identifier[catAx_lst] :
keyword[return] identifier[CategoryAxis] ( identifier[catAx_lst] [ literal[int] ])
identifier[dateAx_lst] = identifier[self] . identifier[_chartSpace] . identifier[dateAx_lst]
keyword[if] identifier[dateAx_lst] :
keyword[return] identifier[DateAxis] ( identifier[dateAx_lst] [ literal[int] ])
identifier[valAx_lst] = identifier[self] . identifier[_chartSpace] . identifier[valAx_lst]
keyword[if] identifier[valAx_lst] :
keyword[return] identifier[ValueAxis] ( identifier[valAx_lst] [ literal[int] ])
keyword[raise] identifier[ValueError] ( literal[string] ) | def category_axis(self):
"""
The category axis of this chart. In the case of an XY or Bubble
chart, this is the X axis. Raises |ValueError| if no category
axis is defined (as is the case for a pie chart, for example).
"""
catAx_lst = self._chartSpace.catAx_lst
if catAx_lst:
return CategoryAxis(catAx_lst[0]) # depends on [control=['if'], data=[]]
dateAx_lst = self._chartSpace.dateAx_lst
if dateAx_lst:
return DateAxis(dateAx_lst[0]) # depends on [control=['if'], data=[]]
valAx_lst = self._chartSpace.valAx_lst
if valAx_lst:
return ValueAxis(valAx_lst[0]) # depends on [control=['if'], data=[]]
raise ValueError('chart has no category axis') |
def get(self, batch_id, **queryparams):
    """
    Get the status of a batch request.
    :param batch_id: The unique id for the batch operation.
    :type batch_id: :py:class:`str`
    :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
    """
    # Remember which batch was requested; status is unknown until fetched.
    self.batch_id = batch_id
    self.operation_status = None
    path = self._build_path(batch_id)
    return self._mc_client._get(url=path, **queryparams)
constant[
Get the status of a batch request.
:param batch_id: The unique id for the batch operation.
:type batch_id: :py:class:`str`
:param queryparams: The query string parameters
queryparams['fields'] = []
queryparams['exclude_fields'] = []
]
name[self].batch_id assign[=] name[batch_id]
name[self].operation_status assign[=] constant[None]
return[call[name[self]._mc_client._get, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[batch_id] ,** identifier[queryparams] ):
literal[string]
identifier[self] . identifier[batch_id] = identifier[batch_id]
identifier[self] . identifier[operation_status] = keyword[None]
keyword[return] identifier[self] . identifier[_mc_client] . identifier[_get] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[batch_id] ),** identifier[queryparams] ) | def get(self, batch_id, **queryparams):
"""
Get the status of a batch request.
:param batch_id: The unique id for the batch operation.
:type batch_id: :py:class:`str`
:param queryparams: The query string parameters
queryparams['fields'] = []
queryparams['exclude_fields'] = []
"""
self.batch_id = batch_id
self.operation_status = None
return self._mc_client._get(url=self._build_path(batch_id), **queryparams) |
def _init_optional_attrs(optional_attrs):
"""Create OboOptionalAttrs or return None."""
if optional_attrs is None:
return None
opts = OboOptionalAttrs.get_optional_attrs(optional_attrs)
if opts:
return OboOptionalAttrs(opts) | def function[_init_optional_attrs, parameter[optional_attrs]]:
constant[Create OboOptionalAttrs or return None.]
if compare[name[optional_attrs] is constant[None]] begin[:]
return[constant[None]]
variable[opts] assign[=] call[name[OboOptionalAttrs].get_optional_attrs, parameter[name[optional_attrs]]]
if name[opts] begin[:]
return[call[name[OboOptionalAttrs], parameter[name[opts]]]] | keyword[def] identifier[_init_optional_attrs] ( identifier[optional_attrs] ):
literal[string]
keyword[if] identifier[optional_attrs] keyword[is] keyword[None] :
keyword[return] keyword[None]
identifier[opts] = identifier[OboOptionalAttrs] . identifier[get_optional_attrs] ( identifier[optional_attrs] )
keyword[if] identifier[opts] :
keyword[return] identifier[OboOptionalAttrs] ( identifier[opts] ) | def _init_optional_attrs(optional_attrs):
"""Create OboOptionalAttrs or return None."""
if optional_attrs is None:
return None # depends on [control=['if'], data=[]]
opts = OboOptionalAttrs.get_optional_attrs(optional_attrs)
if opts:
return OboOptionalAttrs(opts) # depends on [control=['if'], data=[]] |
def _goargs(self, ret, go_args):
"""Get GO IDs and colors for GO IDs from the GO ID runtime arguments."""
goids = set()
go2color = {}
# Match on "GO ID" or "GO ID and color"
re_gocolor = re.compile(r'(GO:\d{7})((?:#[0-9a-fA-F]{6})?)')
for go_arg in go_args:
mtch = re_gocolor.match(go_arg)
if mtch:
goid, color = mtch.groups()
goids.add(goid)
if color:
go2color[goid] = color
else:
print("WARNING: UNRECOGNIZED ARG({})".format(go_arg))
self._update_ret(ret, goids, go2color) | def function[_goargs, parameter[self, ret, go_args]]:
constant[Get GO IDs and colors for GO IDs from the GO ID runtime arguments.]
variable[goids] assign[=] call[name[set], parameter[]]
variable[go2color] assign[=] dictionary[[], []]
variable[re_gocolor] assign[=] call[name[re].compile, parameter[constant[(GO:\d{7})((?:#[0-9a-fA-F]{6})?)]]]
for taget[name[go_arg]] in starred[name[go_args]] begin[:]
variable[mtch] assign[=] call[name[re_gocolor].match, parameter[name[go_arg]]]
if name[mtch] begin[:]
<ast.Tuple object at 0x7da20c6a9cf0> assign[=] call[name[mtch].groups, parameter[]]
call[name[goids].add, parameter[name[goid]]]
if name[color] begin[:]
call[name[go2color]][name[goid]] assign[=] name[color]
call[name[self]._update_ret, parameter[name[ret], name[goids], name[go2color]]] | keyword[def] identifier[_goargs] ( identifier[self] , identifier[ret] , identifier[go_args] ):
literal[string]
identifier[goids] = identifier[set] ()
identifier[go2color] ={}
identifier[re_gocolor] = identifier[re] . identifier[compile] ( literal[string] )
keyword[for] identifier[go_arg] keyword[in] identifier[go_args] :
identifier[mtch] = identifier[re_gocolor] . identifier[match] ( identifier[go_arg] )
keyword[if] identifier[mtch] :
identifier[goid] , identifier[color] = identifier[mtch] . identifier[groups] ()
identifier[goids] . identifier[add] ( identifier[goid] )
keyword[if] identifier[color] :
identifier[go2color] [ identifier[goid] ]= identifier[color]
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[go_arg] ))
identifier[self] . identifier[_update_ret] ( identifier[ret] , identifier[goids] , identifier[go2color] ) | def _goargs(self, ret, go_args):
"""Get GO IDs and colors for GO IDs from the GO ID runtime arguments."""
goids = set()
go2color = {}
# Match on "GO ID" or "GO ID and color"
re_gocolor = re.compile('(GO:\\d{7})((?:#[0-9a-fA-F]{6})?)')
for go_arg in go_args:
mtch = re_gocolor.match(go_arg)
if mtch:
(goid, color) = mtch.groups()
goids.add(goid)
if color:
go2color[goid] = color # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('WARNING: UNRECOGNIZED ARG({})'.format(go_arg)) # depends on [control=['for'], data=['go_arg']]
self._update_ret(ret, goids, go2color) |
def _indirect_jump_resolved(self, jump, jump_addr, resolved_by, targets):
    """
    Record a successfully resolved indirect jump in the knowledge base.
    :param IndirectJump jump: The resolved indirect jump, or None if an
                              IndirectJump instance is not available.
    :param int jump_addr: Address of the resolved indirect jump (used
                          when *jump* is None).
    :param IndirectJumpResolver resolved_by: The resolver that resolved
                                             this indirect jump.
    :param list targets: List of indirect jump targets.
    :return: None
    """
    if jump is not None:
        addr = jump.addr
    else:
        addr = jump_addr
    l.debug('The indirect jump at %#x is successfully resolved by %s. It has %d targets.', addr, resolved_by, len(targets))
    self.kb.resolved_indirect_jumps.add(addr)
constant[
Called when an indirect jump is successfully resolved.
:param IndirectJump jump: The resolved indirect jump, or None if an IndirectJump instance is
not available.
:param int jump_addr: Address of the resolved indirect jump.
:param IndirectJumpResolver resolved_by: The resolver used to resolve this indirect jump.
:param list targets: List of indirect jump targets.
:param CFGJob job: The job at the start of the block containing the indirect jump.
:return: None
]
variable[addr] assign[=] <ast.IfExp object at 0x7da18c4cfb50>
call[name[l].debug, parameter[constant[The indirect jump at %#x is successfully resolved by %s. It has %d targets.], name[addr], name[resolved_by], call[name[len], parameter[name[targets]]]]]
call[name[self].kb.resolved_indirect_jumps.add, parameter[name[addr]]] | keyword[def] identifier[_indirect_jump_resolved] ( identifier[self] , identifier[jump] , identifier[jump_addr] , identifier[resolved_by] , identifier[targets] ):
literal[string]
identifier[addr] = identifier[jump] . identifier[addr] keyword[if] identifier[jump] keyword[is] keyword[not] keyword[None] keyword[else] identifier[jump_addr]
identifier[l] . identifier[debug] ( literal[string] , identifier[addr] , identifier[resolved_by] , identifier[len] ( identifier[targets] ))
identifier[self] . identifier[kb] . identifier[resolved_indirect_jumps] . identifier[add] ( identifier[addr] ) | def _indirect_jump_resolved(self, jump, jump_addr, resolved_by, targets):
"""
Called when an indirect jump is successfully resolved.
:param IndirectJump jump: The resolved indirect jump, or None if an IndirectJump instance is
not available.
:param int jump_addr: Address of the resolved indirect jump.
:param IndirectJumpResolver resolved_by: The resolver used to resolve this indirect jump.
:param list targets: List of indirect jump targets.
:param CFGJob job: The job at the start of the block containing the indirect jump.
:return: None
"""
addr = jump.addr if jump is not None else jump_addr
l.debug('The indirect jump at %#x is successfully resolved by %s. It has %d targets.', addr, resolved_by, len(targets))
self.kb.resolved_indirect_jumps.add(addr) |
def covstr(strings):
    """Convert a numeric string to int, falling back to float."""
    try:
        return int(strings)
    except ValueError:
        # Not a valid integer literal; let float() parse (or raise).
        return float(strings)
constant[ convert string to int or float. ]
<ast.Try object at 0x7da1b0761bd0>
return[name[result]] | keyword[def] identifier[covstr] ( identifier[strings] ):
literal[string]
keyword[try] :
identifier[result] = identifier[int] ( identifier[strings] )
keyword[except] identifier[ValueError] :
identifier[result] = identifier[float] ( identifier[strings] )
keyword[return] identifier[result] | def covstr(strings):
""" convert string to int or float. """
try:
result = int(strings) # depends on [control=['try'], data=[]]
except ValueError:
result = float(strings) # depends on [control=['except'], data=[]]
return result |
def GET_AUTH(self, courseid):  # pylint: disable=arguments-differ
    """Handle an authenticated GET request: render the page for *courseid*."""
    # Rights check happens inside the lookup; only the course is needed here.
    course = self.get_course_and_check_rights(courseid)[0]
    return self.page(course)
def configure_devel_job(
        config_url, rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch,
        pull_request=False,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        source_repository=None,
        build_targets=None,
        dry_run=False):
    """
    Configure a single Jenkins devel job.

    This includes the following steps:
    - clone the source repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the release/run_devel_job.py script

    Most keyword arguments (config, build_file, index, dist_file,
    dist_cache, jenkins, views, source_repository) act as caches: when
    None they are (re)fetched or derived here, so callers configuring
    many jobs can pass them in to avoid repeated downloads/connections.
    Raises JobValidationError when the repository / OS / architecture
    selection does not match the build file or distribution file.

    NOTE(review): groovy_script is accepted but never used in this
    function -- presumably kept for signature compatibility with the
    sibling configure_*_job functions; confirm before removing.
    """
    # Resolve inputs the caller did not supply from the config index.
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_source_build_files(config, rosdistro_name)
        build_file = build_files[source_build_name]
    # Overwrite build_file.targets if build_targets is specified
    if build_targets is not None:
        build_file.targets = build_targets
    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')
    repo_names = dist_file.repositories.keys()
    # Validate the repository selection and pick up its source entry;
    # when repo_name is None the caller must have passed source_repository.
    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))
        repo = dist_file.repositories[repo_name]
        if not repo.source_repository:
            raise JobValidationError(
                "Repository '%s' has no source section" % repo_name)
        if not repo.source_repository.version:
            raise JobValidationError(
                "Repository '%s' has no source version" % repo_name)
        source_repository = repo.source_repository
    # Validate the requested OS / OS code name / architecture against
    # the targets declared in the build file.
    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))
    # The distribution cache is only needed when maintainers get notified.
    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    # Ensure the parent view for this job exists, unless the caller
    # manages views itself (views is not None).
    if views is None:
        view_name = get_devel_view_name(
            rosdistro_name, source_build_name, pull_request=pull_request)
        configure_devel_view(jenkins, view_name, dry_run=dry_run)
    job_name = get_devel_job_name(
        rosdistro_name, source_build_name,
        repo_name, os_name, os_code_name, arch, pull_request)
    job_config = _get_devel_job_config(
        index, config, rosdistro_name, source_build_name,
        build_file, os_name, os_code_name, arch, source_repository,
        repo_name, pull_request, job_name, dist_cache=dist_cache,
        is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    # NOTE(review): isinstance(jenkins, object) is always True in Python;
    # the effective guard is only `jenkins is not False` (callers can pass
    # False to skip configuring and just get the name/config back) -- the
    # isinstance test looks redundant, confirm before simplifying.
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config, dry_run=dry_run)
    return job_name, job_config
constant[
Configure a single Jenkins devel job.
This includes the following steps:
- clone the source repository to use
- clone the ros_buildfarm repository
- write the distribution repository keys into files
- invoke the release/run_devel_job.py script
]
if compare[name[config] is constant[None]] begin[:]
variable[config] assign[=] call[name[get_config_index], parameter[name[config_url]]]
if compare[name[build_file] is constant[None]] begin[:]
variable[build_files] assign[=] call[name[get_source_build_files], parameter[name[config], name[rosdistro_name]]]
variable[build_file] assign[=] call[name[build_files]][name[source_build_name]]
if compare[name[build_targets] is_not constant[None]] begin[:]
name[build_file].targets assign[=] name[build_targets]
if compare[name[index] is constant[None]] begin[:]
variable[index] assign[=] call[name[get_index], parameter[name[config].rosdistro_index_url]]
if compare[name[dist_file] is constant[None]] begin[:]
variable[dist_file] assign[=] call[name[get_distribution_file], parameter[name[index], name[rosdistro_name], name[build_file]]]
if <ast.UnaryOp object at 0x7da1b009ae60> begin[:]
<ast.Raise object at 0x7da1b009ae00>
variable[repo_names] assign[=] call[name[dist_file].repositories.keys, parameter[]]
if compare[name[repo_name] is_not constant[None]] begin[:]
if compare[name[repo_name] <ast.NotIn object at 0x7da2590d7190> name[repo_names]] begin[:]
<ast.Raise object at 0x7da1b009aa70>
variable[repo] assign[=] call[name[dist_file].repositories][name[repo_name]]
if <ast.UnaryOp object at 0x7da1b009a650> begin[:]
<ast.Raise object at 0x7da1b009a5c0>
if <ast.UnaryOp object at 0x7da1b009a470> begin[:]
<ast.Raise object at 0x7da1b009a3b0>
variable[source_repository] assign[=] name[repo].source_repository
if compare[name[os_name] <ast.NotIn object at 0x7da2590d7190> call[name[build_file].targets.keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b0098e50>
if compare[name[os_code_name] <ast.NotIn object at 0x7da2590d7190> call[call[name[build_file].targets][name[os_name]].keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da1b0098940>
if compare[name[arch] <ast.NotIn object at 0x7da2590d7190> call[call[name[build_file].targets][name[os_name]]][name[os_code_name]]] begin[:]
<ast.Raise object at 0x7da1b00983d0>
if <ast.BoolOp object at 0x7da1b0033be0> begin[:]
variable[dist_cache] assign[=] call[name[get_distribution_cache], parameter[name[index], name[rosdistro_name]]]
if compare[name[jenkins] is constant[None]] begin[:]
from relative_module[ros_buildfarm.jenkins] import module[connect]
variable[jenkins] assign[=] call[name[connect], parameter[name[config].jenkins_url]]
if compare[name[views] is constant[None]] begin[:]
variable[view_name] assign[=] call[name[get_devel_view_name], parameter[name[rosdistro_name], name[source_build_name]]]
call[name[configure_devel_view], parameter[name[jenkins], name[view_name]]]
variable[job_name] assign[=] call[name[get_devel_job_name], parameter[name[rosdistro_name], name[source_build_name], name[repo_name], name[os_name], name[os_code_name], name[arch], name[pull_request]]]
variable[job_config] assign[=] call[name[_get_devel_job_config], parameter[name[index], name[config], name[rosdistro_name], name[source_build_name], name[build_file], name[os_name], name[os_code_name], name[arch], name[source_repository], name[repo_name], name[pull_request], name[job_name]]]
if <ast.BoolOp object at 0x7da1b0030ca0> begin[:]
from relative_module[ros_buildfarm.jenkins] import module[configure_job]
call[name[configure_job], parameter[name[jenkins], name[job_name], name[job_config]]]
return[tuple[[<ast.Name object at 0x7da1b00329e0>, <ast.Name object at 0x7da1b0033160>]]] | keyword[def] identifier[configure_devel_job] (
identifier[config_url] , identifier[rosdistro_name] , identifier[source_build_name] ,
identifier[repo_name] , identifier[os_name] , identifier[os_code_name] , identifier[arch] ,
identifier[pull_request] = keyword[False] ,
identifier[config] = keyword[None] , identifier[build_file] = keyword[None] ,
identifier[index] = keyword[None] , identifier[dist_file] = keyword[None] , identifier[dist_cache] = keyword[None] ,
identifier[jenkins] = keyword[None] , identifier[views] = keyword[None] ,
identifier[is_disabled] = keyword[False] ,
identifier[groovy_script] = keyword[None] ,
identifier[source_repository] = keyword[None] ,
identifier[build_targets] = keyword[None] ,
identifier[dry_run] = keyword[False] ):
literal[string]
keyword[if] identifier[config] keyword[is] keyword[None] :
identifier[config] = identifier[get_config_index] ( identifier[config_url] )
keyword[if] identifier[build_file] keyword[is] keyword[None] :
identifier[build_files] = identifier[get_source_build_files] ( identifier[config] , identifier[rosdistro_name] )
identifier[build_file] = identifier[build_files] [ identifier[source_build_name] ]
keyword[if] identifier[build_targets] keyword[is] keyword[not] keyword[None] :
identifier[build_file] . identifier[targets] = identifier[build_targets]
keyword[if] identifier[index] keyword[is] keyword[None] :
identifier[index] = identifier[get_index] ( identifier[config] . identifier[rosdistro_index_url] )
keyword[if] identifier[dist_file] keyword[is] keyword[None] :
identifier[dist_file] = identifier[get_distribution_file] ( identifier[index] , identifier[rosdistro_name] , identifier[build_file] )
keyword[if] keyword[not] identifier[dist_file] :
keyword[raise] identifier[JobValidationError] (
literal[string] )
identifier[repo_names] = identifier[dist_file] . identifier[repositories] . identifier[keys] ()
keyword[if] identifier[repo_name] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[repo_name] keyword[not] keyword[in] identifier[repo_names] :
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[repo_name] +
literal[string] %
literal[string] . identifier[join] ( identifier[sorted] ( identifier[repo_names] )))
identifier[repo] = identifier[dist_file] . identifier[repositories] [ identifier[repo_name] ]
keyword[if] keyword[not] identifier[repo] . identifier[source_repository] :
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[repo_name] )
keyword[if] keyword[not] identifier[repo] . identifier[source_repository] . identifier[version] :
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[repo_name] )
identifier[source_repository] = identifier[repo] . identifier[source_repository]
keyword[if] identifier[os_name] keyword[not] keyword[in] identifier[build_file] . identifier[targets] . identifier[keys] ():
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[os_name] +
literal[string] +
literal[string] . identifier[join] ( identifier[sorted] ( identifier[build_file] . identifier[targets] . identifier[keys] ())))
keyword[if] identifier[os_code_name] keyword[not] keyword[in] identifier[build_file] . identifier[targets] [ identifier[os_name] ]. identifier[keys] ():
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[os_code_name] +
literal[string] +
literal[string] . identifier[join] ( identifier[sorted] ( identifier[build_file] . identifier[targets] [ identifier[os_name] ]. identifier[keys] ())))
keyword[if] identifier[arch] keyword[not] keyword[in] identifier[build_file] . identifier[targets] [ identifier[os_name] ][ identifier[os_code_name] ]:
keyword[raise] identifier[JobValidationError] (
literal[string] % identifier[arch] +
literal[string] % literal[string] . identifier[join] ( identifier[sorted] (
identifier[build_file] . identifier[targets] [ identifier[os_name] ][ identifier[os_code_name] ])))
keyword[if] identifier[dist_cache] keyword[is] keyword[None] keyword[and] identifier[build_file] . identifier[notify_maintainers] :
identifier[dist_cache] = identifier[get_distribution_cache] ( identifier[index] , identifier[rosdistro_name] )
keyword[if] identifier[jenkins] keyword[is] keyword[None] :
keyword[from] identifier[ros_buildfarm] . identifier[jenkins] keyword[import] identifier[connect]
identifier[jenkins] = identifier[connect] ( identifier[config] . identifier[jenkins_url] )
keyword[if] identifier[views] keyword[is] keyword[None] :
identifier[view_name] = identifier[get_devel_view_name] (
identifier[rosdistro_name] , identifier[source_build_name] , identifier[pull_request] = identifier[pull_request] )
identifier[configure_devel_view] ( identifier[jenkins] , identifier[view_name] , identifier[dry_run] = identifier[dry_run] )
identifier[job_name] = identifier[get_devel_job_name] (
identifier[rosdistro_name] , identifier[source_build_name] ,
identifier[repo_name] , identifier[os_name] , identifier[os_code_name] , identifier[arch] , identifier[pull_request] )
identifier[job_config] = identifier[_get_devel_job_config] (
identifier[index] , identifier[config] , identifier[rosdistro_name] , identifier[source_build_name] ,
identifier[build_file] , identifier[os_name] , identifier[os_code_name] , identifier[arch] , identifier[source_repository] ,
identifier[repo_name] , identifier[pull_request] , identifier[job_name] , identifier[dist_cache] = identifier[dist_cache] ,
identifier[is_disabled] = identifier[is_disabled] )
keyword[if] identifier[isinstance] ( identifier[jenkins] , identifier[object] ) keyword[and] identifier[jenkins] keyword[is] keyword[not] keyword[False] :
keyword[from] identifier[ros_buildfarm] . identifier[jenkins] keyword[import] identifier[configure_job]
identifier[configure_job] ( identifier[jenkins] , identifier[job_name] , identifier[job_config] , identifier[dry_run] = identifier[dry_run] )
keyword[return] identifier[job_name] , identifier[job_config] | def configure_devel_job(config_url, rosdistro_name, source_build_name, repo_name, os_name, os_code_name, arch, pull_request=False, config=None, build_file=None, index=None, dist_file=None, dist_cache=None, jenkins=None, views=None, is_disabled=False, groovy_script=None, source_repository=None, build_targets=None, dry_run=False):
"""
Configure a single Jenkins devel job.
This includes the following steps:
- clone the source repository to use
- clone the ros_buildfarm repository
- write the distribution repository keys into files
- invoke the release/run_devel_job.py script
"""
if config is None:
config = get_config_index(config_url) # depends on [control=['if'], data=['config']]
if build_file is None:
build_files = get_source_build_files(config, rosdistro_name)
build_file = build_files[source_build_name] # depends on [control=['if'], data=['build_file']]
# Overwrite build_file.targets if build_targets is specified
if build_targets is not None:
build_file.targets = build_targets # depends on [control=['if'], data=['build_targets']]
if index is None:
index = get_index(config.rosdistro_index_url) # depends on [control=['if'], data=['index']]
if dist_file is None:
dist_file = get_distribution_file(index, rosdistro_name, build_file)
if not dist_file:
raise JobValidationError('No distribution file matches the build file') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['dist_file']]
repo_names = dist_file.repositories.keys()
if repo_name is not None:
if repo_name not in repo_names:
raise JobValidationError("Invalid repository name '%s' " % repo_name + 'choose one of the following: %s' % ', '.join(sorted(repo_names))) # depends on [control=['if'], data=['repo_name', 'repo_names']]
repo = dist_file.repositories[repo_name]
if not repo.source_repository:
raise JobValidationError("Repository '%s' has no source section" % repo_name) # depends on [control=['if'], data=[]]
if not repo.source_repository.version:
raise JobValidationError("Repository '%s' has no source version" % repo_name) # depends on [control=['if'], data=[]]
source_repository = repo.source_repository # depends on [control=['if'], data=['repo_name']]
if os_name not in build_file.targets.keys():
raise JobValidationError("Invalid OS name '%s' " % os_name + 'choose one of the following: ' + ', '.join(sorted(build_file.targets.keys()))) # depends on [control=['if'], data=['os_name']]
if os_code_name not in build_file.targets[os_name].keys():
raise JobValidationError("Invalid OS code name '%s' " % os_code_name + 'choose one of the following: ' + ', '.join(sorted(build_file.targets[os_name].keys()))) # depends on [control=['if'], data=['os_code_name']]
if arch not in build_file.targets[os_name][os_code_name]:
raise JobValidationError("Invalid architecture '%s' " % arch + 'choose one of the following: %s' % ', '.join(sorted(build_file.targets[os_name][os_code_name]))) # depends on [control=['if'], data=['arch']]
if dist_cache is None and build_file.notify_maintainers:
dist_cache = get_distribution_cache(index, rosdistro_name) # depends on [control=['if'], data=[]]
if jenkins is None:
from ros_buildfarm.jenkins import connect
jenkins = connect(config.jenkins_url) # depends on [control=['if'], data=['jenkins']]
if views is None:
view_name = get_devel_view_name(rosdistro_name, source_build_name, pull_request=pull_request)
configure_devel_view(jenkins, view_name, dry_run=dry_run) # depends on [control=['if'], data=[]]
job_name = get_devel_job_name(rosdistro_name, source_build_name, repo_name, os_name, os_code_name, arch, pull_request)
job_config = _get_devel_job_config(index, config, rosdistro_name, source_build_name, build_file, os_name, os_code_name, arch, source_repository, repo_name, pull_request, job_name, dist_cache=dist_cache, is_disabled=is_disabled)
# jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
if isinstance(jenkins, object) and jenkins is not False:
from ros_buildfarm.jenkins import configure_job
configure_job(jenkins, job_name, job_config, dry_run=dry_run) # depends on [control=['if'], data=[]]
return (job_name, job_config) |
def exec_nb_cmd(self, cmd):
    '''
    Yield None until cmd finished.

    Runs *cmd* without blocking: yields (None, None, None) while the
    command is still executing, then one final tuple of
    (joined stdout, joined stderr, return code).
    '''
    cmd = self._cmd_str(cmd)
    logmsg = 'Executing non-blocking command: {0}'.format(cmd)
    # Mask the password before the command line reaches the debug log.
    if self.passwd:
        logmsg = logmsg.replace(self.passwd, '*' * 6)
    log.debug(logmsg)
    stdout_chunks = []
    stderr_chunks = []
    rcode = None
    for out, err, rcode in self._run_nb_cmd(cmd):
        if out is not None:
            stdout_chunks.append(out)
        if err is not None:
            stderr_chunks.append(err)
        # Still running: signal "no result yet" to the caller.
        yield None, None, None
    yield ''.join(stdout_chunks), ''.join(stderr_chunks), rcode
constant[
Yield None until cmd finished
]
variable[r_out] assign[=] list[[]]
variable[r_err] assign[=] list[[]]
variable[rcode] assign[=] constant[None]
variable[cmd] assign[=] call[name[self]._cmd_str, parameter[name[cmd]]]
variable[logmsg] assign[=] call[constant[Executing non-blocking command: {0}].format, parameter[name[cmd]]]
if name[self].passwd begin[:]
variable[logmsg] assign[=] call[name[logmsg].replace, parameter[name[self].passwd, binary_operation[constant[*] * constant[6]]]]
call[name[log].debug, parameter[name[logmsg]]]
for taget[tuple[[<ast.Name object at 0x7da1b21953f0>, <ast.Name object at 0x7da1b21970d0>, <ast.Name object at 0x7da1b2195390>]]] in starred[call[name[self]._run_nb_cmd, parameter[name[cmd]]]] begin[:]
if compare[name[out] is_not constant[None]] begin[:]
call[name[r_out].append, parameter[name[out]]]
if compare[name[err] is_not constant[None]] begin[:]
call[name[r_err].append, parameter[name[err]]]
<ast.Yield object at 0x7da1b2195630>
<ast.Yield object at 0x7da1b2195e70> | keyword[def] identifier[exec_nb_cmd] ( identifier[self] , identifier[cmd] ):
literal[string]
identifier[r_out] =[]
identifier[r_err] =[]
identifier[rcode] = keyword[None]
identifier[cmd] = identifier[self] . identifier[_cmd_str] ( identifier[cmd] )
identifier[logmsg] = literal[string] . identifier[format] ( identifier[cmd] )
keyword[if] identifier[self] . identifier[passwd] :
identifier[logmsg] = identifier[logmsg] . identifier[replace] ( identifier[self] . identifier[passwd] ,( literal[string] * literal[int] ))
identifier[log] . identifier[debug] ( identifier[logmsg] )
keyword[for] identifier[out] , identifier[err] , identifier[rcode] keyword[in] identifier[self] . identifier[_run_nb_cmd] ( identifier[cmd] ):
keyword[if] identifier[out] keyword[is] keyword[not] keyword[None] :
identifier[r_out] . identifier[append] ( identifier[out] )
keyword[if] identifier[err] keyword[is] keyword[not] keyword[None] :
identifier[r_err] . identifier[append] ( identifier[err] )
keyword[yield] keyword[None] , keyword[None] , keyword[None]
keyword[yield] literal[string] . identifier[join] ( identifier[r_out] ), literal[string] . identifier[join] ( identifier[r_err] ), identifier[rcode] | def exec_nb_cmd(self, cmd):
"""
Yield None until cmd finished
"""
r_out = []
r_err = []
rcode = None
cmd = self._cmd_str(cmd)
logmsg = 'Executing non-blocking command: {0}'.format(cmd)
if self.passwd:
logmsg = logmsg.replace(self.passwd, '*' * 6) # depends on [control=['if'], data=[]]
log.debug(logmsg)
for (out, err, rcode) in self._run_nb_cmd(cmd):
if out is not None:
r_out.append(out) # depends on [control=['if'], data=['out']]
if err is not None:
r_err.append(err) # depends on [control=['if'], data=['err']]
yield (None, None, None) # depends on [control=['for'], data=[]]
yield (''.join(r_out), ''.join(r_err), rcode) |
def get_asset_contents(self):
    """Gets the content of this asset.
    return: (osid.repository.AssetContentList) - the asset contents
    raise: OperationFailed - unable to complete request
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for osid.repository.Asset.get_asset_contents_template
    contents = self._my_map['assetContents']
    return AssetContentList(contents, runtime=self._runtime, proxy=self._proxy)
constant[Gets the content of this asset.
return: (osid.repository.AssetContentList) - the asset contents
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
]
return[call[name[AssetContentList], parameter[call[name[self]._my_map][constant[assetContents]]]]] | keyword[def] identifier[get_asset_contents] ( identifier[self] ):
literal[string]
keyword[return] identifier[AssetContentList] (
identifier[self] . identifier[_my_map] [ literal[string] ],
identifier[runtime] = identifier[self] . identifier[_runtime] ,
identifier[proxy] = identifier[self] . identifier[_proxy] ) | def get_asset_contents(self):
"""Gets the content of this asset.
return: (osid.repository.AssetContentList) - the asset contents
raise: OperationFailed - unable to complete request
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.repository.Asset.get_asset_contents_template
return AssetContentList(self._my_map['assetContents'], runtime=self._runtime, proxy=self._proxy) |
def get_block_info(self):
    """
    Get the retrieved block information.

    Return [(height, [txs])] on success, ordered on height.
    Raise Exception if not finished downloading.
    """
    if not self.finished:
        raise Exception("Not finished downloading")
    # Dict iteration order is insertion order, not height order, so sort
    # explicitly to honor the documented "ordered on height" guarantee.
    ret = [(data['height'], data['txns'])
           for data in self.block_info.values()]
    ret.sort(key=lambda item: item[0])
    return ret
constant[
Get the retrieved block information.
Return [(height, [txs])] on success, ordered on height
Raise if not finished downloading
]
if <ast.UnaryOp object at 0x7da1b26ca4a0> begin[:]
<ast.Raise object at 0x7da1b26cba90>
variable[ret] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b26cbf70>, <ast.Name object at 0x7da1b26c9f30>]]] in starred[call[name[self].block_info.items, parameter[]]] begin[:]
call[name[ret].append, parameter[tuple[[<ast.Subscript object at 0x7da1b26ca500>, <ast.Subscript object at 0x7da1b26c98a0>]]]]
return[name[ret]] | keyword[def] identifier[get_block_info] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[finished] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[ret] =[]
keyword[for] ( identifier[block_hash] , identifier[block_data] ) keyword[in] identifier[self] . identifier[block_info] . identifier[items] ():
identifier[ret] . identifier[append] (( identifier[block_data] [ literal[string] ], identifier[block_data] [ literal[string] ]))
keyword[return] identifier[ret] | def get_block_info(self):
"""
Get the retrieved block information.
Return [(height, [txs])] on success, ordered on height
Raise if not finished downloading
"""
if not self.finished:
raise Exception('Not finished downloading') # depends on [control=['if'], data=[]]
ret = []
for (block_hash, block_data) in self.block_info.items():
ret.append((block_data['height'], block_data['txns'])) # depends on [control=['for'], data=[]]
return ret |
def triangle_address(fx, pt):
    '''
    triangle_address(FX, P) yields an address coordinate (t,r) for the point P in the triangle
    defined by the (3 x d)-sized coordinate matrix FX, in which each row of the matrix is the
    d-dimensional vector representing the respective triangle vertex for triangle [A,B,C]. The
    resulting coordinates (t,r) (0 <= t <= 1, 0 <= r <= 1) address the point P such that, if t gives
    the fraction of the angle from vector AB to vector AC that is made by the angle between vectors
    AB and AP, and r gives the fraction ||AP||/||AR|| where R is the point of intersection between
    lines AP and BC. If P is a (d x n)-sized matrix of points, then a (2 x n) matrix of addresses
    is returned.
    '''
    fx = np.asarray(fx)
    pt = np.asarray(pt)
    # The triangle edge vectors (rows of fx are the vertices A, B, C).
    ab = fx[1] - fx[0]
    ac = fx[2] - fx[0]
    bc = fx[2] - fx[1]
    # A->P, built per-dimension so pt may be a single point or a (d x n) matrix.
    ap = np.asarray([pt_i - a_i for (pt_i, a_i) in zip(pt, fx[0])])
    # get the unnormalized distance ||AP||...
    r = np.sqrt((ap ** 2).sum(0))
    # now we can find the angle...
    # `unit` is 1 exactly where r == 0 (P coincides with A); adding unit*ab
    # below substitutes the direction AB there so vector_angle gets a
    # non-zero vector instead of dividing by zero.
    unit = 1 - r.astype(bool)
    # NOTE(review): vector_angle is defined elsewhere — presumably returns the
    # angle between its two vector arguments; confirm its broadcasting rules.
    t0 = vector_angle(ab, ac)
    t = vector_angle(ap + [ab_i * unit for ab_i in ab], ab)
    sint = np.sin(t)
    sindt = np.sin(t0 - t)
    # finding r0 is trickier--we use this fancy formula based on the law of sines
    q0 = np.sqrt((bc ** 2).sum(0)) # B->C distance
    beta = vector_angle(-ab, bc) # Angle at B
    # Remaining angle of triangle ABR (angles sum to pi).
    sinGamma = np.sin(math.pi - beta - t0)
    sinBeta = np.sin(beta)
    # r0 = ||AR||, the distance from A through P to edge BC.
    r0 = q0 * sinBeta * sinGamma / (sinBeta * sindt + sinGamma * sint)
    # Normalize: t as fraction of the full angle, r as fraction of ||AR||.
    return np.asarray([t/t0, r/r0])
constant[
triangle_address(FX, P) yields an address coordinate (t,r) for the point P in the triangle
defined by the (3 x d)-sized coordinate matrix FX, in which each row of the matrix is the
d-dimensional vector representing the respective triangle vertx for triangle [A,B,C]. The
resulting coordinates (t,r) (0 <= t <= 1, 0 <= r <= 1) address the point P such that, if t gives
the fraction of the angle from vector AB to vector AC that is made by the angle between vectors
AB and AP, and r gives the fraction ||AP||/||AR|| where R is the point of intersection between
lines AP and BC. If P is a (d x n)-sized matrix of points, then a (2 x n) matrix of addresses
is returned.
]
variable[fx] assign[=] call[name[np].asarray, parameter[name[fx]]]
variable[pt] assign[=] call[name[np].asarray, parameter[name[pt]]]
variable[ab] assign[=] binary_operation[call[name[fx]][constant[1]] - call[name[fx]][constant[0]]]
variable[ac] assign[=] binary_operation[call[name[fx]][constant[2]] - call[name[fx]][constant[0]]]
variable[bc] assign[=] binary_operation[call[name[fx]][constant[2]] - call[name[fx]][constant[1]]]
variable[ap] assign[=] call[name[np].asarray, parameter[<ast.ListComp object at 0x7da1b0ebcb80>]]
variable[r] assign[=] call[name[np].sqrt, parameter[call[binary_operation[name[ap] ** constant[2]].sum, parameter[constant[0]]]]]
variable[unit] assign[=] binary_operation[constant[1] - call[name[r].astype, parameter[name[bool]]]]
variable[t0] assign[=] call[name[vector_angle], parameter[name[ab], name[ac]]]
variable[t] assign[=] call[name[vector_angle], parameter[binary_operation[name[ap] + <ast.ListComp object at 0x7da1b0ebd840>], name[ab]]]
variable[sint] assign[=] call[name[np].sin, parameter[name[t]]]
variable[sindt] assign[=] call[name[np].sin, parameter[binary_operation[name[t0] - name[t]]]]
variable[q0] assign[=] call[name[np].sqrt, parameter[call[binary_operation[name[bc] ** constant[2]].sum, parameter[constant[0]]]]]
variable[beta] assign[=] call[name[vector_angle], parameter[<ast.UnaryOp object at 0x7da1b0ebf100>, name[bc]]]
variable[sinGamma] assign[=] call[name[np].sin, parameter[binary_operation[binary_operation[name[math].pi - name[beta]] - name[t0]]]]
variable[sinBeta] assign[=] call[name[np].sin, parameter[name[beta]]]
variable[r0] assign[=] binary_operation[binary_operation[binary_operation[name[q0] * name[sinBeta]] * name[sinGamma]] / binary_operation[binary_operation[name[sinBeta] * name[sindt]] + binary_operation[name[sinGamma] * name[sint]]]]
return[call[name[np].asarray, parameter[list[[<ast.BinOp object at 0x7da1b0ebe860>, <ast.BinOp object at 0x7da1b0ebe740>]]]]] | keyword[def] identifier[triangle_address] ( identifier[fx] , identifier[pt] ):
literal[string]
identifier[fx] = identifier[np] . identifier[asarray] ( identifier[fx] )
identifier[pt] = identifier[np] . identifier[asarray] ( identifier[pt] )
identifier[ab] = identifier[fx] [ literal[int] ]- identifier[fx] [ literal[int] ]
identifier[ac] = identifier[fx] [ literal[int] ]- identifier[fx] [ literal[int] ]
identifier[bc] = identifier[fx] [ literal[int] ]- identifier[fx] [ literal[int] ]
identifier[ap] = identifier[np] . identifier[asarray] ([ identifier[pt_i] - identifier[a_i] keyword[for] ( identifier[pt_i] , identifier[a_i] ) keyword[in] identifier[zip] ( identifier[pt] , identifier[fx] [ literal[int] ])])
identifier[r] = identifier[np] . identifier[sqrt] (( identifier[ap] ** literal[int] ). identifier[sum] ( literal[int] ))
identifier[unit] = literal[int] - identifier[r] . identifier[astype] ( identifier[bool] )
identifier[t0] = identifier[vector_angle] ( identifier[ab] , identifier[ac] )
identifier[t] = identifier[vector_angle] ( identifier[ap] +[ identifier[ab_i] * identifier[unit] keyword[for] identifier[ab_i] keyword[in] identifier[ab] ], identifier[ab] )
identifier[sint] = identifier[np] . identifier[sin] ( identifier[t] )
identifier[sindt] = identifier[np] . identifier[sin] ( identifier[t0] - identifier[t] )
identifier[q0] = identifier[np] . identifier[sqrt] (( identifier[bc] ** literal[int] ). identifier[sum] ( literal[int] ))
identifier[beta] = identifier[vector_angle] (- identifier[ab] , identifier[bc] )
identifier[sinGamma] = identifier[np] . identifier[sin] ( identifier[math] . identifier[pi] - identifier[beta] - identifier[t0] )
identifier[sinBeta] = identifier[np] . identifier[sin] ( identifier[beta] )
identifier[r0] = identifier[q0] * identifier[sinBeta] * identifier[sinGamma] /( identifier[sinBeta] * identifier[sindt] + identifier[sinGamma] * identifier[sint] )
keyword[return] identifier[np] . identifier[asarray] ([ identifier[t] / identifier[t0] , identifier[r] / identifier[r0] ]) | def triangle_address(fx, pt):
"""
triangle_address(FX, P) yields an address coordinate (t,r) for the point P in the triangle
defined by the (3 x d)-sized coordinate matrix FX, in which each row of the matrix is the
d-dimensional vector representing the respective triangle vertx for triangle [A,B,C]. The
resulting coordinates (t,r) (0 <= t <= 1, 0 <= r <= 1) address the point P such that, if t gives
the fraction of the angle from vector AB to vector AC that is made by the angle between vectors
AB and AP, and r gives the fraction ||AP||/||AR|| where R is the point of intersection between
lines AP and BC. If P is a (d x n)-sized matrix of points, then a (2 x n) matrix of addresses
is returned.
"""
fx = np.asarray(fx)
pt = np.asarray(pt)
# The triangle vectors...
ab = fx[1] - fx[0]
ac = fx[2] - fx[0]
bc = fx[2] - fx[1]
ap = np.asarray([pt_i - a_i for (pt_i, a_i) in zip(pt, fx[0])])
# get the unnormalized distance...
r = np.sqrt((ap ** 2).sum(0))
# now we can find the angle...
unit = 1 - r.astype(bool)
t0 = vector_angle(ab, ac)
t = vector_angle(ap + [ab_i * unit for ab_i in ab], ab)
sint = np.sin(t)
sindt = np.sin(t0 - t)
# finding r0 is tricker--we use this fancy formula based on the law of sines
q0 = np.sqrt((bc ** 2).sum(0)) # B->C distance
beta = vector_angle(-ab, bc) # Angle at B
sinGamma = np.sin(math.pi - beta - t0)
sinBeta = np.sin(beta)
r0 = q0 * sinBeta * sinGamma / (sinBeta * sindt + sinGamma * sint)
return np.asarray([t / t0, r / r0]) |
def last_error(self):
    """Return the error output of the most recent failed command.

    Scans the execution log (entries of the form (cmd, rcode, output))
    for non-zero return codes and returns the output of the latest one.

    Raises:
        RuntimeError: if no command has been executed yet.
    """
    if not self.log:
        raise RuntimeError('Nothing executed')
    # Walk the log newest-first and return the first failure's output.
    for entry in reversed(self.log):
        if entry[1] != 0:
            return entry[2]
    # Odd case where there were no errors at all.
    return 'no last error'
constant[Get the output of the last command exevuted.]
if <ast.UnaryOp object at 0x7da1b021d000> begin[:]
<ast.Raise object at 0x7da1b021c640>
<ast.Try object at 0x7da1b021ffa0> | keyword[def] identifier[last_error] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[len] ( identifier[self] . identifier[log] ):
keyword[raise] identifier[RuntimeError] ( literal[string] )
keyword[try] :
identifier[errs] =[ identifier[l] keyword[for] identifier[l] keyword[in] identifier[self] . identifier[log] keyword[if] identifier[l] [ literal[int] ]!= literal[int] ]
keyword[return] identifier[errs] [- literal[int] ][ literal[int] ]
keyword[except] identifier[IndexError] :
keyword[return] literal[string] | def last_error(self):
"""Get the output of the last command exevuted."""
if not len(self.log):
raise RuntimeError('Nothing executed') # depends on [control=['if'], data=[]]
try:
errs = [l for l in self.log if l[1] != 0]
return errs[-1][2] # depends on [control=['try'], data=[]]
except IndexError:
# odd case where there were no errors
#TODO
return 'no last error' # depends on [control=['except'], data=[]] |
def instantiate_all(graph):
    """
    Instantiate all ObjectProxy objects in a nested hierarchy.

    Parameters
    ----------
    graph : dict or object
        A dictionary (or an ObjectProxy) containing the object graph
        loaded from a YAML file.

    Returns
    -------
    graph : dict or object
        The dictionary or object resulting after the recursive instantiation.
    """
    def should_instantiate(obj):
        # Proxies must be instantiated; dicts/lists may contain nested proxies.
        return isinstance(obj, (ObjectProxy, dict, list))

    if not isinstance(graph, list):
        # Replace values in place (key set is unchanged, so plain iteration
        # is safe here).
        for key in graph:
            if should_instantiate(graph[key]):
                graph[key] = instantiate_all(graph[key])
        if hasattr(graph, 'keys'):
            # Keys may be proxies too. Materialize the key list first:
            # mutating a dict while iterating its live key view raises
            # RuntimeError in Python 3.
            for key in list(graph.keys()):
                if should_instantiate(key):
                    new_key = instantiate_all(key)
                    # pop-then-assign keeps the entry even when the
                    # instantiated key compares equal to the original.
                    value = graph.pop(key)
                    graph[new_key] = value
    if isinstance(graph, ObjectProxy):
        graph = graph.instantiate()
    if isinstance(graph, list):
        for i, elem in enumerate(graph):
            if should_instantiate(elem):
                graph[i] = instantiate_all(elem)
    return graph
constant[
Instantiate all ObjectProxy objects in a nested hierarchy.
Parameters
----------
graph : dict or object
A dictionary (or an ObjectProxy) containing the object graph
loaded from a YAML file.
Returns
-------
graph : dict or object
The dictionary or object resulting after the recursive instantiation.
]
def function[should_instantiate, parameter[obj]]:
variable[classes] assign[=] list[[<ast.Name object at 0x7da204567af0>, <ast.Name object at 0x7da2045664a0>, <ast.Name object at 0x7da204564850>]]
return[compare[constant[True] in <ast.ListComp object at 0x7da2045652d0>]]
if <ast.UnaryOp object at 0x7da18f813ee0> begin[:]
for taget[name[key]] in starred[name[graph]] begin[:]
if call[name[should_instantiate], parameter[call[name[graph]][name[key]]]] begin[:]
call[name[graph]][name[key]] assign[=] call[name[instantiate_all], parameter[call[name[graph]][name[key]]]]
if call[name[hasattr], parameter[name[graph], constant[keys]]] begin[:]
for taget[name[key]] in starred[call[name[graph].keys, parameter[]]] begin[:]
if call[name[should_instantiate], parameter[name[key]]] begin[:]
variable[new_key] assign[=] call[name[instantiate_all], parameter[name[key]]]
call[name[graph]][name[new_key]] assign[=] call[name[graph]][name[key]]
<ast.Delete object at 0x7da18f810e50>
if call[name[isinstance], parameter[name[graph], name[ObjectProxy]]] begin[:]
variable[graph] assign[=] call[name[graph].instantiate, parameter[]]
if call[name[isinstance], parameter[name[graph], name[list]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18f8104c0>, <ast.Name object at 0x7da18f813f10>]]] in starred[call[name[enumerate], parameter[name[graph]]]] begin[:]
if call[name[should_instantiate], parameter[name[elem]]] begin[:]
call[name[graph]][name[i]] assign[=] call[name[instantiate_all], parameter[name[elem]]]
return[name[graph]] | keyword[def] identifier[instantiate_all] ( identifier[graph] ):
literal[string]
keyword[def] identifier[should_instantiate] ( identifier[obj] ):
identifier[classes] =[ identifier[ObjectProxy] , identifier[dict] , identifier[list] ]
keyword[return] keyword[True] keyword[in] [ identifier[isinstance] ( identifier[obj] , identifier[cls] ) keyword[for] identifier[cls] keyword[in] identifier[classes] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[graph] , identifier[list] ):
keyword[for] identifier[key] keyword[in] identifier[graph] :
keyword[if] identifier[should_instantiate] ( identifier[graph] [ identifier[key] ]):
identifier[graph] [ identifier[key] ]= identifier[instantiate_all] ( identifier[graph] [ identifier[key] ])
keyword[if] identifier[hasattr] ( identifier[graph] , literal[string] ):
keyword[for] identifier[key] keyword[in] identifier[graph] . identifier[keys] ():
keyword[if] identifier[should_instantiate] ( identifier[key] ):
identifier[new_key] = identifier[instantiate_all] ( identifier[key] )
identifier[graph] [ identifier[new_key] ]= identifier[graph] [ identifier[key] ]
keyword[del] identifier[graph] [ identifier[key] ]
keyword[if] identifier[isinstance] ( identifier[graph] , identifier[ObjectProxy] ):
identifier[graph] = identifier[graph] . identifier[instantiate] ()
keyword[if] identifier[isinstance] ( identifier[graph] , identifier[list] ):
keyword[for] identifier[i] , identifier[elem] keyword[in] identifier[enumerate] ( identifier[graph] ):
keyword[if] identifier[should_instantiate] ( identifier[elem] ):
identifier[graph] [ identifier[i] ]= identifier[instantiate_all] ( identifier[elem] )
keyword[return] identifier[graph] | def instantiate_all(graph):
"""
Instantiate all ObjectProxy objects in a nested hierarchy.
Parameters
----------
graph : dict or object
A dictionary (or an ObjectProxy) containing the object graph
loaded from a YAML file.
Returns
-------
graph : dict or object
The dictionary or object resulting after the recursive instantiation.
"""
def should_instantiate(obj):
classes = [ObjectProxy, dict, list]
return True in [isinstance(obj, cls) for cls in classes]
if not isinstance(graph, list):
for key in graph:
if should_instantiate(graph[key]):
graph[key] = instantiate_all(graph[key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
if hasattr(graph, 'keys'):
for key in graph.keys():
if should_instantiate(key):
new_key = instantiate_all(key)
graph[new_key] = graph[key]
del graph[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isinstance(graph, ObjectProxy):
graph = graph.instantiate() # depends on [control=['if'], data=[]]
if isinstance(graph, list):
for (i, elem) in enumerate(graph):
if should_instantiate(elem):
graph[i] = instantiate_all(elem) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return graph |
def encode(self, value, length, signed):
    '''
    Encode an integer value into a ``Bits`` object.

    :param value: value to encode
    :param length: length of value in bits
    :param signed: whether the value is encoded as a signed integer
    :raises ValueError: if an endianness mode is set but ``length``
        is not a whole number of bytes
    :return: ``Bits`` object holding the encoded value
    '''
    # Endianness (self._mode) is only meaningful on byte-aligned values;
    # a falsy mode skips the check, matching the bitstring format string below.
    if (length % 8 != 0) and self._mode:
        raise ValueError('cannot use endianness for non byte-aligned int')
    pre = '' if signed else 'u'
    fmt = '%sint%s:%d=%d' % (pre, self._mode, length, value)
    return Bits(fmt)
constant[
:param value: value to encode
:param length: length of value in bits
:param signed: is value signed
]
if <ast.BoolOp object at 0x7da207f98640> begin[:]
<ast.Raise object at 0x7da207f9a650>
variable[pre] assign[=] <ast.IfExp object at 0x7da207f9a2c0>
variable[fmt] assign[=] binary_operation[constant[%sint%s:%d=%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9b4c0>, <ast.Attribute object at 0x7da207f99330>, <ast.Name object at 0x7da207f99780>, <ast.Name object at 0x7da207f9b490>]]]
return[call[name[Bits], parameter[name[fmt]]]] | keyword[def] identifier[encode] ( identifier[self] , identifier[value] , identifier[length] , identifier[signed] ):
literal[string]
keyword[if] ( identifier[length] % literal[int] != literal[int] ) keyword[and] identifier[self] . identifier[_mode] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[pre] = literal[string] keyword[if] identifier[signed] keyword[else] literal[string]
identifier[fmt] = literal[string] %( identifier[pre] , identifier[self] . identifier[_mode] , identifier[length] , identifier[value] )
keyword[return] identifier[Bits] ( identifier[fmt] ) | def encode(self, value, length, signed):
"""
:param value: value to encode
:param length: length of value in bits
:param signed: is value signed
"""
if length % 8 != 0 and self._mode:
raise Exception('cannot use endianess for non bytes aligned int') # depends on [control=['if'], data=[]]
pre = '' if signed else 'u'
fmt = '%sint%s:%d=%d' % (pre, self._mode, length, value)
return Bits(fmt) |
def recipe_compile(backend, kitchen, recipe, variation):
    """
    Apply variables to a Recipe, producing its compiled OrderRun.
    """
    err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen)
    if use_kitchen is None:
        raise click.ClickException(err_str)
    # Fall back to the recipe inferred from the current working directory.
    if recipe is None:
        recipe = DKRecipeDisk.find_recipe_name()
        if recipe is None:
            raise click.ClickException('You must be in a recipe folder, or provide a recipe name.')
    banner = '%s - Get the Compiled OrderRun of Recipe %s.%s in Kitchen %s' % (
        get_datetime(), recipe, variation, use_kitchen)
    click.secho(banner, fg='green')
    check_and_print(
        DKCloudCommandRunner.get_compiled_serving(backend.dki, use_kitchen, recipe, variation))
constant[
Apply variables to a Recipe
]
<ast.Tuple object at 0x7da18f58d990> assign[=] call[name[Backend].get_kitchen_from_user, parameter[name[kitchen]]]
if compare[name[use_kitchen] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f58e4a0>
if compare[name[recipe] is constant[None]] begin[:]
variable[recipe] assign[=] call[name[DKRecipeDisk].find_recipe_name, parameter[]]
if compare[name[recipe] is constant[None]] begin[:]
<ast.Raise object at 0x7da18f58ceb0>
call[name[click].secho, parameter[binary_operation[constant[%s - Get the Compiled OrderRun of Recipe %s.%s in Kitchen %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58d330>, <ast.Name object at 0x7da18f58fa30>, <ast.Name object at 0x7da18dc04220>, <ast.Name object at 0x7da18dc07ca0>]]]]]
call[name[check_and_print], parameter[call[name[DKCloudCommandRunner].get_compiled_serving, parameter[name[backend].dki, name[use_kitchen], name[recipe], name[variation]]]]] | keyword[def] identifier[recipe_compile] ( identifier[backend] , identifier[kitchen] , identifier[recipe] , identifier[variation] ):
literal[string]
identifier[err_str] , identifier[use_kitchen] = identifier[Backend] . identifier[get_kitchen_from_user] ( identifier[kitchen] )
keyword[if] identifier[use_kitchen] keyword[is] keyword[None] :
keyword[raise] identifier[click] . identifier[ClickException] ( identifier[err_str] )
keyword[if] identifier[recipe] keyword[is] keyword[None] :
identifier[recipe] = identifier[DKRecipeDisk] . identifier[find_recipe_name] ()
keyword[if] identifier[recipe] keyword[is] keyword[None] :
keyword[raise] identifier[click] . identifier[ClickException] ( literal[string] )
identifier[click] . identifier[secho] ( literal[string] %( identifier[get_datetime] (), identifier[recipe] , identifier[variation] , identifier[use_kitchen] ),
identifier[fg] = literal[string] )
identifier[check_and_print] ( identifier[DKCloudCommandRunner] . identifier[get_compiled_serving] ( identifier[backend] . identifier[dki] , identifier[use_kitchen] , identifier[recipe] , identifier[variation] )) | def recipe_compile(backend, kitchen, recipe, variation):
"""
Apply variables to a Recipe
"""
(err_str, use_kitchen) = Backend.get_kitchen_from_user(kitchen)
if use_kitchen is None:
raise click.ClickException(err_str) # depends on [control=['if'], data=[]]
if recipe is None:
recipe = DKRecipeDisk.find_recipe_name()
if recipe is None:
raise click.ClickException('You must be in a recipe folder, or provide a recipe name.') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['recipe']]
click.secho('%s - Get the Compiled OrderRun of Recipe %s.%s in Kitchen %s' % (get_datetime(), recipe, variation, use_kitchen), fg='green')
check_and_print(DKCloudCommandRunner.get_compiled_serving(backend.dki, use_kitchen, recipe, variation)) |
def render_text(text, preformatted=False):
    """Render an HTML formatted text block with the specified text.

    Args:
      text: the text to render
      preformatted: whether the text should be rendered as preformatted
    Returns:
      The formatted HTML produced by an HtmlBuilder.
    """
    html = HtmlBuilder()
    html._render_text(text, preformatted=preformatted)
    return html._to_html()
constant[Renders an HTML formatted text block with the specified text.
Args:
text: the text to render
preformatted: whether the text should be rendered as preformatted
Returns:
The formatted HTML.
]
variable[builder] assign[=] call[name[HtmlBuilder], parameter[]]
call[name[builder]._render_text, parameter[name[text]]]
return[call[name[builder]._to_html, parameter[]]] | keyword[def] identifier[render_text] ( identifier[text] , identifier[preformatted] = keyword[False] ):
literal[string]
identifier[builder] = identifier[HtmlBuilder] ()
identifier[builder] . identifier[_render_text] ( identifier[text] , identifier[preformatted] = identifier[preformatted] )
keyword[return] identifier[builder] . identifier[_to_html] () | def render_text(text, preformatted=False):
"""Renders an HTML formatted text block with the specified text.
Args:
text: the text to render
preformatted: whether the text should be rendered as preformatted
Returns:
The formatted HTML.
"""
builder = HtmlBuilder()
builder._render_text(text, preformatted=preformatted)
return builder._to_html() |
def _(pymux, variables):
    " Go to previous active window. "
    previous = pymux.arrangement.get_previous_active_window()
    if previous:
        pymux.arrangement.set_active_window(previous)
constant[ Go to previous active window. ]
variable[w] assign[=] call[name[pymux].arrangement.get_previous_active_window, parameter[]]
if name[w] begin[:]
call[name[pymux].arrangement.set_active_window, parameter[name[w]]] | keyword[def] identifier[_] ( identifier[pymux] , identifier[variables] ):
literal[string]
identifier[w] = identifier[pymux] . identifier[arrangement] . identifier[get_previous_active_window] ()
keyword[if] identifier[w] :
identifier[pymux] . identifier[arrangement] . identifier[set_active_window] ( identifier[w] ) | def _(pymux, variables):
""" Go to previous active window. """
w = pymux.arrangement.get_previous_active_window()
if w:
pymux.arrangement.set_active_window(w) # depends on [control=['if'], data=[]] |
def derivativeY(self, mLvl, pLvl, MedShk):
    '''
    Evaluate the derivative of consumption and medical care with respect to
    permanent income at given levels of market resources, permanent income,
    and medical need shocks.
    Parameters
    ----------
    mLvl : np.array
        Market resource levels.
    pLvl : np.array
        Permanent income levels; should be same size as mLvl.
    MedShk : np.array
        Medical need shocks; should be same size as mLvl.
    Returns
    -------
    dcdp : np.array
        Derivative of consumption with respect to permanent income for each
        point in (xLvl,MedShk).
    dMeddp : np.array
        Derivative of medical care with respect to permanent income for each
        point in (xLvl,MedShk).
    '''
    # Chain rule: dc/dp = (dx/dp) * (dc/dx); the remainder of the total
    # expenditure derivative is spent on medical care at price MedPrice.
    total_exp = self.xFunc(mLvl, pLvl, MedShk)
    dTotal_dp = self.xFunc.derivativeY(mLvl, pLvl, MedShk)
    dCons_dx = self.cFunc.derivativeX(total_exp, MedShk)
    dCons_dp = dTotal_dp * dCons_dx
    dMed_dp = (dTotal_dp - dCons_dp) / self.MedPrice
    return dCons_dp, dMed_dp
constant[
Evaluate the derivative of consumption and medical care with respect to
permanent income at given levels of market resources, permanent income,
and medical need shocks.
Parameters
----------
mLvl : np.array
Market resource levels.
pLvl : np.array
Permanent income levels; should be same size as mLvl.
MedShk : np.array
Medical need shocks; should be same size as mLvl.
Returns
-------
dcdp : np.array
Derivative of consumption with respect to permanent income for each
point in (xLvl,MedShk).
dMeddp : np.array
Derivative of medical care with respect to permanent income for each
point in (xLvl,MedShk).
]
variable[xLvl] assign[=] call[name[self].xFunc, parameter[name[mLvl], name[pLvl], name[MedShk]]]
variable[dxdp] assign[=] call[name[self].xFunc.derivativeY, parameter[name[mLvl], name[pLvl], name[MedShk]]]
variable[dcdx] assign[=] call[name[self].cFunc.derivativeX, parameter[name[xLvl], name[MedShk]]]
variable[dcdp] assign[=] binary_operation[name[dxdp] * name[dcdx]]
variable[dMeddp] assign[=] binary_operation[binary_operation[name[dxdp] - name[dcdp]] / name[self].MedPrice]
return[tuple[[<ast.Name object at 0x7da204346950>, <ast.Name object at 0x7da204345360>]]] | keyword[def] identifier[derivativeY] ( identifier[self] , identifier[mLvl] , identifier[pLvl] , identifier[MedShk] ):
literal[string]
identifier[xLvl] = identifier[self] . identifier[xFunc] ( identifier[mLvl] , identifier[pLvl] , identifier[MedShk] )
identifier[dxdp] = identifier[self] . identifier[xFunc] . identifier[derivativeY] ( identifier[mLvl] , identifier[pLvl] , identifier[MedShk] )
identifier[dcdx] = identifier[self] . identifier[cFunc] . identifier[derivativeX] ( identifier[xLvl] , identifier[MedShk] )
identifier[dcdp] = identifier[dxdp] * identifier[dcdx]
identifier[dMeddp] =( identifier[dxdp] - identifier[dcdp] )/ identifier[self] . identifier[MedPrice]
keyword[return] identifier[dcdp] , identifier[dMeddp] | def derivativeY(self, mLvl, pLvl, MedShk):
"""
Evaluate the derivative of consumption and medical care with respect to
permanent income at given levels of market resources, permanent income,
and medical need shocks.
Parameters
----------
mLvl : np.array
Market resource levels.
pLvl : np.array
Permanent income levels; should be same size as mLvl.
MedShk : np.array
Medical need shocks; should be same size as mLvl.
Returns
-------
dcdp : np.array
Derivative of consumption with respect to permanent income for each
point in (xLvl,MedShk).
dMeddp : np.array
Derivative of medical care with respect to permanent income for each
point in (xLvl,MedShk).
"""
xLvl = self.xFunc(mLvl, pLvl, MedShk)
dxdp = self.xFunc.derivativeY(mLvl, pLvl, MedShk)
dcdx = self.cFunc.derivativeX(xLvl, MedShk)
dcdp = dxdp * dcdx
dMeddp = (dxdp - dcdp) / self.MedPrice
return (dcdp, dMeddp) |
def _get_firefox_start_cmd(self):
    """Return the command to start firefox.

    Resolution order depends on the host platform:

    * Darwin: the system-wide /Applications bundle, falling back to the
      same bundle path prefixed with the user's home directory.
    * Windows: the registry entry, then the default install location.
    * platform 'Java' with os._name == 'nt' (presumably Jython on
      Windows -- TODO confirm): the default install location.
    * Anything else: the first of ``firefox``/``iceweasel`` found via
      ``self.which``; raises RuntimeError when neither is found.
    """
    start_cmd = ""
    if platform.system() == "Darwin":
        start_cmd = "/Applications/Firefox.app/Contents/MacOS/firefox-bin"
        # fallback to homebrew installation for mac users
        # (prepending $HOME yields ~/Applications/Firefox.app/...)
        if not os.path.exists(start_cmd):
            start_cmd = os.path.expanduser("~") + start_cmd
    elif platform.system() == "Windows":
        # Registry lookup wins; fall back to the conventional install path.
        start_cmd = (self._find_exe_in_registry() or self._default_windows_location())
    elif platform.system() == 'Java' and os._name == 'nt':
        start_cmd = self._default_windows_location()
    else:
        # Probe the PATH for the known binary names, keeping the first hit.
        for ffname in ["firefox", "iceweasel"]:
            start_cmd = self.which(ffname)
            if start_cmd is not None:
                break
        else:
            # couldn't find firefox on the system path
            raise RuntimeError(
                "Could not find firefox in your system PATH." +
                " Please specify the firefox binary location or install firefox")
    return start_cmd
constant[Return the command to start firefox.]
variable[start_cmd] assign[=] constant[]
if compare[call[name[platform].system, parameter[]] equal[==] constant[Darwin]] begin[:]
variable[start_cmd] assign[=] constant[/Applications/Firefox.app/Contents/MacOS/firefox-bin]
if <ast.UnaryOp object at 0x7da1b1e17d30> begin[:]
variable[start_cmd] assign[=] binary_operation[call[name[os].path.expanduser, parameter[constant[~]]] + name[start_cmd]]
return[name[start_cmd]] | keyword[def] identifier[_get_firefox_start_cmd] ( identifier[self] ):
literal[string]
identifier[start_cmd] = literal[string]
keyword[if] identifier[platform] . identifier[system] ()== literal[string] :
identifier[start_cmd] = literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[start_cmd] ):
identifier[start_cmd] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )+ identifier[start_cmd]
keyword[elif] identifier[platform] . identifier[system] ()== literal[string] :
identifier[start_cmd] =( identifier[self] . identifier[_find_exe_in_registry] () keyword[or] identifier[self] . identifier[_default_windows_location] ())
keyword[elif] identifier[platform] . identifier[system] ()== literal[string] keyword[and] identifier[os] . identifier[_name] == literal[string] :
identifier[start_cmd] = identifier[self] . identifier[_default_windows_location] ()
keyword[else] :
keyword[for] identifier[ffname] keyword[in] [ literal[string] , literal[string] ]:
identifier[start_cmd] = identifier[self] . identifier[which] ( identifier[ffname] )
keyword[if] identifier[start_cmd] keyword[is] keyword[not] keyword[None] :
keyword[break]
keyword[else] :
keyword[raise] identifier[RuntimeError] (
literal[string] +
literal[string] )
keyword[return] identifier[start_cmd] | def _get_firefox_start_cmd(self):
"""Return the command to start firefox."""
start_cmd = ''
if platform.system() == 'Darwin':
start_cmd = '/Applications/Firefox.app/Contents/MacOS/firefox-bin'
# fallback to homebrew installation for mac users
if not os.path.exists(start_cmd):
start_cmd = os.path.expanduser('~') + start_cmd # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif platform.system() == 'Windows':
start_cmd = self._find_exe_in_registry() or self._default_windows_location() # depends on [control=['if'], data=[]]
elif platform.system() == 'Java' and os._name == 'nt':
start_cmd = self._default_windows_location() # depends on [control=['if'], data=[]]
else:
for ffname in ['firefox', 'iceweasel']:
start_cmd = self.which(ffname)
if start_cmd is not None:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ffname']]
else:
# couldn't find firefox on the system path
raise RuntimeError('Could not find firefox in your system PATH.' + ' Please specify the firefox binary location or install firefox')
return start_cmd |
def handle_set_row(self):
    """Apply a row change pushed by the server to the controller."""
    new_row = self.reader.int()
    logger.info(" -> row: %s", new_row)
    self.controller.row = new_row
constant[Read incoming row change from server]
variable[row] assign[=] call[name[self].reader.int, parameter[]]
call[name[logger].info, parameter[constant[ -> row: %s], name[row]]]
name[self].controller.row assign[=] name[row] | keyword[def] identifier[handle_set_row] ( identifier[self] ):
literal[string]
identifier[row] = identifier[self] . identifier[reader] . identifier[int] ()
identifier[logger] . identifier[info] ( literal[string] , identifier[row] )
identifier[self] . identifier[controller] . identifier[row] = identifier[row] | def handle_set_row(self):
"""Read incoming row change from server"""
row = self.reader.int()
logger.info(' -> row: %s', row)
self.controller.row = row |
async def _pause(self, ctx):
    """ Pauses/Resumes the current track. """
    player = self.bot.lavalink.players.get(ctx.guild.id)
    if not player.is_playing:
        return await ctx.send('Not playing.')
    # Flip the pause state and report which way it went.
    pausing = not player.paused
    await player.set_pause(pausing)
    await ctx.send('⏯ | Paused' if pausing else '⏯ | Resumed')
literal[string]
identifier[player] = identifier[self] . identifier[bot] . identifier[lavalink] . identifier[players] . identifier[get] ( identifier[ctx] . identifier[guild] . identifier[id] )
keyword[if] keyword[not] identifier[player] . identifier[is_playing] :
keyword[return] keyword[await] identifier[ctx] . identifier[send] ( literal[string] )
keyword[if] identifier[player] . identifier[paused] :
keyword[await] identifier[player] . identifier[set_pause] ( keyword[False] )
keyword[await] identifier[ctx] . identifier[send] ( literal[string] )
keyword[else] :
keyword[await] identifier[player] . identifier[set_pause] ( keyword[True] )
keyword[await] identifier[ctx] . identifier[send] ( literal[string] ) | async def _pause(self, ctx):
""" Pauses/Resumes the current track. """
player = self.bot.lavalink.players.get(ctx.guild.id)
if not player.is_playing:
return await ctx.send('Not playing.') # depends on [control=['if'], data=[]]
if player.paused:
await player.set_pause(False)
await ctx.send('⏯ | Resumed') # depends on [control=['if'], data=[]]
else:
await player.set_pause(True)
await ctx.send('⏯ | Paused') |
def available(self, context):
    """
    Box is available for the calling user.

    The first answer is cached on the instance; later calls return it
    without listing boxes again.

    :param resort.engine.execution.Context context:
        Current execution context.
    """
    if self.__available is None:
        target = context.resolve(self.__name)
        # Available iff some listed box matches the resolved name at version "0".
        self.__available = any(
            entry["name"] == target and entry["version"] == "0"
            for entry in self.__box_list()
        )
    return self.__available
constant[
Box is available for the calling user.
:param resort.engine.execution.Context context:
Current execution context.
]
if compare[name[self].__available is constant[None]] begin[:]
variable[avail] assign[=] constant[False]
variable[name] assign[=] call[name[context].resolve, parameter[name[self].__name]]
for taget[name[box]] in starred[call[name[self].__box_list, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b26ac430> begin[:]
variable[avail] assign[=] constant[True]
break
name[self].__available assign[=] name[avail]
return[name[self].__available] | keyword[def] identifier[available] ( identifier[self] , identifier[context] ):
literal[string]
keyword[if] identifier[self] . identifier[__available] keyword[is] keyword[None] :
identifier[avail] = keyword[False]
identifier[name] = identifier[context] . identifier[resolve] ( identifier[self] . identifier[__name] )
keyword[for] identifier[box] keyword[in] identifier[self] . identifier[__box_list] ():
keyword[if] identifier[box] [ literal[string] ]== identifier[name] keyword[and] identifier[box] [ literal[string] ]== literal[string] :
identifier[avail] = keyword[True]
keyword[break]
identifier[self] . identifier[__available] = identifier[avail]
keyword[return] identifier[self] . identifier[__available] | def available(self, context):
"""
Box is available for the calling user.
:param resort.engine.execution.Context context:
Current execution context.
"""
if self.__available is None:
avail = False
name = context.resolve(self.__name)
for box in self.__box_list():
if box['name'] == name and box['version'] == '0':
avail = True
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['box']]
self.__available = avail # depends on [control=['if'], data=[]]
return self.__available |
def fetch_mim_files(api_key, mim2genes=False, mimtitles=False, morbidmap=False, genemap2=False):
    """Fetch the requested OMIM files using an API key.

    Args:
        api_key(str): API key required to download OMIM data
        mim2genes(bool): fetch mim2gene.txt
        mimtitles(bool): fetch mimTitles.txt
        morbidmap(bool): fetch morbidmap.txt
        genemap2(bool): fetch genemap2.txt

    Returns:
        mim_files(dict): file name mapped to the fetched resource
    """
    LOG.info("Fetching OMIM files from https://omim.org/")
    download_base = 'https://data.omim.org/downloads/{0}'.format(api_key)
    requested = [
        ('mim2genes', 'https://omim.org/static/omim/data/mim2gene.txt', mim2genes),
        ('mimtitles', download_base + '/mimTitles.txt', mimtitles),
        ('morbidmap', download_base + '/morbidmap.txt', morbidmap),
        ('genemap2', download_base + '/genemap2.txt', genemap2),
    ]
    # Only flags explicitly set to True select a download.
    return {name: fetch_resource(url)
            for name, url, wanted in requested if wanted is True}
constant[Fetch the necessary mim files using a api key
Args:
api_key(str): A api key necessary to fetch mim data
Returns:
mim_files(dict): A dictionary with the neccesary files
]
call[name[LOG].info, parameter[constant[Fetching OMIM files from https://omim.org/]]]
variable[mim2genes_url] assign[=] constant[https://omim.org/static/omim/data/mim2gene.txt]
variable[mimtitles_url] assign[=] call[constant[https://data.omim.org/downloads/{0}/mimTitles.txt].format, parameter[name[api_key]]]
variable[morbidmap_url] assign[=] call[constant[https://data.omim.org/downloads/{0}/morbidmap.txt].format, parameter[name[api_key]]]
variable[genemap2_url] assign[=] call[constant[https://data.omim.org/downloads/{0}/genemap2.txt].format, parameter[name[api_key]]]
variable[mim_files] assign[=] dictionary[[], []]
variable[mim_urls] assign[=] dictionary[[], []]
if compare[name[mim2genes] is constant[True]] begin[:]
call[name[mim_urls]][constant[mim2genes]] assign[=] name[mim2genes_url]
if compare[name[mimtitles] is constant[True]] begin[:]
call[name[mim_urls]][constant[mimtitles]] assign[=] name[mimtitles_url]
if compare[name[morbidmap] is constant[True]] begin[:]
call[name[mim_urls]][constant[morbidmap]] assign[=] name[morbidmap_url]
if compare[name[genemap2] is constant[True]] begin[:]
call[name[mim_urls]][constant[genemap2]] assign[=] name[genemap2_url]
for taget[name[file_name]] in starred[name[mim_urls]] begin[:]
variable[url] assign[=] call[name[mim_urls]][name[file_name]]
call[name[mim_files]][name[file_name]] assign[=] call[name[fetch_resource], parameter[name[url]]]
return[name[mim_files]] | keyword[def] identifier[fetch_mim_files] ( identifier[api_key] , identifier[mim2genes] = keyword[False] , identifier[mimtitles] = keyword[False] , identifier[morbidmap] = keyword[False] , identifier[genemap2] = keyword[False] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] )
identifier[mim2genes_url] = literal[string]
identifier[mimtitles_url] = literal[string] . identifier[format] ( identifier[api_key] )
identifier[morbidmap_url] = literal[string] . identifier[format] ( identifier[api_key] )
identifier[genemap2_url] = literal[string] . identifier[format] ( identifier[api_key] )
identifier[mim_files] ={}
identifier[mim_urls] ={}
keyword[if] identifier[mim2genes] keyword[is] keyword[True] :
identifier[mim_urls] [ literal[string] ]= identifier[mim2genes_url]
keyword[if] identifier[mimtitles] keyword[is] keyword[True] :
identifier[mim_urls] [ literal[string] ]= identifier[mimtitles_url]
keyword[if] identifier[morbidmap] keyword[is] keyword[True] :
identifier[mim_urls] [ literal[string] ]= identifier[morbidmap_url]
keyword[if] identifier[genemap2] keyword[is] keyword[True] :
identifier[mim_urls] [ literal[string] ]= identifier[genemap2_url]
keyword[for] identifier[file_name] keyword[in] identifier[mim_urls] :
identifier[url] = identifier[mim_urls] [ identifier[file_name] ]
identifier[mim_files] [ identifier[file_name] ]= identifier[fetch_resource] ( identifier[url] )
keyword[return] identifier[mim_files] | def fetch_mim_files(api_key, mim2genes=False, mimtitles=False, morbidmap=False, genemap2=False):
"""Fetch the necessary mim files using a api key
Args:
api_key(str): A api key necessary to fetch mim data
Returns:
mim_files(dict): A dictionary with the neccesary files
"""
LOG.info('Fetching OMIM files from https://omim.org/')
mim2genes_url = 'https://omim.org/static/omim/data/mim2gene.txt'
mimtitles_url = 'https://data.omim.org/downloads/{0}/mimTitles.txt'.format(api_key)
morbidmap_url = 'https://data.omim.org/downloads/{0}/morbidmap.txt'.format(api_key)
genemap2_url = 'https://data.omim.org/downloads/{0}/genemap2.txt'.format(api_key)
mim_files = {}
mim_urls = {}
if mim2genes is True:
mim_urls['mim2genes'] = mim2genes_url # depends on [control=['if'], data=[]]
if mimtitles is True:
mim_urls['mimtitles'] = mimtitles_url # depends on [control=['if'], data=[]]
if morbidmap is True:
mim_urls['morbidmap'] = morbidmap_url # depends on [control=['if'], data=[]]
if genemap2 is True:
mim_urls['genemap2'] = genemap2_url # depends on [control=['if'], data=[]]
for file_name in mim_urls:
url = mim_urls[file_name]
mim_files[file_name] = fetch_resource(url) # depends on [control=['for'], data=['file_name']]
return mim_files |
def flatten(lst):
    """Recursively flatten a nested iterable, yielding leaf elements.

    Strings (per ``string_types``) are treated as leaves rather than
    iterables, so they are yielded whole.

    Args:
        lst (list): Possibly nested iterable to flatten
    Returns:
        generator: yields the leaf elements in order
    """
    # collections.Iterable was removed in Python 3.10; use collections.abc.
    from collections.abc import Iterable
    for elm in lst:
        if isinstance(elm, Iterable) and not isinstance(
                elm, string_types):
            for sub in flatten(elm):
                yield sub
        else:
            yield elm
constant[Flatten list.
Args:
lst (list): List to flatten
Returns:
generator
]
for taget[name[elm]] in starred[name[lst]] begin[:]
if <ast.BoolOp object at 0x7da20c6e6b90> begin[:]
for taget[name[sub]] in starred[call[name[flatten], parameter[name[elm]]]] begin[:]
<ast.Yield object at 0x7da1aff03010> | keyword[def] identifier[flatten] ( identifier[lst] ):
literal[string]
keyword[for] identifier[elm] keyword[in] identifier[lst] :
keyword[if] identifier[isinstance] ( identifier[elm] , identifier[collections] . identifier[Iterable] ) keyword[and] keyword[not] identifier[isinstance] (
identifier[elm] , identifier[string_types] ):
keyword[for] identifier[sub] keyword[in] identifier[flatten] ( identifier[elm] ):
keyword[yield] identifier[sub]
keyword[else] :
keyword[yield] identifier[elm] | def flatten(lst):
"""Flatten list.
Args:
lst (list): List to flatten
Returns:
generator
"""
for elm in lst:
if isinstance(elm, collections.Iterable) and (not isinstance(elm, string_types)):
for sub in flatten(elm):
yield sub # depends on [control=['for'], data=['sub']] # depends on [control=['if'], data=[]]
else:
yield elm # depends on [control=['for'], data=['elm']] |
def pop(self, name, default=SENTINEL):
    """Retrieve and remove *name* from the backing store.

    When *default* is supplied it is forwarded to the store's ``pop``;
    otherwise the missing-key behavior of the backing store applies.
    """
    if default is not SENTINEL:
        return self.__data__.pop(name, default)
    return self.__data__.pop(name)
constant[Retrieve and remove a value from the backing store, optionally with a default.]
if compare[name[default] is name[SENTINEL]] begin[:]
return[call[name[self].__data__.pop, parameter[name[name]]]]
return[call[name[self].__data__.pop, parameter[name[name], name[default]]]] | keyword[def] identifier[pop] ( identifier[self] , identifier[name] , identifier[default] = identifier[SENTINEL] ):
literal[string]
keyword[if] identifier[default] keyword[is] identifier[SENTINEL] :
keyword[return] identifier[self] . identifier[__data__] . identifier[pop] ( identifier[name] )
keyword[return] identifier[self] . identifier[__data__] . identifier[pop] ( identifier[name] , identifier[default] ) | def pop(self, name, default=SENTINEL):
"""Retrieve and remove a value from the backing store, optionally with a default."""
if default is SENTINEL:
return self.__data__.pop(name) # depends on [control=['if'], data=[]]
return self.__data__.pop(name, default) |
def opHaltStatusWS(symbols=None, on_data=None):
    '''https://iextrading.com/developer/docs/#operational-halt-status'''
    # Subscribe to the DEEP "ophaltstatus" channel for the given symbols.
    payload = {'symbols': _strToList(symbols), 'channels': ['ophaltstatus']}
    return _stream(_wsURL('deep'), (payload,), on_data)
constant[https://iextrading.com/developer/docs/#operational-halt-status]
variable[symbols] assign[=] call[name[_strToList], parameter[name[symbols]]]
variable[sendinit] assign[=] tuple[[<ast.Dict object at 0x7da1b0150580>]]
return[call[name[_stream], parameter[call[name[_wsURL], parameter[constant[deep]]], name[sendinit], name[on_data]]]] | keyword[def] identifier[opHaltStatusWS] ( identifier[symbols] = keyword[None] , identifier[on_data] = keyword[None] ):
literal[string]
identifier[symbols] = identifier[_strToList] ( identifier[symbols] )
identifier[sendinit] =({ literal[string] : identifier[symbols] , literal[string] :[ literal[string] ]},)
keyword[return] identifier[_stream] ( identifier[_wsURL] ( literal[string] ), identifier[sendinit] , identifier[on_data] ) | def opHaltStatusWS(symbols=None, on_data=None):
"""https://iextrading.com/developer/docs/#operational-halt-status"""
symbols = _strToList(symbols)
sendinit = ({'symbols': symbols, 'channels': ['ophaltstatus']},)
return _stream(_wsURL('deep'), sendinit, on_data) |
def from_mask(cls, dh_mask, lmax, nmax=None):
"""
Construct Slepian functions that are optimally concentrated within
the region specified by a mask.
Usage
-----
x = Slepian.from_mask(dh_mask, lmax, [nmax])
Returns
-------
x : Slepian class instance
Parameters
----------
dh_mask :ndarray, shape (nlat, nlon)
A Driscoll and Healy (1994) sampled grid describing the
concentration region R. All elements should either be 1 (for inside
the concentration region) or 0 (for outside the concentration
region). The grid must have dimensions nlon=nlat or nlon=2*nlat,
where nlat is even.
lmax : int
The spherical harmonic bandwidth of the Slepian functions.
nmax : int, optional, default = (lmax+1)**2
The number of best-concentrated eigenvalues and eigenfunctions to
return.
"""
if nmax is None:
nmax = (lmax + 1)**2
else:
if nmax > (lmax + 1)**2:
raise ValueError('nmax must be less than or equal to ' +
'(lmax + 1)**2. lmax = {:d} and nmax = {:d}'
.format(lmax, nmax))
if dh_mask.shape[0] % 2 != 0:
raise ValueError('The number of latitude bands in dh_mask ' +
'must be even. nlat = {:d}'
.format(dh_mask.shape[0]))
if dh_mask.shape[1] == dh_mask.shape[0]:
_sampling = 1
elif dh_mask.shape[1] == 2 * dh_mask.shape[0]:
_sampling = 2
else:
raise ValueError('dh_mask must be dimensioned as (n, n) or ' +
'(n, 2 * n). Input shape is ({:d}, {:d})'
.format(dh_mask.shape[0], dh_mask.shape[1]))
mask_lm = _shtools.SHExpandDH(dh_mask, sampling=_sampling, lmax_calc=0)
area = mask_lm[0, 0, 0] * 4 * _np.pi
tapers, eigenvalues = _shtools.SHReturnTapersMap(dh_mask, lmax,
ntapers=nmax)
return SlepianMask(tapers, eigenvalues, area, copy=False) | def function[from_mask, parameter[cls, dh_mask, lmax, nmax]]:
constant[
Construct Slepian functions that are optimally concentrated within
the region specified by a mask.
Usage
-----
x = Slepian.from_mask(dh_mask, lmax, [nmax])
Returns
-------
x : Slepian class instance
Parameters
----------
dh_mask :ndarray, shape (nlat, nlon)
A Driscoll and Healy (1994) sampled grid describing the
concentration region R. All elements should either be 1 (for inside
the concentration region) or 0 (for outside the concentration
region). The grid must have dimensions nlon=nlat or nlon=2*nlat,
where nlat is even.
lmax : int
The spherical harmonic bandwidth of the Slepian functions.
nmax : int, optional, default = (lmax+1)**2
The number of best-concentrated eigenvalues and eigenfunctions to
return.
]
if compare[name[nmax] is constant[None]] begin[:]
variable[nmax] assign[=] binary_operation[binary_operation[name[lmax] + constant[1]] ** constant[2]]
if compare[binary_operation[call[name[dh_mask].shape][constant[0]] <ast.Mod object at 0x7da2590d6920> constant[2]] not_equal[!=] constant[0]] begin[:]
<ast.Raise object at 0x7da20c6a9150>
if compare[call[name[dh_mask].shape][constant[1]] equal[==] call[name[dh_mask].shape][constant[0]]] begin[:]
variable[_sampling] assign[=] constant[1]
variable[mask_lm] assign[=] call[name[_shtools].SHExpandDH, parameter[name[dh_mask]]]
variable[area] assign[=] binary_operation[binary_operation[call[name[mask_lm]][tuple[[<ast.Constant object at 0x7da20c6a8700>, <ast.Constant object at 0x7da20c6a9fc0>, <ast.Constant object at 0x7da20c6a91e0>]]] * constant[4]] * name[_np].pi]
<ast.Tuple object at 0x7da20c6aa6b0> assign[=] call[name[_shtools].SHReturnTapersMap, parameter[name[dh_mask], name[lmax]]]
return[call[name[SlepianMask], parameter[name[tapers], name[eigenvalues], name[area]]]] | keyword[def] identifier[from_mask] ( identifier[cls] , identifier[dh_mask] , identifier[lmax] , identifier[nmax] = keyword[None] ):
literal[string]
keyword[if] identifier[nmax] keyword[is] keyword[None] :
identifier[nmax] =( identifier[lmax] + literal[int] )** literal[int]
keyword[else] :
keyword[if] identifier[nmax] >( identifier[lmax] + literal[int] )** literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string]
. identifier[format] ( identifier[lmax] , identifier[nmax] ))
keyword[if] identifier[dh_mask] . identifier[shape] [ literal[int] ]% literal[int] != literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string]
. identifier[format] ( identifier[dh_mask] . identifier[shape] [ literal[int] ]))
keyword[if] identifier[dh_mask] . identifier[shape] [ literal[int] ]== identifier[dh_mask] . identifier[shape] [ literal[int] ]:
identifier[_sampling] = literal[int]
keyword[elif] identifier[dh_mask] . identifier[shape] [ literal[int] ]== literal[int] * identifier[dh_mask] . identifier[shape] [ literal[int] ]:
identifier[_sampling] = literal[int]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] +
literal[string]
. identifier[format] ( identifier[dh_mask] . identifier[shape] [ literal[int] ], identifier[dh_mask] . identifier[shape] [ literal[int] ]))
identifier[mask_lm] = identifier[_shtools] . identifier[SHExpandDH] ( identifier[dh_mask] , identifier[sampling] = identifier[_sampling] , identifier[lmax_calc] = literal[int] )
identifier[area] = identifier[mask_lm] [ literal[int] , literal[int] , literal[int] ]* literal[int] * identifier[_np] . identifier[pi]
identifier[tapers] , identifier[eigenvalues] = identifier[_shtools] . identifier[SHReturnTapersMap] ( identifier[dh_mask] , identifier[lmax] ,
identifier[ntapers] = identifier[nmax] )
keyword[return] identifier[SlepianMask] ( identifier[tapers] , identifier[eigenvalues] , identifier[area] , identifier[copy] = keyword[False] ) | def from_mask(cls, dh_mask, lmax, nmax=None):
"""
Construct Slepian functions that are optimally concentrated within
the region specified by a mask.
Usage
-----
x = Slepian.from_mask(dh_mask, lmax, [nmax])
Returns
-------
x : Slepian class instance
Parameters
----------
dh_mask :ndarray, shape (nlat, nlon)
A Driscoll and Healy (1994) sampled grid describing the
concentration region R. All elements should either be 1 (for inside
the concentration region) or 0 (for outside the concentration
region). The grid must have dimensions nlon=nlat or nlon=2*nlat,
where nlat is even.
lmax : int
The spherical harmonic bandwidth of the Slepian functions.
nmax : int, optional, default = (lmax+1)**2
The number of best-concentrated eigenvalues and eigenfunctions to
return.
"""
if nmax is None:
nmax = (lmax + 1) ** 2 # depends on [control=['if'], data=['nmax']]
elif nmax > (lmax + 1) ** 2:
raise ValueError('nmax must be less than or equal to ' + '(lmax + 1)**2. lmax = {:d} and nmax = {:d}'.format(lmax, nmax)) # depends on [control=['if'], data=['nmax']]
if dh_mask.shape[0] % 2 != 0:
raise ValueError('The number of latitude bands in dh_mask ' + 'must be even. nlat = {:d}'.format(dh_mask.shape[0])) # depends on [control=['if'], data=[]]
if dh_mask.shape[1] == dh_mask.shape[0]:
_sampling = 1 # depends on [control=['if'], data=[]]
elif dh_mask.shape[1] == 2 * dh_mask.shape[0]:
_sampling = 2 # depends on [control=['if'], data=[]]
else:
raise ValueError('dh_mask must be dimensioned as (n, n) or ' + '(n, 2 * n). Input shape is ({:d}, {:d})'.format(dh_mask.shape[0], dh_mask.shape[1]))
mask_lm = _shtools.SHExpandDH(dh_mask, sampling=_sampling, lmax_calc=0)
area = mask_lm[0, 0, 0] * 4 * _np.pi
(tapers, eigenvalues) = _shtools.SHReturnTapersMap(dh_mask, lmax, ntapers=nmax)
return SlepianMask(tapers, eigenvalues, area, copy=False) |
def getBezierPaths(self,origin=None):
"""
This function returns array that can be used as a Cubic Bezier
Path in matplotlib.
The function returns two arrays, the first one contains
the verticies for each particles and has the shape
(Nvert, Nparticles, 2) where Nvert is the number of verticies.
The second array returned describes the type of verticies to be
used with matplotlib's Patch class.
Arguments
---------
origin : multiple, optional
If `origin` is None (default), then none of the
coordinates are shifted. If `origin` is an integer
then the particle with that index is used as the
origin. if `origin` is equal to `com`, then the
centre of mass is used as the origin.
Examples
--------
The following example reads in a SimulationArchive and plots
the trajectories as Cubic Bezier Curves. It also plots the
actual datapoints stored in the SimulationArchive.
Note that the SimulationArchive needs to have enough
datapoints to allow for smooth and reasonable orbits.
>>> from matplotlib.path import Path
>>> import matplotlib.patches as patches
>>> sa = rebound.SimulationArchive("test.bin")
>>> verts, codes = sa.getBezierPaths(origin=0)
>>> fig, ax = plt.subplots()
>>> for j in range(sa[0].N):
>>> path = Path(verts[:,j,:], codes)
>>> patch = patches.PathPatch(path, facecolor='none')
>>> ax.add_patch(patch)
>>> ax.scatter(verts[::3,j,0],verts[::3,j,1])
>>> ax.set_aspect('equal')
>>> ax.autoscale_view()
"""
import numpy as np
Npoints = len(self)*3-2
if len(self)<=1:
raise Runtim
Nparticles = self[0].N
verts = np.zeros((Npoints,Nparticles,2))
xy = np.zeros((len(self),Nparticles,2))
if origin=="com":
origin = -2
elif origin is not None:
try:
origin = int(origin)
except:
raise AttributeError("Cannot parse origin")
if origin<0 or origin>=Nparticles:
raise AttributeError("Origin index out of range")
for i, sim in enumerate(self):
if origin is None:
shift = (0,0,0,0)
elif origin == -2:
sp = sim.calculate_com()
shift = (sp.x,sp.y,sp.vx,sp.vy)
else:
sp = sim.particles[origin]
shift = (sp.x,sp.y,sp.vx,sp.vy)
for j in range(sim.N):
p = sim.particles[j]
if i==0:
verts[0,j] = p.x-shift[0],p.y-shift[1]
verts[1,j] = p.vx-shift[2], p.vy-shift[3]
else:
dt = sim.t-tlast # time since last snapshot
verts[-2+i*3,j] = verts[-2+i*3,j]*dt/3.+verts[-3+i*3,j]
verts[ 0+i*3,j] = p.x-shift[0],p.y-shift[1]
verts[-1+i*3,j] = -p.vx+shift[2], -p.vy+shift[3]
verts[-1+i*3,j] = verts[-1+i*3+0,j]*dt/3.+verts[ 0+i*3,j]
if i!=len(self)-1:
verts[+1+i*3,j] = p.vx-shift[2], p.vy-shift[3]
xy[i,j] = p.x,p.y
tlast = sim.t
codes = np.full(Npoints,4,dtype=np.uint8) # Hardcoded 4 = matplotlib.path.Path.CURVE4
codes[0] = 1 # Hardcoded 1 = matplotlib.path.Path.MOVETO
return verts, codes | def function[getBezierPaths, parameter[self, origin]]:
constant[
This function returns array that can be used as a Cubic Bezier
Path in matplotlib.
The function returns two arrays, the first one contains
the verticies for each particles and has the shape
(Nvert, Nparticles, 2) where Nvert is the number of verticies.
The second array returned describes the type of verticies to be
used with matplotlib's Patch class.
Arguments
---------
origin : multiple, optional
If `origin` is None (default), then none of the
coordinates are shifted. If `origin` is an integer
then the particle with that index is used as the
origin. if `origin` is equal to `com`, then the
centre of mass is used as the origin.
Examples
--------
The following example reads in a SimulationArchive and plots
the trajectories as Cubic Bezier Curves. It also plots the
actual datapoints stored in the SimulationArchive.
Note that the SimulationArchive needs to have enough
datapoints to allow for smooth and reasonable orbits.
>>> from matplotlib.path import Path
>>> import matplotlib.patches as patches
>>> sa = rebound.SimulationArchive("test.bin")
>>> verts, codes = sa.getBezierPaths(origin=0)
>>> fig, ax = plt.subplots()
>>> for j in range(sa[0].N):
>>> path = Path(verts[:,j,:], codes)
>>> patch = patches.PathPatch(path, facecolor='none')
>>> ax.add_patch(patch)
>>> ax.scatter(verts[::3,j,0],verts[::3,j,1])
>>> ax.set_aspect('equal')
>>> ax.autoscale_view()
]
import module[numpy] as alias[np]
variable[Npoints] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[self]]] * constant[3]] - constant[2]]
if compare[call[name[len], parameter[name[self]]] less_or_equal[<=] constant[1]] begin[:]
<ast.Raise object at 0x7da2043454b0>
variable[Nparticles] assign[=] call[name[self]][constant[0]].N
variable[verts] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da2043472b0>, <ast.Name object at 0x7da204345e40>, <ast.Constant object at 0x7da204344e50>]]]]
variable[xy] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da204347100>, <ast.Name object at 0x7da204347850>, <ast.Constant object at 0x7da204344100>]]]]
if compare[name[origin] equal[==] constant[com]] begin[:]
variable[origin] assign[=] <ast.UnaryOp object at 0x7da204345c60>
for taget[tuple[[<ast.Name object at 0x7da204345ae0>, <ast.Name object at 0x7da204345450>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:]
if compare[name[origin] is constant[None]] begin[:]
variable[shift] assign[=] tuple[[<ast.Constant object at 0x7da204347040>, <ast.Constant object at 0x7da204345900>, <ast.Constant object at 0x7da204344ee0>, <ast.Constant object at 0x7da2043462f0>]]
for taget[name[j]] in starred[call[name[range], parameter[name[sim].N]]] begin[:]
variable[p] assign[=] call[name[sim].particles][name[j]]
if compare[name[i] equal[==] constant[0]] begin[:]
call[name[verts]][tuple[[<ast.Constant object at 0x7da2043459c0>, <ast.Name object at 0x7da204346500>]]] assign[=] tuple[[<ast.BinOp object at 0x7da204346e90>, <ast.BinOp object at 0x7da204347e50>]]
call[name[verts]][tuple[[<ast.Constant object at 0x7da2043466e0>, <ast.Name object at 0x7da204345810>]]] assign[=] tuple[[<ast.BinOp object at 0x7da204347880>, <ast.BinOp object at 0x7da204345990>]]
call[name[xy]][tuple[[<ast.Name object at 0x7da18f812f20>, <ast.Name object at 0x7da18f813f10>]]] assign[=] tuple[[<ast.Attribute object at 0x7da18f811b10>, <ast.Attribute object at 0x7da18f811990>]]
variable[tlast] assign[=] name[sim].t
variable[codes] assign[=] call[name[np].full, parameter[name[Npoints], constant[4]]]
call[name[codes]][constant[0]] assign[=] constant[1]
return[tuple[[<ast.Name object at 0x7da18f813100>, <ast.Name object at 0x7da18f811690>]]] | keyword[def] identifier[getBezierPaths] ( identifier[self] , identifier[origin] = keyword[None] ):
literal[string]
keyword[import] identifier[numpy] keyword[as] identifier[np]
identifier[Npoints] = identifier[len] ( identifier[self] )* literal[int] - literal[int]
keyword[if] identifier[len] ( identifier[self] )<= literal[int] :
keyword[raise] identifier[Runtim]
identifier[Nparticles] = identifier[self] [ literal[int] ]. identifier[N]
identifier[verts] = identifier[np] . identifier[zeros] (( identifier[Npoints] , identifier[Nparticles] , literal[int] ))
identifier[xy] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[self] ), identifier[Nparticles] , literal[int] ))
keyword[if] identifier[origin] == literal[string] :
identifier[origin] =- literal[int]
keyword[elif] identifier[origin] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[origin] = identifier[int] ( identifier[origin] )
keyword[except] :
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[if] identifier[origin] < literal[int] keyword[or] identifier[origin] >= identifier[Nparticles] :
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[for] identifier[i] , identifier[sim] keyword[in] identifier[enumerate] ( identifier[self] ):
keyword[if] identifier[origin] keyword[is] keyword[None] :
identifier[shift] =( literal[int] , literal[int] , literal[int] , literal[int] )
keyword[elif] identifier[origin] ==- literal[int] :
identifier[sp] = identifier[sim] . identifier[calculate_com] ()
identifier[shift] =( identifier[sp] . identifier[x] , identifier[sp] . identifier[y] , identifier[sp] . identifier[vx] , identifier[sp] . identifier[vy] )
keyword[else] :
identifier[sp] = identifier[sim] . identifier[particles] [ identifier[origin] ]
identifier[shift] =( identifier[sp] . identifier[x] , identifier[sp] . identifier[y] , identifier[sp] . identifier[vx] , identifier[sp] . identifier[vy] )
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[sim] . identifier[N] ):
identifier[p] = identifier[sim] . identifier[particles] [ identifier[j] ]
keyword[if] identifier[i] == literal[int] :
identifier[verts] [ literal[int] , identifier[j] ]= identifier[p] . identifier[x] - identifier[shift] [ literal[int] ], identifier[p] . identifier[y] - identifier[shift] [ literal[int] ]
identifier[verts] [ literal[int] , identifier[j] ]= identifier[p] . identifier[vx] - identifier[shift] [ literal[int] ], identifier[p] . identifier[vy] - identifier[shift] [ literal[int] ]
keyword[else] :
identifier[dt] = identifier[sim] . identifier[t] - identifier[tlast]
identifier[verts] [- literal[int] + identifier[i] * literal[int] , identifier[j] ]= identifier[verts] [- literal[int] + identifier[i] * literal[int] , identifier[j] ]* identifier[dt] / literal[int] + identifier[verts] [- literal[int] + identifier[i] * literal[int] , identifier[j] ]
identifier[verts] [ literal[int] + identifier[i] * literal[int] , identifier[j] ]= identifier[p] . identifier[x] - identifier[shift] [ literal[int] ], identifier[p] . identifier[y] - identifier[shift] [ literal[int] ]
identifier[verts] [- literal[int] + identifier[i] * literal[int] , identifier[j] ]=- identifier[p] . identifier[vx] + identifier[shift] [ literal[int] ],- identifier[p] . identifier[vy] + identifier[shift] [ literal[int] ]
identifier[verts] [- literal[int] + identifier[i] * literal[int] , identifier[j] ]= identifier[verts] [- literal[int] + identifier[i] * literal[int] + literal[int] , identifier[j] ]* identifier[dt] / literal[int] + identifier[verts] [ literal[int] + identifier[i] * literal[int] , identifier[j] ]
keyword[if] identifier[i] != identifier[len] ( identifier[self] )- literal[int] :
identifier[verts] [+ literal[int] + identifier[i] * literal[int] , identifier[j] ]= identifier[p] . identifier[vx] - identifier[shift] [ literal[int] ], identifier[p] . identifier[vy] - identifier[shift] [ literal[int] ]
identifier[xy] [ identifier[i] , identifier[j] ]= identifier[p] . identifier[x] , identifier[p] . identifier[y]
identifier[tlast] = identifier[sim] . identifier[t]
identifier[codes] = identifier[np] . identifier[full] ( identifier[Npoints] , literal[int] , identifier[dtype] = identifier[np] . identifier[uint8] )
identifier[codes] [ literal[int] ]= literal[int]
keyword[return] identifier[verts] , identifier[codes] | def getBezierPaths(self, origin=None):
"""
This function returns array that can be used as a Cubic Bezier
Path in matplotlib.
The function returns two arrays, the first one contains
the verticies for each particles and has the shape
(Nvert, Nparticles, 2) where Nvert is the number of verticies.
The second array returned describes the type of verticies to be
used with matplotlib's Patch class.
Arguments
---------
origin : multiple, optional
If `origin` is None (default), then none of the
coordinates are shifted. If `origin` is an integer
then the particle with that index is used as the
origin. if `origin` is equal to `com`, then the
centre of mass is used as the origin.
Examples
--------
The following example reads in a SimulationArchive and plots
the trajectories as Cubic Bezier Curves. It also plots the
actual datapoints stored in the SimulationArchive.
Note that the SimulationArchive needs to have enough
datapoints to allow for smooth and reasonable orbits.
>>> from matplotlib.path import Path
>>> import matplotlib.patches as patches
>>> sa = rebound.SimulationArchive("test.bin")
>>> verts, codes = sa.getBezierPaths(origin=0)
>>> fig, ax = plt.subplots()
>>> for j in range(sa[0].N):
>>> path = Path(verts[:,j,:], codes)
>>> patch = patches.PathPatch(path, facecolor='none')
>>> ax.add_patch(patch)
>>> ax.scatter(verts[::3,j,0],verts[::3,j,1])
>>> ax.set_aspect('equal')
>>> ax.autoscale_view()
"""
import numpy as np
Npoints = len(self) * 3 - 2
if len(self) <= 1:
raise Runtim # depends on [control=['if'], data=[]]
Nparticles = self[0].N
verts = np.zeros((Npoints, Nparticles, 2))
xy = np.zeros((len(self), Nparticles, 2))
if origin == 'com':
origin = -2 # depends on [control=['if'], data=['origin']]
elif origin is not None:
try:
origin = int(origin) # depends on [control=['try'], data=[]]
except:
raise AttributeError('Cannot parse origin') # depends on [control=['except'], data=[]]
if origin < 0 or origin >= Nparticles:
raise AttributeError('Origin index out of range') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['origin']]
for (i, sim) in enumerate(self):
if origin is None:
shift = (0, 0, 0, 0) # depends on [control=['if'], data=[]]
elif origin == -2:
sp = sim.calculate_com()
shift = (sp.x, sp.y, sp.vx, sp.vy) # depends on [control=['if'], data=[]]
else:
sp = sim.particles[origin]
shift = (sp.x, sp.y, sp.vx, sp.vy)
for j in range(sim.N):
p = sim.particles[j]
if i == 0:
verts[0, j] = (p.x - shift[0], p.y - shift[1])
verts[1, j] = (p.vx - shift[2], p.vy - shift[3]) # depends on [control=['if'], data=[]]
else:
dt = sim.t - tlast # time since last snapshot
verts[-2 + i * 3, j] = verts[-2 + i * 3, j] * dt / 3.0 + verts[-3 + i * 3, j]
verts[0 + i * 3, j] = (p.x - shift[0], p.y - shift[1])
verts[-1 + i * 3, j] = (-p.vx + shift[2], -p.vy + shift[3])
verts[-1 + i * 3, j] = verts[-1 + i * 3 + 0, j] * dt / 3.0 + verts[0 + i * 3, j]
if i != len(self) - 1:
verts[+1 + i * 3, j] = (p.vx - shift[2], p.vy - shift[3]) # depends on [control=['if'], data=['i']]
xy[i, j] = (p.x, p.y) # depends on [control=['for'], data=['j']]
tlast = sim.t # depends on [control=['for'], data=[]]
codes = np.full(Npoints, 4, dtype=np.uint8) # Hardcoded 4 = matplotlib.path.Path.CURVE4
codes[0] = 1 # Hardcoded 1 = matplotlib.path.Path.MOVETO
return (verts, codes) |
def contains(self, stimtype):
"""Returns whether the specified stimlus type is a component in this stimulus
:param stimtype: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` subclass class name to test for membership in the components of this stimulus
:type stimtype: str
:returns: bool -- if the stimtype is in the model
"""
for track in self._segments:
for component in track:
if component.__class__.__name__ == stimtype:
return True
return False | def function[contains, parameter[self, stimtype]]:
constant[Returns whether the specified stimlus type is a component in this stimulus
:param stimtype: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` subclass class name to test for membership in the components of this stimulus
:type stimtype: str
:returns: bool -- if the stimtype is in the model
]
for taget[name[track]] in starred[name[self]._segments] begin[:]
for taget[name[component]] in starred[name[track]] begin[:]
if compare[name[component].__class__.__name__ equal[==] name[stimtype]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[contains] ( identifier[self] , identifier[stimtype] ):
literal[string]
keyword[for] identifier[track] keyword[in] identifier[self] . identifier[_segments] :
keyword[for] identifier[component] keyword[in] identifier[track] :
keyword[if] identifier[component] . identifier[__class__] . identifier[__name__] == identifier[stimtype] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def contains(self, stimtype):
"""Returns whether the specified stimlus type is a component in this stimulus
:param stimtype: :class:`AbstractStimulusComponent<sparkle.stim.abstract_component.AbstractStimulusComponent>` subclass class name to test for membership in the components of this stimulus
:type stimtype: str
:returns: bool -- if the stimtype is in the model
"""
for track in self._segments:
for component in track:
if component.__class__.__name__ == stimtype:
return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['component']] # depends on [control=['for'], data=['track']]
return False |
def _UpdateAndMigrateUnmerged(self, not_merged_stops, zone_map, merge_map,
schedule):
"""Correct references in migrated unmerged stops and add to merged_schedule.
For stops migrated from one of the input feeds to the output feed update the
parent_station and zone_id references to point to objects in the output
feed. Then add the migrated stop to the new schedule.
Args:
not_merged_stops: list of stops from one input feed that have not been
merged
zone_map: map from zone_id in the input feed to zone_id in the output feed
merge_map: map from Stop objects in the input feed to Stop objects in
the output feed
schedule: the input Schedule object
"""
# for the unmerged stops, we use an already mapped zone_id if possible
# if not, we generate a new one and add it to the map
for stop, migrated_stop in not_merged_stops:
if stop.zone_id in zone_map:
migrated_stop.zone_id = zone_map[stop.zone_id]
else:
migrated_stop.zone_id = self.feed_merger.GenerateId(stop.zone_id)
zone_map[stop.zone_id] = migrated_stop.zone_id
if stop.parent_station:
parent_original = schedule.GetStop(stop.parent_station)
migrated_stop.parent_station = merge_map[parent_original].stop_id
self.feed_merger.merged_schedule.AddStopObject(migrated_stop) | def function[_UpdateAndMigrateUnmerged, parameter[self, not_merged_stops, zone_map, merge_map, schedule]]:
constant[Correct references in migrated unmerged stops and add to merged_schedule.
For stops migrated from one of the input feeds to the output feed update the
parent_station and zone_id references to point to objects in the output
feed. Then add the migrated stop to the new schedule.
Args:
not_merged_stops: list of stops from one input feed that have not been
merged
zone_map: map from zone_id in the input feed to zone_id in the output feed
merge_map: map from Stop objects in the input feed to Stop objects in
the output feed
schedule: the input Schedule object
]
for taget[tuple[[<ast.Name object at 0x7da1b18495a0>, <ast.Name object at 0x7da1b18483d0>]]] in starred[name[not_merged_stops]] begin[:]
if compare[name[stop].zone_id in name[zone_map]] begin[:]
name[migrated_stop].zone_id assign[=] call[name[zone_map]][name[stop].zone_id]
if name[stop].parent_station begin[:]
variable[parent_original] assign[=] call[name[schedule].GetStop, parameter[name[stop].parent_station]]
name[migrated_stop].parent_station assign[=] call[name[merge_map]][name[parent_original]].stop_id
call[name[self].feed_merger.merged_schedule.AddStopObject, parameter[name[migrated_stop]]] | keyword[def] identifier[_UpdateAndMigrateUnmerged] ( identifier[self] , identifier[not_merged_stops] , identifier[zone_map] , identifier[merge_map] ,
identifier[schedule] ):
literal[string]
keyword[for] identifier[stop] , identifier[migrated_stop] keyword[in] identifier[not_merged_stops] :
keyword[if] identifier[stop] . identifier[zone_id] keyword[in] identifier[zone_map] :
identifier[migrated_stop] . identifier[zone_id] = identifier[zone_map] [ identifier[stop] . identifier[zone_id] ]
keyword[else] :
identifier[migrated_stop] . identifier[zone_id] = identifier[self] . identifier[feed_merger] . identifier[GenerateId] ( identifier[stop] . identifier[zone_id] )
identifier[zone_map] [ identifier[stop] . identifier[zone_id] ]= identifier[migrated_stop] . identifier[zone_id]
keyword[if] identifier[stop] . identifier[parent_station] :
identifier[parent_original] = identifier[schedule] . identifier[GetStop] ( identifier[stop] . identifier[parent_station] )
identifier[migrated_stop] . identifier[parent_station] = identifier[merge_map] [ identifier[parent_original] ]. identifier[stop_id]
identifier[self] . identifier[feed_merger] . identifier[merged_schedule] . identifier[AddStopObject] ( identifier[migrated_stop] ) | def _UpdateAndMigrateUnmerged(self, not_merged_stops, zone_map, merge_map, schedule):
"""Correct references in migrated unmerged stops and add to merged_schedule.
For stops migrated from one of the input feeds to the output feed update the
parent_station and zone_id references to point to objects in the output
feed. Then add the migrated stop to the new schedule.
Args:
not_merged_stops: list of stops from one input feed that have not been
merged
zone_map: map from zone_id in the input feed to zone_id in the output feed
merge_map: map from Stop objects in the input feed to Stop objects in
the output feed
schedule: the input Schedule object
"""
# for the unmerged stops, we use an already mapped zone_id if possible
# if not, we generate a new one and add it to the map
for (stop, migrated_stop) in not_merged_stops:
if stop.zone_id in zone_map:
migrated_stop.zone_id = zone_map[stop.zone_id] # depends on [control=['if'], data=['zone_map']]
else:
migrated_stop.zone_id = self.feed_merger.GenerateId(stop.zone_id)
zone_map[stop.zone_id] = migrated_stop.zone_id
if stop.parent_station:
parent_original = schedule.GetStop(stop.parent_station)
migrated_stop.parent_station = merge_map[parent_original].stop_id # depends on [control=['if'], data=[]]
self.feed_merger.merged_schedule.AddStopObject(migrated_stop) # depends on [control=['for'], data=[]] |
def getTypeFunc(self, data):
"""
Returns a callable that will encode C{data} to C{self.stream}. If
C{data} is unencodable, then C{None} is returned.
"""
if data is None:
return self.writeNull
t = type(data)
# try types that we know will work
if t is str or issubclass(t, str):
return self.writeBytes
if t is unicode or issubclass(t, unicode):
return self.writeString
elif t is bool:
return self.writeBoolean
elif t is float:
return self.writeNumber
elif t in python.int_types:
return self.writeNumber
elif t in (list, tuple):
return self.writeList
elif isinstance(data, (list, tuple)):
return self.writeSequence
elif t is types.GeneratorType:
return self.writeGenerator
elif t is pyamf.UndefinedType:
return self.writeUndefined
elif t in (datetime.date, datetime.datetime, datetime.time):
return self.writeDate
elif xml.is_xml(data):
return self.writeXML
# check for any overridden types
for type_, func in pyamf.TYPE_MAP.iteritems():
try:
if isinstance(data, type_):
return _CustomTypeFunc(self, func)
except TypeError:
if python.callable(type_) and type_(data):
return _CustomTypeFunc(self, func)
# now try some types that won't encode
if t in python.class_types:
# can't encode classes
return None
elif isinstance(data, python.func_types):
# can't encode code objects
return None
elif isinstance(t, types.ModuleType):
# cannot encode module objects
return None
# well, we tried ..
return self.writeObject | def function[getTypeFunc, parameter[self, data]]:
constant[
Returns a callable that will encode C{data} to C{self.stream}. If
C{data} is unencodable, then C{None} is returned.
]
if compare[name[data] is constant[None]] begin[:]
return[name[self].writeNull]
variable[t] assign[=] call[name[type], parameter[name[data]]]
if <ast.BoolOp object at 0x7da207f01a50> begin[:]
return[name[self].writeBytes]
if <ast.BoolOp object at 0x7da20e9b2b60> begin[:]
return[name[self].writeString]
for taget[tuple[[<ast.Name object at 0x7da20e9b2680>, <ast.Name object at 0x7da20e9b0550>]]] in starred[call[name[pyamf].TYPE_MAP.iteritems, parameter[]]] begin[:]
<ast.Try object at 0x7da20e9b0220>
if compare[name[t] in name[python].class_types] begin[:]
return[constant[None]]
return[name[self].writeObject] | keyword[def] identifier[getTypeFunc] ( identifier[self] , identifier[data] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
keyword[return] identifier[self] . identifier[writeNull]
identifier[t] = identifier[type] ( identifier[data] )
keyword[if] identifier[t] keyword[is] identifier[str] keyword[or] identifier[issubclass] ( identifier[t] , identifier[str] ):
keyword[return] identifier[self] . identifier[writeBytes]
keyword[if] identifier[t] keyword[is] identifier[unicode] keyword[or] identifier[issubclass] ( identifier[t] , identifier[unicode] ):
keyword[return] identifier[self] . identifier[writeString]
keyword[elif] identifier[t] keyword[is] identifier[bool] :
keyword[return] identifier[self] . identifier[writeBoolean]
keyword[elif] identifier[t] keyword[is] identifier[float] :
keyword[return] identifier[self] . identifier[writeNumber]
keyword[elif] identifier[t] keyword[in] identifier[python] . identifier[int_types] :
keyword[return] identifier[self] . identifier[writeNumber]
keyword[elif] identifier[t] keyword[in] ( identifier[list] , identifier[tuple] ):
keyword[return] identifier[self] . identifier[writeList]
keyword[elif] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )):
keyword[return] identifier[self] . identifier[writeSequence]
keyword[elif] identifier[t] keyword[is] identifier[types] . identifier[GeneratorType] :
keyword[return] identifier[self] . identifier[writeGenerator]
keyword[elif] identifier[t] keyword[is] identifier[pyamf] . identifier[UndefinedType] :
keyword[return] identifier[self] . identifier[writeUndefined]
keyword[elif] identifier[t] keyword[in] ( identifier[datetime] . identifier[date] , identifier[datetime] . identifier[datetime] , identifier[datetime] . identifier[time] ):
keyword[return] identifier[self] . identifier[writeDate]
keyword[elif] identifier[xml] . identifier[is_xml] ( identifier[data] ):
keyword[return] identifier[self] . identifier[writeXML]
keyword[for] identifier[type_] , identifier[func] keyword[in] identifier[pyamf] . identifier[TYPE_MAP] . identifier[iteritems] ():
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[data] , identifier[type_] ):
keyword[return] identifier[_CustomTypeFunc] ( identifier[self] , identifier[func] )
keyword[except] identifier[TypeError] :
keyword[if] identifier[python] . identifier[callable] ( identifier[type_] ) keyword[and] identifier[type_] ( identifier[data] ):
keyword[return] identifier[_CustomTypeFunc] ( identifier[self] , identifier[func] )
keyword[if] identifier[t] keyword[in] identifier[python] . identifier[class_types] :
keyword[return] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[python] . identifier[func_types] ):
keyword[return] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[t] , identifier[types] . identifier[ModuleType] ):
keyword[return] keyword[None]
keyword[return] identifier[self] . identifier[writeObject] | def getTypeFunc(self, data):
"""
Returns a callable that will encode C{data} to C{self.stream}. If
C{data} is unencodable, then C{None} is returned.
"""
if data is None:
return self.writeNull # depends on [control=['if'], data=[]]
t = type(data)
# try types that we know will work
if t is str or issubclass(t, str):
return self.writeBytes # depends on [control=['if'], data=[]]
if t is unicode or issubclass(t, unicode):
return self.writeString # depends on [control=['if'], data=[]]
elif t is bool:
return self.writeBoolean # depends on [control=['if'], data=[]]
elif t is float:
return self.writeNumber # depends on [control=['if'], data=[]]
elif t in python.int_types:
return self.writeNumber # depends on [control=['if'], data=[]]
elif t in (list, tuple):
return self.writeList # depends on [control=['if'], data=[]]
elif isinstance(data, (list, tuple)):
return self.writeSequence # depends on [control=['if'], data=[]]
elif t is types.GeneratorType:
return self.writeGenerator # depends on [control=['if'], data=[]]
elif t is pyamf.UndefinedType:
return self.writeUndefined # depends on [control=['if'], data=[]]
elif t in (datetime.date, datetime.datetime, datetime.time):
return self.writeDate # depends on [control=['if'], data=[]]
elif xml.is_xml(data):
return self.writeXML # depends on [control=['if'], data=[]]
# check for any overridden types
for (type_, func) in pyamf.TYPE_MAP.iteritems():
try:
if isinstance(data, type_):
return _CustomTypeFunc(self, func) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except TypeError:
if python.callable(type_) and type_(data):
return _CustomTypeFunc(self, func) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
# now try some types that won't encode
if t in python.class_types:
# can't encode classes
return None # depends on [control=['if'], data=[]]
elif isinstance(data, python.func_types):
# can't encode code objects
return None # depends on [control=['if'], data=[]]
elif isinstance(t, types.ModuleType):
# cannot encode module objects
return None # depends on [control=['if'], data=[]]
# well, we tried ..
return self.writeObject |
def calc_structured_append_parity(content):
    """\
    Calculates the parity data for the Structured Append mode.
    :param str content: The content.
    :rtype: int
    """
    # Non-string input is normalized to its string representation first.
    if not isinstance(content, str_type):
        content = str(content)
    # Prefer the compact encodings; fall back to UTF-8 as a last resort.
    try:
        encoded = content.encode('iso-8859-1')
    except UnicodeError:
        try:
            encoded = content.encode('shift-jis')
        except (LookupError, UnicodeError):
            encoded = content.encode('utf-8')
    if _PY2:
        # Python 2 iterates bytes as 1-char strings; convert to ordinals.
        encoded = (ord(ch) for ch in encoded)
    # The parity value is simply the XOR of every byte of the encoding.
    return reduce(xor, encoded)
constant[ Calculates the parity data for the Structured Append mode.
:param str content: The content.
:rtype: int
]
if <ast.UnaryOp object at 0x7da2054a4520> begin[:]
variable[content] assign[=] call[name[str], parameter[name[content]]]
<ast.Try object at 0x7da2054a5c00>
if name[_PY2] begin[:]
variable[data] assign[=] <ast.GeneratorExp object at 0x7da207f01cc0>
return[call[name[reduce], parameter[name[xor], name[data]]]] | keyword[def] identifier[calc_structured_append_parity] ( identifier[content] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[content] , identifier[str_type] ):
identifier[content] = identifier[str] ( identifier[content] )
keyword[try] :
identifier[data] = identifier[content] . identifier[encode] ( literal[string] )
keyword[except] identifier[UnicodeError] :
keyword[try] :
identifier[data] = identifier[content] . identifier[encode] ( literal[string] )
keyword[except] ( identifier[LookupError] , identifier[UnicodeError] ):
identifier[data] = identifier[content] . identifier[encode] ( literal[string] )
keyword[if] identifier[_PY2] :
identifier[data] =( identifier[ord] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[data] )
keyword[return] identifier[reduce] ( identifier[xor] , identifier[data] ) | def calc_structured_append_parity(content):
""" Calculates the parity data for the Structured Append mode.
:param str content: The content.
:rtype: int
"""
if not isinstance(content, str_type):
content = str(content) # depends on [control=['if'], data=[]]
try:
data = content.encode('iso-8859-1') # depends on [control=['try'], data=[]]
except UnicodeError:
try:
data = content.encode('shift-jis') # depends on [control=['try'], data=[]]
except (LookupError, UnicodeError):
data = content.encode('utf-8') # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
if _PY2:
data = (ord(c) for c in data) # depends on [control=['if'], data=[]]
return reduce(xor, data) |
def get_token(self, force=False):
    """
    Return the auth token, refreshing it when needed.

    The cached token is reused while it is still valid; passing
    ``force=True`` always requests a brand new token from the auth
    endpoint, even when the current one has not expired.
    """
    self.authenticated = self._has_valid_token()
    must_refresh = force or not self.authenticated
    if must_refresh:
        self.authenticate()
    return self.token
constant[
Returns the auth token, if it is valid. If not, calls the auth endpoint
to get a new token. Passing 'True' to 'force' forces a call for a new
token, even if there already is a valid token.
]
name[self].authenticated assign[=] call[name[self]._has_valid_token, parameter[]]
if <ast.BoolOp object at 0x7da1b05b0640> begin[:]
call[name[self].authenticate, parameter[]]
return[name[self].token] | keyword[def] identifier[get_token] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
identifier[self] . identifier[authenticated] = identifier[self] . identifier[_has_valid_token] ()
keyword[if] identifier[force] keyword[or] keyword[not] identifier[self] . identifier[authenticated] :
identifier[self] . identifier[authenticate] ()
keyword[return] identifier[self] . identifier[token] | def get_token(self, force=False):
"""
Returns the auth token, if it is valid. If not, calls the auth endpoint
to get a new token. Passing 'True' to 'force' forces a call for a new
token, even if there already is a valid token.
"""
self.authenticated = self._has_valid_token()
if force or not self.authenticated:
self.authenticate() # depends on [control=['if'], data=[]]
return self.token |
def _SafeEncodeBytes(field, value):
    """Encode the bytes in value as urlsafe base64."""
    try:
        if field.repeated:
            # Repeated fields are encoded element by element.
            encoded = [base64.urlsafe_b64encode(entry) for entry in value]
        else:
            encoded = base64.urlsafe_b64encode(value)
        complete = True
    except TypeError:
        # Unencodable (non-bytes) input: pass it through unmodified and
        # flag the result as incomplete.
        encoded = value
        complete = False
    return CodecResult(value=encoded, complete=complete)
return CodecResult(value=result, complete=complete) | def function[_SafeEncodeBytes, parameter[field, value]]:
constant[Encode the bytes in value as urlsafe base64.]
<ast.Try object at 0x7da1b080a3b0>
return[call[name[CodecResult], parameter[]]] | keyword[def] identifier[_SafeEncodeBytes] ( identifier[field] , identifier[value] ):
literal[string]
keyword[try] :
keyword[if] identifier[field] . identifier[repeated] :
identifier[result] =[ identifier[base64] . identifier[urlsafe_b64encode] ( identifier[byte] ) keyword[for] identifier[byte] keyword[in] identifier[value] ]
keyword[else] :
identifier[result] = identifier[base64] . identifier[urlsafe_b64encode] ( identifier[value] )
identifier[complete] = keyword[True]
keyword[except] identifier[TypeError] :
identifier[result] = identifier[value]
identifier[complete] = keyword[False]
keyword[return] identifier[CodecResult] ( identifier[value] = identifier[result] , identifier[complete] = identifier[complete] ) | def _SafeEncodeBytes(field, value):
"""Encode the bytes in value as urlsafe base64."""
try:
if field.repeated:
result = [base64.urlsafe_b64encode(byte) for byte in value] # depends on [control=['if'], data=[]]
else:
result = base64.urlsafe_b64encode(value)
complete = True # depends on [control=['try'], data=[]]
except TypeError:
result = value
complete = False # depends on [control=['except'], data=[]]
return CodecResult(value=result, complete=complete) |
def convert_deconvolution(node, **kwargs):
    """Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    kernel = list(parse_helper(attrs, "kernel"))
    strides = list(parse_helper(attrs, "stride", [1, 1]))
    pads = list(parse_helper(attrs, "pad", [0, 0]))
    groups = int(attrs.get("num_group", 1))
    dilations = list(parse_helper(attrs, "dilate", [1, 1]))
    output_padding = list(parse_helper(attrs, "adj", [0, 0]))

    # ONNX wants begin and end padding for every spatial axis, while MXNet
    # supplies a single symmetric value per axis - so duplicate the list.
    pads = pads + pads

    deconv_node = onnx.helper.make_node(
        "ConvTranspose",
        inputs=input_nodes,
        outputs=[name],
        kernel_shape=kernel,
        strides=strides,
        dilations=dilations,
        output_padding=output_padding,
        pads=pads,
        group=groups,
        name=name
    )
    return [deconv_node]
return [deconv_node] | def function[convert_deconvolution, parameter[node]]:
constant[Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator
and return the created node.
]
<ast.Tuple object at 0x7da1b20894b0> assign[=] call[name[get_inputs], parameter[name[node], name[kwargs]]]
variable[kernel_dims] assign[=] call[name[list], parameter[call[name[parse_helper], parameter[name[attrs], constant[kernel]]]]]
variable[stride_dims] assign[=] call[name[list], parameter[call[name[parse_helper], parameter[name[attrs], constant[stride], list[[<ast.Constant object at 0x7da1b208add0>, <ast.Constant object at 0x7da1b2089d20>]]]]]]
variable[pad_dims] assign[=] call[name[list], parameter[call[name[parse_helper], parameter[name[attrs], constant[pad], list[[<ast.Constant object at 0x7da1b208ac20>, <ast.Constant object at 0x7da1b208bca0>]]]]]]
variable[num_group] assign[=] call[name[int], parameter[call[name[attrs].get, parameter[constant[num_group], constant[1]]]]]
variable[dilations] assign[=] call[name[list], parameter[call[name[parse_helper], parameter[name[attrs], constant[dilate], list[[<ast.Constant object at 0x7da1b208a6e0>, <ast.Constant object at 0x7da1b208b7f0>]]]]]]
variable[adj_dims] assign[=] call[name[list], parameter[call[name[parse_helper], parameter[name[attrs], constant[adj], list[[<ast.Constant object at 0x7da1b20899f0>, <ast.Constant object at 0x7da1b208bac0>]]]]]]
variable[pad_dims] assign[=] binary_operation[name[pad_dims] + name[pad_dims]]
variable[deconv_node] assign[=] call[name[onnx].helper.make_node, parameter[constant[ConvTranspose]]]
return[list[[<ast.Name object at 0x7da1b208b370>]]] | keyword[def] identifier[convert_deconvolution] ( identifier[node] ,** identifier[kwargs] ):
literal[string]
identifier[name] , identifier[inputs] , identifier[attrs] = identifier[get_inputs] ( identifier[node] , identifier[kwargs] )
identifier[kernel_dims] = identifier[list] ( identifier[parse_helper] ( identifier[attrs] , literal[string] ))
identifier[stride_dims] = identifier[list] ( identifier[parse_helper] ( identifier[attrs] , literal[string] ,[ literal[int] , literal[int] ]))
identifier[pad_dims] = identifier[list] ( identifier[parse_helper] ( identifier[attrs] , literal[string] ,[ literal[int] , literal[int] ]))
identifier[num_group] = identifier[int] ( identifier[attrs] . identifier[get] ( literal[string] , literal[int] ))
identifier[dilations] = identifier[list] ( identifier[parse_helper] ( identifier[attrs] , literal[string] ,[ literal[int] , literal[int] ]))
identifier[adj_dims] = identifier[list] ( identifier[parse_helper] ( identifier[attrs] , literal[string] ,[ literal[int] , literal[int] ]))
identifier[pad_dims] = identifier[pad_dims] + identifier[pad_dims]
identifier[deconv_node] = identifier[onnx] . identifier[helper] . identifier[make_node] (
literal[string] ,
identifier[inputs] = identifier[inputs] ,
identifier[outputs] =[ identifier[name] ],
identifier[kernel_shape] = identifier[kernel_dims] ,
identifier[strides] = identifier[stride_dims] ,
identifier[dilations] = identifier[dilations] ,
identifier[output_padding] = identifier[adj_dims] ,
identifier[pads] = identifier[pad_dims] ,
identifier[group] = identifier[num_group] ,
identifier[name] = identifier[name]
)
keyword[return] [ identifier[deconv_node] ] | def convert_deconvolution(node, **kwargs):
"""Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator
and return the created node.
"""
(name, inputs, attrs) = get_inputs(node, kwargs)
kernel_dims = list(parse_helper(attrs, 'kernel'))
stride_dims = list(parse_helper(attrs, 'stride', [1, 1]))
pad_dims = list(parse_helper(attrs, 'pad', [0, 0]))
num_group = int(attrs.get('num_group', 1))
dilations = list(parse_helper(attrs, 'dilate', [1, 1]))
adj_dims = list(parse_helper(attrs, 'adj', [0, 0]))
pad_dims = pad_dims + pad_dims
deconv_node = onnx.helper.make_node('ConvTranspose', inputs=inputs, outputs=[name], kernel_shape=kernel_dims, strides=stride_dims, dilations=dilations, output_padding=adj_dims, pads=pad_dims, group=num_group, name=name)
return [deconv_node] |
def read_secret_metadata(self, path, mount_point=DEFAULT_MOUNT_POINT):
    """Return the metadata and version history for the secret at ``path``.

    Supported methods:
        GET: /{mount_point}/metadata/{path}. Produces: 200 application/json

    :param path: Path of the secret to read, appended to the request URL.
    :type path: str | unicode
    :param mount_point: The "path" the secret engine was mounted on.
    :type mount_point: str | unicode
    :return: The JSON response of the request.
    :rtype: dict
    """
    api_path = '/v1/{mount_point}/metadata/{path}'.format(
        mount_point=mount_point,
        path=path,
    )
    return self._adapter.get(url=api_path).json()
return response.json() | def function[read_secret_metadata, parameter[self, path, mount_point]]:
constant[Retrieve the metadata and versions for the secret at the specified path.
Supported methods:
GET: /{mount_point}/metadata/{path}. Produces: 200 application/json
:param path: Specifies the path of the secret to read. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
]
variable[api_path] assign[=] call[constant[/v1/{mount_point}/metadata/{path}].format, parameter[]]
variable[response] assign[=] call[name[self]._adapter.get, parameter[]]
return[call[name[response].json, parameter[]]] | keyword[def] identifier[read_secret_metadata] ( identifier[self] , identifier[path] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ):
literal[string]
identifier[api_path] = literal[string] . identifier[format] ( identifier[mount_point] = identifier[mount_point] , identifier[path] = identifier[path] )
identifier[response] = identifier[self] . identifier[_adapter] . identifier[get] (
identifier[url] = identifier[api_path] ,
)
keyword[return] identifier[response] . identifier[json] () | def read_secret_metadata(self, path, mount_point=DEFAULT_MOUNT_POINT):
"""Retrieve the metadata and versions for the secret at the specified path.
Supported methods:
GET: /{mount_point}/metadata/{path}. Produces: 200 application/json
:param path: Specifies the path of the secret to read. This is specified as part of the URL.
:type path: str | unicode
:param mount_point: The "path" the secret engine was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: dict
"""
api_path = '/v1/{mount_point}/metadata/{path}'.format(mount_point=mount_point, path=path)
response = self._adapter.get(url=api_path)
return response.json() |
def get_ssh_client(
    ip_addr,
    ssh_key=None,
    host_name=None,
    ssh_tries=None,
    propagate_fail=True,
    username='root',
    password='123456',
):
    """
    Get a connected SSH client
    Args:
        ip_addr(str): IP address of the endpoint
        ssh_key(str or list of str): Path to a file which
            contains the private key
        host_name(str): The hostname of the endpoint (used for logging
            only; defaults to ``ip_addr``)
        ssh_tries(int): The number of attempts to connect to the endpoint
        propagate_fail(bool): If set to true, this event will be in the log
            and fail the outer stage. Otherwise, it will be discarded.
        username(str): The username to authenticate with
        password(str): Used for password authentication
            or for private key decryption
    Returns:
        paramiko.SSHClient: a client already connected to ``ip_addr``
    Raises:
        :exc:`~LagoSSHTimeoutException`: If the client failed to connect after
            "ssh_tries"
    """
    # Fall back to the IP address for log messages when no hostname given.
    host_name = host_name or ip_addr
    with LogTask(
        'Get ssh client for %s' % host_name,
        level='debug',
        propagate_fail=propagate_fail,
    ):
        # Per-attempt connect timeout, taken from configuration.
        ssh_timeout = int(config.get('ssh_timeout'))
        if ssh_tries is None:
            # Retry budget from configuration; 10 attempts if unset.
            ssh_tries = int(config.get('ssh_tries', 10))
        start_time = time.time()
        client = paramiko.SSHClient()
        # Automatically accept unknown host keys.
        # NOTE(review): presumably acceptable for a lab/test tool - confirm.
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy(), )
        while ssh_tries > 0:
            try:
                client.connect(
                    ip_addr,
                    username=username,
                    password=password,
                    key_filename=ssh_key,
                    timeout=ssh_timeout,
                )
                # Connected successfully - leave the retry loop.
                break
            except (socket.error, socket.timeout) as err:
                LOGGER.debug(
                    'Socket error connecting to %s: %s',
                    host_name,
                    err,
                )
            except paramiko.ssh_exception.SSHException as err:
                LOGGER.debug(
                    'SSH error connecting to %s: %s',
                    host_name,
                    err,
                )
            except EOFError as err:
                LOGGER.debug('EOFError connecting to %s: %s', host_name, err)
            # All handled errors fall through to here: burn one try,
            # wait a second and reconnect.
            ssh_tries -= 1
            LOGGER.debug(
                'Still got %d tries for %s',
                ssh_tries,
                host_name,
            )
            time.sleep(1)
        else:
            # while/else: reached only when every try was exhausted
            # without hitting the ``break`` above.
            end_time = time.time()
            raise LagoSSHTimeoutException(
                'Timed out (in %d s) trying to ssh to %s' %
                (end_time - start_time, host_name)
            )
    return client
constant[
Get a connected SSH client
Args:
ip_addr(str): IP address of the endpoint
ssh_key(str or list of str): Path to a file which
contains the private key
hotname(str): The hostname of the endpoint
ssh_tries(int): The number of attempts to connect to the endpoint
propagate_fail(bool): If set to true, this event will be in the log
and fail the outer stage. Otherwise, it will be discarded.
username(str): The username to authenticate with
password(str): Used for password authentication
or for private key decryption
Raises:
:exc:`~LagoSSHTimeoutException`: If the client failed to connect after
"ssh_tries"
]
variable[host_name] assign[=] <ast.BoolOp object at 0x7da2041dbaf0>
with call[name[LogTask], parameter[binary_operation[constant[Get ssh client for %s] <ast.Mod object at 0x7da2590d6920> name[host_name]]]] begin[:]
variable[ssh_timeout] assign[=] call[name[int], parameter[call[name[config].get, parameter[constant[ssh_timeout]]]]]
if compare[name[ssh_tries] is constant[None]] begin[:]
variable[ssh_tries] assign[=] call[name[int], parameter[call[name[config].get, parameter[constant[ssh_tries], constant[10]]]]]
variable[start_time] assign[=] call[name[time].time, parameter[]]
variable[client] assign[=] call[name[paramiko].SSHClient, parameter[]]
call[name[client].set_missing_host_key_policy, parameter[call[name[paramiko].AutoAddPolicy, parameter[]]]]
while compare[name[ssh_tries] greater[>] constant[0]] begin[:]
<ast.Try object at 0x7da2041d9630>
<ast.AugAssign object at 0x7da2041d9de0>
call[name[LOGGER].debug, parameter[constant[Still got %d tries for %s], name[ssh_tries], name[host_name]]]
call[name[time].sleep, parameter[constant[1]]]
return[name[client]] | keyword[def] identifier[get_ssh_client] (
identifier[ip_addr] ,
identifier[ssh_key] = keyword[None] ,
identifier[host_name] = keyword[None] ,
identifier[ssh_tries] = keyword[None] ,
identifier[propagate_fail] = keyword[True] ,
identifier[username] = literal[string] ,
identifier[password] = literal[string] ,
):
literal[string]
identifier[host_name] = identifier[host_name] keyword[or] identifier[ip_addr]
keyword[with] identifier[LogTask] (
literal[string] % identifier[host_name] ,
identifier[level] = literal[string] ,
identifier[propagate_fail] = identifier[propagate_fail] ,
):
identifier[ssh_timeout] = identifier[int] ( identifier[config] . identifier[get] ( literal[string] ))
keyword[if] identifier[ssh_tries] keyword[is] keyword[None] :
identifier[ssh_tries] = identifier[int] ( identifier[config] . identifier[get] ( literal[string] , literal[int] ))
identifier[start_time] = identifier[time] . identifier[time] ()
identifier[client] = identifier[paramiko] . identifier[SSHClient] ()
identifier[client] . identifier[set_missing_host_key_policy] ( identifier[paramiko] . identifier[AutoAddPolicy] (),)
keyword[while] identifier[ssh_tries] > literal[int] :
keyword[try] :
identifier[client] . identifier[connect] (
identifier[ip_addr] ,
identifier[username] = identifier[username] ,
identifier[password] = identifier[password] ,
identifier[key_filename] = identifier[ssh_key] ,
identifier[timeout] = identifier[ssh_timeout] ,
)
keyword[break]
keyword[except] ( identifier[socket] . identifier[error] , identifier[socket] . identifier[timeout] ) keyword[as] identifier[err] :
identifier[LOGGER] . identifier[debug] (
literal[string] ,
identifier[host_name] ,
identifier[err] ,
)
keyword[except] identifier[paramiko] . identifier[ssh_exception] . identifier[SSHException] keyword[as] identifier[err] :
identifier[LOGGER] . identifier[debug] (
literal[string] ,
identifier[host_name] ,
identifier[err] ,
)
keyword[except] identifier[EOFError] keyword[as] identifier[err] :
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[host_name] , identifier[err] )
identifier[ssh_tries] -= literal[int]
identifier[LOGGER] . identifier[debug] (
literal[string] ,
identifier[ssh_tries] ,
identifier[host_name] ,
)
identifier[time] . identifier[sleep] ( literal[int] )
keyword[else] :
identifier[end_time] = identifier[time] . identifier[time] ()
keyword[raise] identifier[LagoSSHTimeoutException] (
literal[string] %
( identifier[end_time] - identifier[start_time] , identifier[host_name] )
)
keyword[return] identifier[client] | def get_ssh_client(ip_addr, ssh_key=None, host_name=None, ssh_tries=None, propagate_fail=True, username='root', password='123456'):
"""
Get a connected SSH client
Args:
ip_addr(str): IP address of the endpoint
ssh_key(str or list of str): Path to a file which
contains the private key
hotname(str): The hostname of the endpoint
ssh_tries(int): The number of attempts to connect to the endpoint
propagate_fail(bool): If set to true, this event will be in the log
and fail the outer stage. Otherwise, it will be discarded.
username(str): The username to authenticate with
password(str): Used for password authentication
or for private key decryption
Raises:
:exc:`~LagoSSHTimeoutException`: If the client failed to connect after
"ssh_tries"
"""
host_name = host_name or ip_addr
with LogTask('Get ssh client for %s' % host_name, level='debug', propagate_fail=propagate_fail):
ssh_timeout = int(config.get('ssh_timeout'))
if ssh_tries is None:
ssh_tries = int(config.get('ssh_tries', 10)) # depends on [control=['if'], data=['ssh_tries']]
start_time = time.time()
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
while ssh_tries > 0:
try:
client.connect(ip_addr, username=username, password=password, key_filename=ssh_key, timeout=ssh_timeout)
break # depends on [control=['try'], data=[]]
except (socket.error, socket.timeout) as err:
LOGGER.debug('Socket error connecting to %s: %s', host_name, err) # depends on [control=['except'], data=['err']]
except paramiko.ssh_exception.SSHException as err:
LOGGER.debug('SSH error connecting to %s: %s', host_name, err) # depends on [control=['except'], data=['err']]
except EOFError as err:
LOGGER.debug('EOFError connecting to %s: %s', host_name, err) # depends on [control=['except'], data=['err']]
ssh_tries -= 1
LOGGER.debug('Still got %d tries for %s', ssh_tries, host_name)
time.sleep(1) # depends on [control=['while'], data=['ssh_tries']]
else:
end_time = time.time()
raise LagoSSHTimeoutException('Timed out (in %d s) trying to ssh to %s' % (end_time - start_time, host_name)) # depends on [control=['with'], data=[]]
return client |
def formatfooter(self, previous_month, next_month):
    """
    Return a footer for a previous and next month.
    """
    def month_cell(month, css_class):
        # Build the monthly-archive link for ``month``, or a placeholder
        # non-breaking space when there is no month to link to.
        if not month:
            return '&nbsp;'
        url = reverse('zinnia:entry_archive_month',
                      args=[month.strftime('%Y'), month.strftime('%m')])
        return '<a href="%s" class="%s">%s</a>' % (
            url, css_class, date_format(month, 'YEAR_MONTH_FORMAT'))

    footer = ('<tfoot><tr>'
              '<td colspan="3" class="prev">%s</td>'
              '<td class="pad">&nbsp;</td>'
              '<td colspan="3" class="next">%s</td>'
              '</tr></tfoot>')
    return footer % (month_cell(previous_month, 'previous-month'),
                     month_cell(next_month, 'next-month'))
constant[
Return a footer for a previous and next month.
]
variable[footer] assign[=] constant[<tfoot><tr><td colspan="3" class="prev">%s</td><td class="pad"> </td><td colspan="3" class="next">%s</td></tr></tfoot>]
if name[previous_month] begin[:]
variable[previous_content] assign[=] binary_operation[constant[<a href="%s" class="previous-month">%s</a>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1d758d0>, <ast.Call object at 0x7da1b1d77a00>]]]
if name[next_month] begin[:]
variable[next_content] assign[=] binary_operation[constant[<a href="%s" class="next-month">%s</a>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1d763e0>, <ast.Call object at 0x7da1b1d77160>]]]
return[binary_operation[name[footer] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1d74eb0>, <ast.Name object at 0x7da1b1d77820>]]]] | keyword[def] identifier[formatfooter] ( identifier[self] , identifier[previous_month] , identifier[next_month] ):
literal[string]
identifier[footer] = literal[string] literal[string] literal[string] literal[string] literal[string]
keyword[if] identifier[previous_month] :
identifier[previous_content] = literal[string] %(
identifier[reverse] ( literal[string] , identifier[args] =[
identifier[previous_month] . identifier[strftime] ( literal[string] ),
identifier[previous_month] . identifier[strftime] ( literal[string] )]),
identifier[date_format] ( identifier[previous_month] , literal[string] ))
keyword[else] :
identifier[previous_content] = literal[string]
keyword[if] identifier[next_month] :
identifier[next_content] = literal[string] %(
identifier[reverse] ( literal[string] , identifier[args] =[
identifier[next_month] . identifier[strftime] ( literal[string] ),
identifier[next_month] . identifier[strftime] ( literal[string] )]),
identifier[date_format] ( identifier[next_month] , literal[string] ))
keyword[else] :
identifier[next_content] = literal[string]
keyword[return] identifier[footer] %( identifier[previous_content] , identifier[next_content] ) | def formatfooter(self, previous_month, next_month):
"""
Return a footer for a previous and next month.
"""
footer = '<tfoot><tr><td colspan="3" class="prev">%s</td><td class="pad"> </td><td colspan="3" class="next">%s</td></tr></tfoot>'
if previous_month:
previous_content = '<a href="%s" class="previous-month">%s</a>' % (reverse('zinnia:entry_archive_month', args=[previous_month.strftime('%Y'), previous_month.strftime('%m')]), date_format(previous_month, 'YEAR_MONTH_FORMAT')) # depends on [control=['if'], data=[]]
else:
previous_content = ' '
if next_month:
next_content = '<a href="%s" class="next-month">%s</a>' % (reverse('zinnia:entry_archive_month', args=[next_month.strftime('%Y'), next_month.strftime('%m')]), date_format(next_month, 'YEAR_MONTH_FORMAT')) # depends on [control=['if'], data=[]]
else:
next_content = ' '
return footer % (previous_content, next_content) |
def _get_writable_metadata(self):
    """Get the object / blob metadata which is writable.
    This is intended to be used when creating a new object / blob.
    See the `API reference docs`_ for more information, the fields
    marked as writable are:
    * ``acl``
    * ``cacheControl``
    * ``contentDisposition``
    * ``contentEncoding``
    * ``contentLanguage``
    * ``contentType``
    * ``crc32c``
    * ``md5Hash``
    * ``metadata``
    * ``name``
    * ``storageClass``
    For now, we don't support ``acl``, access control lists should be
    managed directly through :class:`ObjectACL` methods.
    """
    # NOTE: This assumes `self.name` is unicode.
    writable = {"name": self.name}
    # Only locally-changed properties that the API accepts on creation
    # are copied into the payload.
    writable.update(
        (prop, self._properties[prop])
        for prop in self._changes
        if prop in _WRITABLE_FIELDS
    )
    return writable
return object_metadata | def function[_get_writable_metadata, parameter[self]]:
constant[Get the object / blob metadata which is writable.
This is intended to be used when creating a new object / blob.
See the `API reference docs`_ for more information, the fields
marked as writable are:
* ``acl``
* ``cacheControl``
* ``contentDisposition``
* ``contentEncoding``
* ``contentLanguage``
* ``contentType``
* ``crc32c``
* ``md5Hash``
* ``metadata``
* ``name``
* ``storageClass``
For now, we don't support ``acl``, access control lists should be
managed directly through :class:`ObjectACL` methods.
]
variable[object_metadata] assign[=] dictionary[[<ast.Constant object at 0x7da1b2345a80>], [<ast.Attribute object at 0x7da20c6c54b0>]]
for taget[name[key]] in starred[name[self]._changes] begin[:]
if compare[name[key] in name[_WRITABLE_FIELDS]] begin[:]
call[name[object_metadata]][name[key]] assign[=] call[name[self]._properties][name[key]]
return[name[object_metadata]] | keyword[def] identifier[_get_writable_metadata] ( identifier[self] ):
literal[string]
identifier[object_metadata] ={ literal[string] : identifier[self] . identifier[name] }
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_changes] :
keyword[if] identifier[key] keyword[in] identifier[_WRITABLE_FIELDS] :
identifier[object_metadata] [ identifier[key] ]= identifier[self] . identifier[_properties] [ identifier[key] ]
keyword[return] identifier[object_metadata] | def _get_writable_metadata(self):
"""Get the object / blob metadata which is writable.
This is intended to be used when creating a new object / blob.
See the `API reference docs`_ for more information, the fields
marked as writable are:
* ``acl``
* ``cacheControl``
* ``contentDisposition``
* ``contentEncoding``
* ``contentLanguage``
* ``contentType``
* ``crc32c``
* ``md5Hash``
* ``metadata``
* ``name``
* ``storageClass``
For now, we don't support ``acl``, access control lists should be
managed directly through :class:`ObjectACL` methods.
"""
# NOTE: This assumes `self.name` is unicode.
object_metadata = {'name': self.name}
for key in self._changes:
if key in _WRITABLE_FIELDS:
object_metadata[key] = self._properties[key] # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']]
return object_metadata |
def throw(self, type, value=None, traceback=None): # pylint: disable=redefined-builtin
"""Raise an exception in this element"""
return self.__wrapped__.throw(type, value, traceback) | def function[throw, parameter[self, type, value, traceback]]:
constant[Raise an exception in this element]
return[call[name[self].__wrapped__.throw, parameter[name[type], name[value], name[traceback]]]] | keyword[def] identifier[throw] ( identifier[self] , identifier[type] , identifier[value] = keyword[None] , identifier[traceback] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[__wrapped__] . identifier[throw] ( identifier[type] , identifier[value] , identifier[traceback] ) | def throw(self, type, value=None, traceback=None): # pylint: disable=redefined-builtin
'Raise an exception in this element'
return self.__wrapped__.throw(type, value, traceback) |
def organize(**kwargs):
"""Read reactions from non-organized folder"""
# do argument wrangling before turning it into an obect
# since namedtuples are immutable
if len(kwargs['adsorbates']) == 0:
print("""Warning: no adsorbates specified,
can't pick up reaction reaction energies.""")
print(" Enter adsorbates like so --adsorbates CO,O,CO2")
print(" [Comma-separated list without spaces.]")
kwargs['adsorbates'] = list(map(
lambda x: (''.join(sorted(string2symbols(x)))),
kwargs['adsorbates'].split(','),
))
if kwargs['energy_corrections']:
e_c_dict = {}
for e_c in kwargs['energy_corrections'].split(','):
key, value = e_c.split('=')
e_c_dict.update({key: float(value)})
kwargs['energy_corrections'] = e_c_dict
options = collections.namedtuple(
'options',
kwargs.keys()
)(**kwargs)
_organize.main(options=options) | def function[organize, parameter[]]:
constant[Read reactions from non-organized folder]
if compare[call[name[len], parameter[call[name[kwargs]][constant[adsorbates]]]] equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[Warning: no adsorbates specified,
can't pick up reaction reaction energies.]]]
call[name[print], parameter[constant[ Enter adsorbates like so --adsorbates CO,O,CO2]]]
call[name[print], parameter[constant[ [Comma-separated list without spaces.]]]]
call[name[kwargs]][constant[adsorbates]] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da20c76e0b0>, call[call[name[kwargs]][constant[adsorbates]].split, parameter[constant[,]]]]]]]
if call[name[kwargs]][constant[energy_corrections]] begin[:]
variable[e_c_dict] assign[=] dictionary[[], []]
for taget[name[e_c]] in starred[call[call[name[kwargs]][constant[energy_corrections]].split, parameter[constant[,]]]] begin[:]
<ast.Tuple object at 0x7da20c76f730> assign[=] call[name[e_c].split, parameter[constant[=]]]
call[name[e_c_dict].update, parameter[dictionary[[<ast.Name object at 0x7da20c76e200>], [<ast.Call object at 0x7da20c76d6c0>]]]]
call[name[kwargs]][constant[energy_corrections]] assign[=] name[e_c_dict]
variable[options] assign[=] call[call[name[collections].namedtuple, parameter[constant[options], call[name[kwargs].keys, parameter[]]]], parameter[]]
call[name[_organize].main, parameter[]] | keyword[def] identifier[organize] (** identifier[kwargs] ):
literal[string]
keyword[if] identifier[len] ( identifier[kwargs] [ literal[string] ])== literal[int] :
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[print] ( literal[string] )
identifier[kwargs] [ literal[string] ]= identifier[list] ( identifier[map] (
keyword[lambda] identifier[x] :( literal[string] . identifier[join] ( identifier[sorted] ( identifier[string2symbols] ( identifier[x] )))),
identifier[kwargs] [ literal[string] ]. identifier[split] ( literal[string] ),
))
keyword[if] identifier[kwargs] [ literal[string] ]:
identifier[e_c_dict] ={}
keyword[for] identifier[e_c] keyword[in] identifier[kwargs] [ literal[string] ]. identifier[split] ( literal[string] ):
identifier[key] , identifier[value] = identifier[e_c] . identifier[split] ( literal[string] )
identifier[e_c_dict] . identifier[update] ({ identifier[key] : identifier[float] ( identifier[value] )})
identifier[kwargs] [ literal[string] ]= identifier[e_c_dict]
identifier[options] = identifier[collections] . identifier[namedtuple] (
literal[string] ,
identifier[kwargs] . identifier[keys] ()
)(** identifier[kwargs] )
identifier[_organize] . identifier[main] ( identifier[options] = identifier[options] ) | def organize(**kwargs):
"""Read reactions from non-organized folder"""
# do argument wrangling before turning it into an obect
# since namedtuples are immutable
if len(kwargs['adsorbates']) == 0:
print("Warning: no adsorbates specified,\n can't pick up reaction reaction energies.")
print(' Enter adsorbates like so --adsorbates CO,O,CO2')
print(' [Comma-separated list without spaces.]') # depends on [control=['if'], data=[]]
kwargs['adsorbates'] = list(map(lambda x: ''.join(sorted(string2symbols(x))), kwargs['adsorbates'].split(',')))
if kwargs['energy_corrections']:
e_c_dict = {}
for e_c in kwargs['energy_corrections'].split(','):
(key, value) = e_c.split('=')
e_c_dict.update({key: float(value)}) # depends on [control=['for'], data=['e_c']]
kwargs['energy_corrections'] = e_c_dict # depends on [control=['if'], data=[]]
options = collections.namedtuple('options', kwargs.keys())(**kwargs)
_organize.main(options=options) |
def predict(self, X):
"""
Use the POU value we computed in fit to choose randomly between GPEi and
uniform random selection.
"""
if np.random.random() < self.POU:
# choose params at random to avoid local minima
return Uniform(self.tunables).predict(X)
return super(GPEiVelocity, self).predict(X) | def function[predict, parameter[self, X]]:
constant[
Use the POU value we computed in fit to choose randomly between GPEi and
uniform random selection.
]
if compare[call[name[np].random.random, parameter[]] less[<] name[self].POU] begin[:]
return[call[call[name[Uniform], parameter[name[self].tunables]].predict, parameter[name[X]]]]
return[call[call[name[super], parameter[name[GPEiVelocity], name[self]]].predict, parameter[name[X]]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[X] ):
literal[string]
keyword[if] identifier[np] . identifier[random] . identifier[random] ()< identifier[self] . identifier[POU] :
keyword[return] identifier[Uniform] ( identifier[self] . identifier[tunables] ). identifier[predict] ( identifier[X] )
keyword[return] identifier[super] ( identifier[GPEiVelocity] , identifier[self] ). identifier[predict] ( identifier[X] ) | def predict(self, X):
"""
Use the POU value we computed in fit to choose randomly between GPEi and
uniform random selection.
"""
if np.random.random() < self.POU:
# choose params at random to avoid local minima
return Uniform(self.tunables).predict(X) # depends on [control=['if'], data=[]]
return super(GPEiVelocity, self).predict(X) |
def get_redirect_url(self, *args, **kwargs):
"""
Return the URL redirect to. Keyword arguments from the
URL pattern match generating the redirect request
are provided as kwargs to this method.
"""
if self.url:
url = self.url % kwargs
elif self.pattern_name:
try:
url = reverse(self.pattern_name, args=args, kwargs=kwargs)
except NoReverseMatch:
return None
else:
return None
return url | def function[get_redirect_url, parameter[self]]:
constant[
Return the URL redirect to. Keyword arguments from the
URL pattern match generating the redirect request
are provided as kwargs to this method.
]
if name[self].url begin[:]
variable[url] assign[=] binary_operation[name[self].url <ast.Mod object at 0x7da2590d6920> name[kwargs]]
return[name[url]] | keyword[def] identifier[get_redirect_url] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[self] . identifier[url] :
identifier[url] = identifier[self] . identifier[url] % identifier[kwargs]
keyword[elif] identifier[self] . identifier[pattern_name] :
keyword[try] :
identifier[url] = identifier[reverse] ( identifier[self] . identifier[pattern_name] , identifier[args] = identifier[args] , identifier[kwargs] = identifier[kwargs] )
keyword[except] identifier[NoReverseMatch] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] keyword[None]
keyword[return] identifier[url] | def get_redirect_url(self, *args, **kwargs):
"""
Return the URL redirect to. Keyword arguments from the
URL pattern match generating the redirect request
are provided as kwargs to this method.
"""
if self.url:
url = self.url % kwargs # depends on [control=['if'], data=[]]
elif self.pattern_name:
try:
url = reverse(self.pattern_name, args=args, kwargs=kwargs) # depends on [control=['try'], data=[]]
except NoReverseMatch:
return None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
return None
return url |
def write_spo(sub, prd, obj):
'''
write triples to a buffer incase we decide to drop them
'''
rcvtriples.append(make_spo(sub, prd, obj)) | def function[write_spo, parameter[sub, prd, obj]]:
constant[
write triples to a buffer incase we decide to drop them
]
call[name[rcvtriples].append, parameter[call[name[make_spo], parameter[name[sub], name[prd], name[obj]]]]] | keyword[def] identifier[write_spo] ( identifier[sub] , identifier[prd] , identifier[obj] ):
literal[string]
identifier[rcvtriples] . identifier[append] ( identifier[make_spo] ( identifier[sub] , identifier[prd] , identifier[obj] )) | def write_spo(sub, prd, obj):
"""
write triples to a buffer incase we decide to drop them
"""
rcvtriples.append(make_spo(sub, prd, obj)) |
def _bisect(self, begin, end, listener):
"""This method search for the zero-crossing of the watched parameter
Args:
begin (Orbit):
end (Orbit)
listener (Listener)
Return
Return
"""
step = (end.date - begin.date) / 2
while abs(step) >= self._eps_bisect:
date = begin.date + step
if self.SPEAKER_MODE == "global":
orb = self.propagate(date)
else:
orb = begin.propagate(date)
if listener(begin) * listener(orb) > 0:
begin = orb
else:
end = orb
step = (end.date - begin.date) / 2
else:
end.event = listener.info(end)
return end | def function[_bisect, parameter[self, begin, end, listener]]:
constant[This method search for the zero-crossing of the watched parameter
Args:
begin (Orbit):
end (Orbit)
listener (Listener)
Return
Return
]
variable[step] assign[=] binary_operation[binary_operation[name[end].date - name[begin].date] / constant[2]]
while compare[call[name[abs], parameter[name[step]]] greater_or_equal[>=] name[self]._eps_bisect] begin[:]
variable[date] assign[=] binary_operation[name[begin].date + name[step]]
if compare[name[self].SPEAKER_MODE equal[==] constant[global]] begin[:]
variable[orb] assign[=] call[name[self].propagate, parameter[name[date]]]
if compare[binary_operation[call[name[listener], parameter[name[begin]]] * call[name[listener], parameter[name[orb]]]] greater[>] constant[0]] begin[:]
variable[begin] assign[=] name[orb]
variable[step] assign[=] binary_operation[binary_operation[name[end].date - name[begin].date] / constant[2]] | keyword[def] identifier[_bisect] ( identifier[self] , identifier[begin] , identifier[end] , identifier[listener] ):
literal[string]
identifier[step] =( identifier[end] . identifier[date] - identifier[begin] . identifier[date] )/ literal[int]
keyword[while] identifier[abs] ( identifier[step] )>= identifier[self] . identifier[_eps_bisect] :
identifier[date] = identifier[begin] . identifier[date] + identifier[step]
keyword[if] identifier[self] . identifier[SPEAKER_MODE] == literal[string] :
identifier[orb] = identifier[self] . identifier[propagate] ( identifier[date] )
keyword[else] :
identifier[orb] = identifier[begin] . identifier[propagate] ( identifier[date] )
keyword[if] identifier[listener] ( identifier[begin] )* identifier[listener] ( identifier[orb] )> literal[int] :
identifier[begin] = identifier[orb]
keyword[else] :
identifier[end] = identifier[orb]
identifier[step] =( identifier[end] . identifier[date] - identifier[begin] . identifier[date] )/ literal[int]
keyword[else] :
identifier[end] . identifier[event] = identifier[listener] . identifier[info] ( identifier[end] )
keyword[return] identifier[end] | def _bisect(self, begin, end, listener):
"""This method search for the zero-crossing of the watched parameter
Args:
begin (Orbit):
end (Orbit)
listener (Listener)
Return
Return
"""
step = (end.date - begin.date) / 2
while abs(step) >= self._eps_bisect:
date = begin.date + step
if self.SPEAKER_MODE == 'global':
orb = self.propagate(date) # depends on [control=['if'], data=[]]
else:
orb = begin.propagate(date)
if listener(begin) * listener(orb) > 0:
begin = orb # depends on [control=['if'], data=[]]
else:
end = orb
step = (end.date - begin.date) / 2 # depends on [control=['while'], data=[]]
else:
end.event = listener.info(end)
return end |
def vnormg(v, ndim):
"""
Compute the magnitude of a double precision vector of arbitrary dimension.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnormg_c.html
:param v: Vector whose magnitude is to be found.
:type v: Array of floats
:param ndim: Dimension of v
:type ndim: int
:return: magnitude of v calculated in a numerically stable way
:rtype: float
"""
v = stypes.toDoubleVector(v)
ndim = ctypes.c_int(ndim)
return libspice.vnormg_c(v, ndim) | def function[vnormg, parameter[v, ndim]]:
constant[
Compute the magnitude of a double precision vector of arbitrary dimension.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnormg_c.html
:param v: Vector whose magnitude is to be found.
:type v: Array of floats
:param ndim: Dimension of v
:type ndim: int
:return: magnitude of v calculated in a numerically stable way
:rtype: float
]
variable[v] assign[=] call[name[stypes].toDoubleVector, parameter[name[v]]]
variable[ndim] assign[=] call[name[ctypes].c_int, parameter[name[ndim]]]
return[call[name[libspice].vnormg_c, parameter[name[v], name[ndim]]]] | keyword[def] identifier[vnormg] ( identifier[v] , identifier[ndim] ):
literal[string]
identifier[v] = identifier[stypes] . identifier[toDoubleVector] ( identifier[v] )
identifier[ndim] = identifier[ctypes] . identifier[c_int] ( identifier[ndim] )
keyword[return] identifier[libspice] . identifier[vnormg_c] ( identifier[v] , identifier[ndim] ) | def vnormg(v, ndim):
"""
Compute the magnitude of a double precision vector of arbitrary dimension.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/vnormg_c.html
:param v: Vector whose magnitude is to be found.
:type v: Array of floats
:param ndim: Dimension of v
:type ndim: int
:return: magnitude of v calculated in a numerically stable way
:rtype: float
"""
v = stypes.toDoubleVector(v)
ndim = ctypes.c_int(ndim)
return libspice.vnormg_c(v, ndim) |
def get(key, profile=None): # pylint: disable=W0613
'''
Get a value from the dictionary
'''
data = _get_values(profile)
# Decrypt SDB data if specified in the profile
if profile and profile.get('gpg', False):
return salt.utils.data.traverse_dict_and_list(_decrypt(data), key, None)
return salt.utils.data.traverse_dict_and_list(data, key, None) | def function[get, parameter[key, profile]]:
constant[
Get a value from the dictionary
]
variable[data] assign[=] call[name[_get_values], parameter[name[profile]]]
if <ast.BoolOp object at 0x7da1b1c4a170> begin[:]
return[call[name[salt].utils.data.traverse_dict_and_list, parameter[call[name[_decrypt], parameter[name[data]]], name[key], constant[None]]]]
return[call[name[salt].utils.data.traverse_dict_and_list, parameter[name[data], name[key], constant[None]]]] | keyword[def] identifier[get] ( identifier[key] , identifier[profile] = keyword[None] ):
literal[string]
identifier[data] = identifier[_get_values] ( identifier[profile] )
keyword[if] identifier[profile] keyword[and] identifier[profile] . identifier[get] ( literal[string] , keyword[False] ):
keyword[return] identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] ( identifier[_decrypt] ( identifier[data] ), identifier[key] , keyword[None] )
keyword[return] identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] ( identifier[data] , identifier[key] , keyword[None] ) | def get(key, profile=None): # pylint: disable=W0613
'\n Get a value from the dictionary\n '
data = _get_values(profile)
# Decrypt SDB data if specified in the profile
if profile and profile.get('gpg', False):
return salt.utils.data.traverse_dict_and_list(_decrypt(data), key, None) # depends on [control=['if'], data=[]]
return salt.utils.data.traverse_dict_and_list(data, key, None) |
def get_product_by_name(self, name):
'''
Gets a Item from the Menu by name. Note that the name is not
case-sensitive but must be spelt correctly.
:param string name: The name of the item.
:raises StopIteration: Raises exception if no item is found.
:return: An item object matching the search.
:rtype: Item
'''
return next(i for i in self.items if i.name.lower() == name.lower()) | def function[get_product_by_name, parameter[self, name]]:
constant[
Gets a Item from the Menu by name. Note that the name is not
case-sensitive but must be spelt correctly.
:param string name: The name of the item.
:raises StopIteration: Raises exception if no item is found.
:return: An item object matching the search.
:rtype: Item
]
return[call[name[next], parameter[<ast.GeneratorExp object at 0x7da1b0399ed0>]]] | keyword[def] identifier[get_product_by_name] ( identifier[self] , identifier[name] ):
literal[string]
keyword[return] identifier[next] ( identifier[i] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[items] keyword[if] identifier[i] . identifier[name] . identifier[lower] ()== identifier[name] . identifier[lower] ()) | def get_product_by_name(self, name):
"""
Gets a Item from the Menu by name. Note that the name is not
case-sensitive but must be spelt correctly.
:param string name: The name of the item.
:raises StopIteration: Raises exception if no item is found.
:return: An item object matching the search.
:rtype: Item
"""
return next((i for i in self.items if i.name.lower() == name.lower())) |
def _build_config_tree(name, configuration):
'''
Build the configuration tree.
The root object is _current_statement.
'''
type_, id_, options = _get_type_id_options(name, configuration)
global _INDENT, _current_statement
_INDENT = ''
if type_ == 'config':
_current_statement = GivenStatement(options)
elif type_ == 'log':
_current_statement = UnnamedStatement(type='log')
_parse_log_statement(options)
else:
if _is_statement_unnamed(type_):
_current_statement = UnnamedStatement(type=type_)
else:
_current_statement = NamedStatement(type=type_, id=id_)
_parse_statement(options) | def function[_build_config_tree, parameter[name, configuration]]:
constant[
Build the configuration tree.
The root object is _current_statement.
]
<ast.Tuple object at 0x7da1b1c36800> assign[=] call[name[_get_type_id_options], parameter[name[name], name[configuration]]]
<ast.Global object at 0x7da1b1c34e50>
variable[_INDENT] assign[=] constant[]
if compare[name[type_] equal[==] constant[config]] begin[:]
variable[_current_statement] assign[=] call[name[GivenStatement], parameter[name[options]]] | keyword[def] identifier[_build_config_tree] ( identifier[name] , identifier[configuration] ):
literal[string]
identifier[type_] , identifier[id_] , identifier[options] = identifier[_get_type_id_options] ( identifier[name] , identifier[configuration] )
keyword[global] identifier[_INDENT] , identifier[_current_statement]
identifier[_INDENT] = literal[string]
keyword[if] identifier[type_] == literal[string] :
identifier[_current_statement] = identifier[GivenStatement] ( identifier[options] )
keyword[elif] identifier[type_] == literal[string] :
identifier[_current_statement] = identifier[UnnamedStatement] ( identifier[type] = literal[string] )
identifier[_parse_log_statement] ( identifier[options] )
keyword[else] :
keyword[if] identifier[_is_statement_unnamed] ( identifier[type_] ):
identifier[_current_statement] = identifier[UnnamedStatement] ( identifier[type] = identifier[type_] )
keyword[else] :
identifier[_current_statement] = identifier[NamedStatement] ( identifier[type] = identifier[type_] , identifier[id] = identifier[id_] )
identifier[_parse_statement] ( identifier[options] ) | def _build_config_tree(name, configuration):
"""
Build the configuration tree.
The root object is _current_statement.
"""
(type_, id_, options) = _get_type_id_options(name, configuration)
global _INDENT, _current_statement
_INDENT = ''
if type_ == 'config':
_current_statement = GivenStatement(options) # depends on [control=['if'], data=[]]
elif type_ == 'log':
_current_statement = UnnamedStatement(type='log')
_parse_log_statement(options) # depends on [control=['if'], data=[]]
else:
if _is_statement_unnamed(type_):
_current_statement = UnnamedStatement(type=type_) # depends on [control=['if'], data=[]]
else:
_current_statement = NamedStatement(type=type_, id=id_)
_parse_statement(options) |
def _ta_plot(self,study,periods=14,column=None,include=True,str='{name}({period})',detail=False,
theme=None,sharing=None,filename='',asFigure=False,**iplot_kwargs):
"""
Generates a Technical Study Chart
Parameters:
-----------
study : string
Technical Study to be charted
sma - 'Simple Moving Average'
rsi - 'R Strength Indicator'
periods : int
Number of periods
column : string
Name of the column on which the
study will be done
include : bool
Indicates if the input column(s)
should be included in the chart
str : string
Label factory for studies
The following wildcards can be used:
{name} : Name of the column
{study} : Name of the study
{period} : Period used
Examples:
'study: {study} - period: {period}'
detail : bool
If True the supporting data/calculations
are included in the chart
study_colors : string or [string]
Colors to be used for the studies
Study Specific Parameters
-------------------------
RSI
rsi_upper : int (0,100]
Level for the upper rsi band
default : 70
rsi_lower : int (0,100]
Level for the lower rsi band
default : 30
CCI
cci_upper : int
Level for the upper cci band
default : 100
cci_lower : int
Level for the lower cci band
default : -100
BOLL
boll_std : int or float
Number of standard deviations
MACD
fast_period : int
Number of periods for the fast moving average
slow_period : int
Number of periods for the slow moving average
signal_period : int
Number of periods for the signal
CORREL
how : string
Method for the correlation calculation
values
pct_cht
diff
"""
if 'columns' in iplot_kwargs:
column=iplot_kwargs.pop('columns')
if 'period' in iplot_kwargs:
periods=iplot_kwargs.pop('period')
if 'world_readable' in iplot_kwargs:
sharing=iplot_kwargs.pop('world_readable')
if 'study_color' in iplot_kwargs:
iplot_kwargs['study_colors']=iplot_kwargs.pop('study_color')
if sharing is None:
sharing = auth.get_config_file()['sharing']
if isinstance(sharing,bool):
if sharing:
sharing='public'
else:
sharing='private'
iplot_kwargs['sharing']=sharing
if theme is None:
theme = iplot_kwargs.pop('study_theme',auth.get_config_file()['theme'])
if not filename:
if 'title' in iplot_kwargs:
filename=iplot_kwargs['title']
else:
filename='Plotly Playground {0}'.format(time.strftime("%Y-%m-%d %H:%M:%S"))
def get_subplots(figures):
shape=(len(figures),1)
layout=tools.get_base_layout(figures)
subplots=tools.subplots(figures,shape=shape,shared_xaxes=True,base_layout=layout)
if len(figures)==2:
subplots['layout']['yaxis']['domain']=[.27,1.0]
subplots['layout']['yaxis2']['domain']=[0,.25]
return subplots
def get_study(df,func,iplot_kwargs,iplot_study_kwargs,str=None,include=False,column=None,inset=False):
df=df.copy()
if inset:
if not column:
if isinstance(df,pd.DataFrame):
column=df.keys().tolist()
else:
df=pd.DataFrame(df)
column=df.keys().tolist()
if 'legend' in iplot_kwargs:
iplot_study_kwargs['legend']=iplot_kwargs['legend']
fig_0=df.figure(**iplot_kwargs)
df_ta=func(df,column=column,include=False,str=str,**study_kwargs)
kind=iplot_kwargs['kind'] if 'kind' in iplot_kwargs else ''
iplot_study_kwargs['kind']='scatter'
iplot_study_kwargs['colors']=iplot_study_kwargs.get('colors',['blue','green','red'] if study=='dmi' else 'blue')
fig_1=df_ta.figure(theme=theme,**iplot_study_kwargs)
if kind in ['candle','ohlc']:
for i in fig_1['data']:
i['x']=[pd.Timestamp(_) for _ in i['x']]
if inset:
figure=tools.merge_figures([fig_0,fig_1]) if include else fig_1
else:
figure=get_subplots([fig_0,fig_1]) if include else fig_1
return figure
study_kwargs={}
iplot_study_kwargs={}
study_kwargs=check_kwargs(iplot_kwargs,__TA_KWARGS,{},clean_origin=True)
iplot_study_kwargs=kwargs_from_keyword(iplot_kwargs,{},'study')
study_kwargs.update({'periods':periods})
ta_func = eval('ta.{0}'.format(study))
inset=study in ('sma','boll','ema','atr','ptps')
figure=get_study(self,ta_func,iplot_kwargs,iplot_study_kwargs,include=include,
column=column,str=str,inset=inset)
## Add Bands
if study in ('rsi','cci'):
bands= {'rsi':(30,70),
'cci':(-100,100)}
_upper=study_kwargs.get('{0}_upper'.format(study),bands[study][0])
_lower=study_kwargs.get('{0}_lower'.format(study),bands[study][1])
yref='y2' if include else 'y1'
shapes=[tools.get_shape(y=i,yref=yref,color=j,dash='dash') for (i,j) in [(_lower,'green'),(_upper,'red')]]
figure['layout']['shapes']=shapes
# if study=='rsi':
# rsi_upper=study_kwargs.get('rsi_upper',70)
# rsi_lower=study_kwargs.get('rsi_lower',30)
# yref='y2' if include else 'y1'
# shapes=[tools.get_shape(y=i,yref=yref,color=j,dash='dash') for (i,j) in [(rsi_lower,'green'),(rsi_upper,'red')]]
# figure['layout']['shapes']=shapes
# if study=='cci':
# cci_upper=study_kwargs.get('cci_upper',100)
# cci_lower=study_kwargs.get('cci_lower',-100)
# yref='y2' if include else 'y1'
# shapes=[tools.get_shape(y=i,yref=yref,color=j,dash='dash') for (i,j) in [(cci_lower,'green'),(cci_upper,'red')]]
# figure['layout']['shapes']=shapes
## Exports
if asFigure:
return figure
else:
return iplot(figure,sharing=sharing,filename=filename) | def function[_ta_plot, parameter[self, study, periods, column, include, str, detail, theme, sharing, filename, asFigure]]:
constant[
Generates a Technical Study Chart
Parameters:
-----------
study : string
Technical Study to be charted
sma - 'Simple Moving Average'
rsi - 'R Strength Indicator'
periods : int
Number of periods
column : string
Name of the column on which the
study will be done
include : bool
Indicates if the input column(s)
should be included in the chart
str : string
Label factory for studies
The following wildcards can be used:
{name} : Name of the column
{study} : Name of the study
{period} : Period used
Examples:
'study: {study} - period: {period}'
detail : bool
If True the supporting data/calculations
are included in the chart
study_colors : string or [string]
Colors to be used for the studies
Study Specific Parameters
-------------------------
RSI
rsi_upper : int (0,100]
Level for the upper rsi band
default : 70
rsi_lower : int (0,100]
Level for the lower rsi band
default : 30
CCI
cci_upper : int
Level for the upper cci band
default : 100
cci_lower : int
Level for the lower cci band
default : -100
BOLL
boll_std : int or float
Number of standard deviations
MACD
fast_period : int
Number of periods for the fast moving average
slow_period : int
Number of periods for the slow moving average
signal_period : int
Number of periods for the signal
CORREL
how : string
Method for the correlation calculation
values
pct_cht
diff
]
if compare[constant[columns] in name[iplot_kwargs]] begin[:]
variable[column] assign[=] call[name[iplot_kwargs].pop, parameter[constant[columns]]]
if compare[constant[period] in name[iplot_kwargs]] begin[:]
variable[periods] assign[=] call[name[iplot_kwargs].pop, parameter[constant[period]]]
if compare[constant[world_readable] in name[iplot_kwargs]] begin[:]
variable[sharing] assign[=] call[name[iplot_kwargs].pop, parameter[constant[world_readable]]]
if compare[constant[study_color] in name[iplot_kwargs]] begin[:]
call[name[iplot_kwargs]][constant[study_colors]] assign[=] call[name[iplot_kwargs].pop, parameter[constant[study_color]]]
if compare[name[sharing] is constant[None]] begin[:]
variable[sharing] assign[=] call[call[name[auth].get_config_file, parameter[]]][constant[sharing]]
if call[name[isinstance], parameter[name[sharing], name[bool]]] begin[:]
if name[sharing] begin[:]
variable[sharing] assign[=] constant[public]
call[name[iplot_kwargs]][constant[sharing]] assign[=] name[sharing]
if compare[name[theme] is constant[None]] begin[:]
variable[theme] assign[=] call[name[iplot_kwargs].pop, parameter[constant[study_theme], call[call[name[auth].get_config_file, parameter[]]][constant[theme]]]]
if <ast.UnaryOp object at 0x7da1b1c58970> begin[:]
if compare[constant[title] in name[iplot_kwargs]] begin[:]
variable[filename] assign[=] call[name[iplot_kwargs]][constant[title]]
def function[get_subplots, parameter[figures]]:
variable[shape] assign[=] tuple[[<ast.Call object at 0x7da1b1cfb7c0>, <ast.Constant object at 0x7da1b1cfb850>]]
variable[layout] assign[=] call[name[tools].get_base_layout, parameter[name[figures]]]
variable[subplots] assign[=] call[name[tools].subplots, parameter[name[figures]]]
if compare[call[name[len], parameter[name[figures]]] equal[==] constant[2]] begin[:]
call[call[call[name[subplots]][constant[layout]]][constant[yaxis]]][constant[domain]] assign[=] list[[<ast.Constant object at 0x7da1b1cee890>, <ast.Constant object at 0x7da1b1cee860>]]
call[call[call[name[subplots]][constant[layout]]][constant[yaxis2]]][constant[domain]] assign[=] list[[<ast.Constant object at 0x7da1b1cee680>, <ast.Constant object at 0x7da1b1cee650>]]
return[name[subplots]]
def function[get_study, parameter[df, func, iplot_kwargs, iplot_study_kwargs, str, include, column, inset]]:
variable[df] assign[=] call[name[df].copy, parameter[]]
if name[inset] begin[:]
if <ast.UnaryOp object at 0x7da1b1cee290> begin[:]
if call[name[isinstance], parameter[name[df], name[pd].DataFrame]] begin[:]
variable[column] assign[=] call[call[name[df].keys, parameter[]].tolist, parameter[]]
if compare[constant[legend] in name[iplot_kwargs]] begin[:]
call[name[iplot_study_kwargs]][constant[legend]] assign[=] call[name[iplot_kwargs]][constant[legend]]
variable[fig_0] assign[=] call[name[df].figure, parameter[]]
variable[df_ta] assign[=] call[name[func], parameter[name[df]]]
variable[kind] assign[=] <ast.IfExp object at 0x7da1b1cfece0>
call[name[iplot_study_kwargs]][constant[kind]] assign[=] constant[scatter]
call[name[iplot_study_kwargs]][constant[colors]] assign[=] call[name[iplot_study_kwargs].get, parameter[constant[colors], <ast.IfExp object at 0x7da1b1cff0d0>]]
variable[fig_1] assign[=] call[name[df_ta].figure, parameter[]]
if compare[name[kind] in list[[<ast.Constant object at 0x7da1b1cff4f0>, <ast.Constant object at 0x7da1b1cff520>]]] begin[:]
for taget[name[i]] in starred[call[name[fig_1]][constant[data]]] begin[:]
call[name[i]][constant[x]] assign[=] <ast.ListComp object at 0x7da1b1cff700>
if name[inset] begin[:]
variable[figure] assign[=] <ast.IfExp object at 0x7da1b1cff9d0>
return[name[figure]]
variable[study_kwargs] assign[=] dictionary[[], []]
variable[iplot_study_kwargs] assign[=] dictionary[[], []]
variable[study_kwargs] assign[=] call[name[check_kwargs], parameter[name[iplot_kwargs], name[__TA_KWARGS], dictionary[[], []]]]
variable[iplot_study_kwargs] assign[=] call[name[kwargs_from_keyword], parameter[name[iplot_kwargs], dictionary[[], []], constant[study]]]
call[name[study_kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1cfe3b0>], [<ast.Name object at 0x7da1b1cfe380>]]]]
variable[ta_func] assign[=] call[name[eval], parameter[call[constant[ta.{0}].format, parameter[name[study]]]]]
variable[inset] assign[=] compare[name[study] in tuple[[<ast.Constant object at 0x7da1b1cfe080>, <ast.Constant object at 0x7da1b1cfe050>, <ast.Constant object at 0x7da1b1cfe020>, <ast.Constant object at 0x7da1b1cfdff0>, <ast.Constant object at 0x7da1b1c30670>]]]
variable[figure] assign[=] call[name[get_study], parameter[name[self], name[ta_func], name[iplot_kwargs], name[iplot_study_kwargs]]]
if compare[name[study] in tuple[[<ast.Constant object at 0x7da1b1c332b0>, <ast.Constant object at 0x7da1b1c31600>]]] begin[:]
variable[bands] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c33ee0>, <ast.Constant object at 0x7da1b1c30e20>], [<ast.Tuple object at 0x7da1b1c32560>, <ast.Tuple object at 0x7da1b1c31a50>]]
variable[_upper] assign[=] call[name[study_kwargs].get, parameter[call[constant[{0}_upper].format, parameter[name[study]]], call[call[name[bands]][name[study]]][constant[0]]]]
variable[_lower] assign[=] call[name[study_kwargs].get, parameter[call[constant[{0}_lower].format, parameter[name[study]]], call[call[name[bands]][name[study]]][constant[1]]]]
variable[yref] assign[=] <ast.IfExp object at 0x7da1b1c315a0>
variable[shapes] assign[=] <ast.ListComp object at 0x7da1b1c33fa0>
call[call[name[figure]][constant[layout]]][constant[shapes]] assign[=] name[shapes]
if name[asFigure] begin[:]
return[name[figure]] | keyword[def] identifier[_ta_plot] ( identifier[self] , identifier[study] , identifier[periods] = literal[int] , identifier[column] = keyword[None] , identifier[include] = keyword[True] , identifier[str] = literal[string] , identifier[detail] = keyword[False] ,
identifier[theme] = keyword[None] , identifier[sharing] = keyword[None] , identifier[filename] = literal[string] , identifier[asFigure] = keyword[False] ,** identifier[iplot_kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[column] = identifier[iplot_kwargs] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[periods] = identifier[iplot_kwargs] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[sharing] = identifier[iplot_kwargs] . identifier[pop] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[iplot_kwargs] [ literal[string] ]= identifier[iplot_kwargs] . identifier[pop] ( literal[string] )
keyword[if] identifier[sharing] keyword[is] keyword[None] :
identifier[sharing] = identifier[auth] . identifier[get_config_file] ()[ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[sharing] , identifier[bool] ):
keyword[if] identifier[sharing] :
identifier[sharing] = literal[string]
keyword[else] :
identifier[sharing] = literal[string]
identifier[iplot_kwargs] [ literal[string] ]= identifier[sharing]
keyword[if] identifier[theme] keyword[is] keyword[None] :
identifier[theme] = identifier[iplot_kwargs] . identifier[pop] ( literal[string] , identifier[auth] . identifier[get_config_file] ()[ literal[string] ])
keyword[if] keyword[not] identifier[filename] :
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[filename] = identifier[iplot_kwargs] [ literal[string] ]
keyword[else] :
identifier[filename] = literal[string] . identifier[format] ( identifier[time] . identifier[strftime] ( literal[string] ))
keyword[def] identifier[get_subplots] ( identifier[figures] ):
identifier[shape] =( identifier[len] ( identifier[figures] ), literal[int] )
identifier[layout] = identifier[tools] . identifier[get_base_layout] ( identifier[figures] )
identifier[subplots] = identifier[tools] . identifier[subplots] ( identifier[figures] , identifier[shape] = identifier[shape] , identifier[shared_xaxes] = keyword[True] , identifier[base_layout] = identifier[layout] )
keyword[if] identifier[len] ( identifier[figures] )== literal[int] :
identifier[subplots] [ literal[string] ][ literal[string] ][ literal[string] ]=[ literal[int] , literal[int] ]
identifier[subplots] [ literal[string] ][ literal[string] ][ literal[string] ]=[ literal[int] , literal[int] ]
keyword[return] identifier[subplots]
keyword[def] identifier[get_study] ( identifier[df] , identifier[func] , identifier[iplot_kwargs] , identifier[iplot_study_kwargs] , identifier[str] = keyword[None] , identifier[include] = keyword[False] , identifier[column] = keyword[None] , identifier[inset] = keyword[False] ):
identifier[df] = identifier[df] . identifier[copy] ()
keyword[if] identifier[inset] :
keyword[if] keyword[not] identifier[column] :
keyword[if] identifier[isinstance] ( identifier[df] , identifier[pd] . identifier[DataFrame] ):
identifier[column] = identifier[df] . identifier[keys] (). identifier[tolist] ()
keyword[else] :
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[df] )
identifier[column] = identifier[df] . identifier[keys] (). identifier[tolist] ()
keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] :
identifier[iplot_study_kwargs] [ literal[string] ]= identifier[iplot_kwargs] [ literal[string] ]
identifier[fig_0] = identifier[df] . identifier[figure] (** identifier[iplot_kwargs] )
identifier[df_ta] = identifier[func] ( identifier[df] , identifier[column] = identifier[column] , identifier[include] = keyword[False] , identifier[str] = identifier[str] ,** identifier[study_kwargs] )
identifier[kind] = identifier[iplot_kwargs] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[iplot_kwargs] keyword[else] literal[string]
identifier[iplot_study_kwargs] [ literal[string] ]= literal[string]
identifier[iplot_study_kwargs] [ literal[string] ]= identifier[iplot_study_kwargs] . identifier[get] ( literal[string] ,[ literal[string] , literal[string] , literal[string] ] keyword[if] identifier[study] == literal[string] keyword[else] literal[string] )
identifier[fig_1] = identifier[df_ta] . identifier[figure] ( identifier[theme] = identifier[theme] ,** identifier[iplot_study_kwargs] )
keyword[if] identifier[kind] keyword[in] [ literal[string] , literal[string] ]:
keyword[for] identifier[i] keyword[in] identifier[fig_1] [ literal[string] ]:
identifier[i] [ literal[string] ]=[ identifier[pd] . identifier[Timestamp] ( identifier[_] ) keyword[for] identifier[_] keyword[in] identifier[i] [ literal[string] ]]
keyword[if] identifier[inset] :
identifier[figure] = identifier[tools] . identifier[merge_figures] ([ identifier[fig_0] , identifier[fig_1] ]) keyword[if] identifier[include] keyword[else] identifier[fig_1]
keyword[else] :
identifier[figure] = identifier[get_subplots] ([ identifier[fig_0] , identifier[fig_1] ]) keyword[if] identifier[include] keyword[else] identifier[fig_1]
keyword[return] identifier[figure]
identifier[study_kwargs] ={}
identifier[iplot_study_kwargs] ={}
identifier[study_kwargs] = identifier[check_kwargs] ( identifier[iplot_kwargs] , identifier[__TA_KWARGS] ,{}, identifier[clean_origin] = keyword[True] )
identifier[iplot_study_kwargs] = identifier[kwargs_from_keyword] ( identifier[iplot_kwargs] ,{}, literal[string] )
identifier[study_kwargs] . identifier[update] ({ literal[string] : identifier[periods] })
identifier[ta_func] = identifier[eval] ( literal[string] . identifier[format] ( identifier[study] ))
identifier[inset] = identifier[study] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] )
identifier[figure] = identifier[get_study] ( identifier[self] , identifier[ta_func] , identifier[iplot_kwargs] , identifier[iplot_study_kwargs] , identifier[include] = identifier[include] ,
identifier[column] = identifier[column] , identifier[str] = identifier[str] , identifier[inset] = identifier[inset] )
keyword[if] identifier[study] keyword[in] ( literal[string] , literal[string] ):
identifier[bands] ={ literal[string] :( literal[int] , literal[int] ),
literal[string] :(- literal[int] , literal[int] )}
identifier[_upper] = identifier[study_kwargs] . identifier[get] ( literal[string] . identifier[format] ( identifier[study] ), identifier[bands] [ identifier[study] ][ literal[int] ])
identifier[_lower] = identifier[study_kwargs] . identifier[get] ( literal[string] . identifier[format] ( identifier[study] ), identifier[bands] [ identifier[study] ][ literal[int] ])
identifier[yref] = literal[string] keyword[if] identifier[include] keyword[else] literal[string]
identifier[shapes] =[ identifier[tools] . identifier[get_shape] ( identifier[y] = identifier[i] , identifier[yref] = identifier[yref] , identifier[color] = identifier[j] , identifier[dash] = literal[string] ) keyword[for] ( identifier[i] , identifier[j] ) keyword[in] [( identifier[_lower] , literal[string] ),( identifier[_upper] , literal[string] )]]
identifier[figure] [ literal[string] ][ literal[string] ]= identifier[shapes]
keyword[if] identifier[asFigure] :
keyword[return] identifier[figure]
keyword[else] :
keyword[return] identifier[iplot] ( identifier[figure] , identifier[sharing] = identifier[sharing] , identifier[filename] = identifier[filename] ) | def _ta_plot(self, study, periods=14, column=None, include=True, str='{name}({period})', detail=False, theme=None, sharing=None, filename='', asFigure=False, **iplot_kwargs):
"""
Generates a Technical Study Chart
Parameters:
-----------
study : string
Technical Study to be charted
sma - 'Simple Moving Average'
rsi - 'R Strength Indicator'
periods : int
Number of periods
column : string
Name of the column on which the
study will be done
include : bool
Indicates if the input column(s)
should be included in the chart
str : string
Label factory for studies
The following wildcards can be used:
{name} : Name of the column
{study} : Name of the study
{period} : Period used
Examples:
'study: {study} - period: {period}'
detail : bool
If True the supporting data/calculations
are included in the chart
study_colors : string or [string]
Colors to be used for the studies
Study Specific Parameters
-------------------------
RSI
rsi_upper : int (0,100]
Level for the upper rsi band
default : 70
rsi_lower : int (0,100]
Level for the lower rsi band
default : 30
CCI
cci_upper : int
Level for the upper cci band
default : 100
cci_lower : int
Level for the lower cci band
default : -100
BOLL
boll_std : int or float
Number of standard deviations
MACD
fast_period : int
Number of periods for the fast moving average
slow_period : int
Number of periods for the slow moving average
signal_period : int
Number of periods for the signal
CORREL
how : string
Method for the correlation calculation
values
pct_cht
diff
"""
if 'columns' in iplot_kwargs:
column = iplot_kwargs.pop('columns') # depends on [control=['if'], data=['iplot_kwargs']]
if 'period' in iplot_kwargs:
periods = iplot_kwargs.pop('period') # depends on [control=['if'], data=['iplot_kwargs']]
if 'world_readable' in iplot_kwargs:
sharing = iplot_kwargs.pop('world_readable') # depends on [control=['if'], data=['iplot_kwargs']]
if 'study_color' in iplot_kwargs:
iplot_kwargs['study_colors'] = iplot_kwargs.pop('study_color') # depends on [control=['if'], data=['iplot_kwargs']]
if sharing is None:
sharing = auth.get_config_file()['sharing'] # depends on [control=['if'], data=['sharing']]
if isinstance(sharing, bool):
if sharing:
sharing = 'public' # depends on [control=['if'], data=[]]
else:
sharing = 'private' # depends on [control=['if'], data=[]]
iplot_kwargs['sharing'] = sharing
if theme is None:
theme = iplot_kwargs.pop('study_theme', auth.get_config_file()['theme']) # depends on [control=['if'], data=['theme']]
if not filename:
if 'title' in iplot_kwargs:
filename = iplot_kwargs['title'] # depends on [control=['if'], data=['iplot_kwargs']]
else:
filename = 'Plotly Playground {0}'.format(time.strftime('%Y-%m-%d %H:%M:%S')) # depends on [control=['if'], data=[]]
def get_subplots(figures):
shape = (len(figures), 1)
layout = tools.get_base_layout(figures)
subplots = tools.subplots(figures, shape=shape, shared_xaxes=True, base_layout=layout)
if len(figures) == 2:
subplots['layout']['yaxis']['domain'] = [0.27, 1.0]
subplots['layout']['yaxis2']['domain'] = [0, 0.25] # depends on [control=['if'], data=[]]
return subplots
def get_study(df, func, iplot_kwargs, iplot_study_kwargs, str=None, include=False, column=None, inset=False):
df = df.copy()
if inset:
if not column:
if isinstance(df, pd.DataFrame):
column = df.keys().tolist() # depends on [control=['if'], data=[]]
else:
df = pd.DataFrame(df)
column = df.keys().tolist() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 'legend' in iplot_kwargs:
iplot_study_kwargs['legend'] = iplot_kwargs['legend'] # depends on [control=['if'], data=['iplot_kwargs']]
fig_0 = df.figure(**iplot_kwargs)
df_ta = func(df, column=column, include=False, str=str, **study_kwargs)
kind = iplot_kwargs['kind'] if 'kind' in iplot_kwargs else ''
iplot_study_kwargs['kind'] = 'scatter'
iplot_study_kwargs['colors'] = iplot_study_kwargs.get('colors', ['blue', 'green', 'red'] if study == 'dmi' else 'blue')
fig_1 = df_ta.figure(theme=theme, **iplot_study_kwargs)
if kind in ['candle', 'ohlc']:
for i in fig_1['data']:
i['x'] = [pd.Timestamp(_) for _ in i['x']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
if inset:
figure = tools.merge_figures([fig_0, fig_1]) if include else fig_1 # depends on [control=['if'], data=[]]
else:
figure = get_subplots([fig_0, fig_1]) if include else fig_1
return figure
study_kwargs = {}
iplot_study_kwargs = {}
study_kwargs = check_kwargs(iplot_kwargs, __TA_KWARGS, {}, clean_origin=True)
iplot_study_kwargs = kwargs_from_keyword(iplot_kwargs, {}, 'study')
study_kwargs.update({'periods': periods})
ta_func = eval('ta.{0}'.format(study))
inset = study in ('sma', 'boll', 'ema', 'atr', 'ptps')
figure = get_study(self, ta_func, iplot_kwargs, iplot_study_kwargs, include=include, column=column, str=str, inset=inset) ## Add Bands
if study in ('rsi', 'cci'):
bands = {'rsi': (30, 70), 'cci': (-100, 100)}
_upper = study_kwargs.get('{0}_upper'.format(study), bands[study][0])
_lower = study_kwargs.get('{0}_lower'.format(study), bands[study][1])
yref = 'y2' if include else 'y1'
shapes = [tools.get_shape(y=i, yref=yref, color=j, dash='dash') for (i, j) in [(_lower, 'green'), (_upper, 'red')]]
figure['layout']['shapes'] = shapes # depends on [control=['if'], data=['study']] # if study=='rsi':
# rsi_upper=study_kwargs.get('rsi_upper',70)
# rsi_lower=study_kwargs.get('rsi_lower',30)
# yref='y2' if include else 'y1'
# shapes=[tools.get_shape(y=i,yref=yref,color=j,dash='dash') for (i,j) in [(rsi_lower,'green'),(rsi_upper,'red')]]
# figure['layout']['shapes']=shapes
# if study=='cci':
# cci_upper=study_kwargs.get('cci_upper',100)
# cci_lower=study_kwargs.get('cci_lower',-100)
# yref='y2' if include else 'y1'
# shapes=[tools.get_shape(y=i,yref=yref,color=j,dash='dash') for (i,j) in [(cci_lower,'green'),(cci_upper,'red')]]
# figure['layout']['shapes']=shapes
## Exports
if asFigure:
return figure # depends on [control=['if'], data=[]]
else:
return iplot(figure, sharing=sharing, filename=filename) |
def load(data, udf, data_dir, overwrite):
    """Load Ibis test data and build/upload UDFs"""
    client = make_ibis_client(ENV)

    # Fail fast: validate the environment before any expensive work.
    if not can_write_to_hdfs(client):
        raise IbisError('Failed to write to HDFS; check your settings')
    if udf and not can_build_udfs():
        raise IbisError('Build environment does not support building UDFs')

    # Step 1: load the test data files (unless disabled).
    if not data:
        logger.info('Skipping Ibis test data load (--no-data)')
    else:
        load_impala_data(client, str(data_dir), overwrite)

    # Step 2: build and upload the UDFs (unless disabled).
    if not udf:
        logger.info('Skipping UDF build/load (--no-udf)')
        return

    loaded = is_udf_loaded(client)
    logger.info('Attempting to build and load test UDFs')
    if loaded and not overwrite:
        logger.info('UDFs already loaded and not overwriting; moving on')
        return
    if loaded:
        logger.info('UDFs already loaded; attempting to overwrite')
    logger.info('Building UDFs')
    build_udfs()
    logger.info('Uploading UDFs')
    upload_udfs(client)
constant[Load Ibis test data and build/upload UDFs]
variable[con] assign[=] call[name[make_ibis_client], parameter[name[ENV]]]
if <ast.UnaryOp object at 0x7da20c795d20> begin[:]
<ast.Raise object at 0x7da20c795660>
if <ast.BoolOp object at 0x7da20c795fc0> begin[:]
<ast.Raise object at 0x7da20c7963e0>
if name[data] begin[:]
call[name[load_impala_data], parameter[name[con], call[name[str], parameter[name[data_dir]]], name[overwrite]]]
if name[udf] begin[:]
variable[already_loaded] assign[=] call[name[is_udf_loaded], parameter[name[con]]]
call[name[logger].info, parameter[constant[Attempting to build and load test UDFs]]]
if <ast.BoolOp object at 0x7da20c7c9b70> begin[:]
call[name[logger].info, parameter[constant[UDFs already loaded and not overwriting; moving on]]] | keyword[def] identifier[load] ( identifier[data] , identifier[udf] , identifier[data_dir] , identifier[overwrite] ):
literal[string]
identifier[con] = identifier[make_ibis_client] ( identifier[ENV] )
keyword[if] keyword[not] identifier[can_write_to_hdfs] ( identifier[con] ):
keyword[raise] identifier[IbisError] ( literal[string] )
keyword[if] identifier[udf] keyword[and] keyword[not] identifier[can_build_udfs] ():
keyword[raise] identifier[IbisError] ( literal[string] )
keyword[if] identifier[data] :
identifier[load_impala_data] ( identifier[con] , identifier[str] ( identifier[data_dir] ), identifier[overwrite] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[udf] :
identifier[already_loaded] = identifier[is_udf_loaded] ( identifier[con] )
identifier[logger] . identifier[info] ( literal[string] )
keyword[if] identifier[already_loaded] keyword[and] keyword[not] identifier[overwrite] :
identifier[logger] . identifier[info] ( literal[string] )
keyword[else] :
keyword[if] identifier[already_loaded] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[logger] . identifier[info] ( literal[string] )
identifier[build_udfs] ()
identifier[logger] . identifier[info] ( literal[string] )
identifier[upload_udfs] ( identifier[con] )
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] ) | def load(data, udf, data_dir, overwrite):
"""Load Ibis test data and build/upload UDFs"""
con = make_ibis_client(ENV)
# validate our environment before performing possibly expensive operations
if not can_write_to_hdfs(con):
raise IbisError('Failed to write to HDFS; check your settings') # depends on [control=['if'], data=[]]
if udf and (not can_build_udfs()):
raise IbisError('Build environment does not support building UDFs') # depends on [control=['if'], data=[]]
# load the data files
if data:
load_impala_data(con, str(data_dir), overwrite) # depends on [control=['if'], data=[]]
else:
logger.info('Skipping Ibis test data load (--no-data)')
# build and upload the UDFs
if udf:
already_loaded = is_udf_loaded(con)
logger.info('Attempting to build and load test UDFs')
if already_loaded and (not overwrite):
logger.info('UDFs already loaded and not overwriting; moving on') # depends on [control=['if'], data=[]]
else:
if already_loaded:
logger.info('UDFs already loaded; attempting to overwrite') # depends on [control=['if'], data=[]]
logger.info('Building UDFs')
build_udfs()
logger.info('Uploading UDFs')
upload_udfs(con) # depends on [control=['if'], data=[]]
else:
logger.info('Skipping UDF build/load (--no-udf)') |
def render_template(template):
    """
    Build a view-factory bound to *template*.

    The returned factory takes ``callable_or_dict`` and an optional
    ``statuscode``.  When ``callable_or_dict`` is callable it is invoked
    with the request (plus any extra view arguments) to produce the
    template parameters; otherwise it is used as the parameters directly.
    A ``None`` or dict result is rendered with *template*; any other
    result is treated as a ready response and returned as-is.  When
    ``statuscode`` is given it overrides the response's status code.
    """
    def outer_wrapper(callable_or_dict=None, statuscode=None, **kwargs):
        def view(request, *args, **view_kwargs):
            if callable(callable_or_dict):
                params = callable_or_dict(request, *args, **view_kwargs)
            else:
                params = callable_or_dict
            # A dict (or None) means "render the template"; anything else
            # is already a response object and overrides the default.
            should_render = params is None or isinstance(params, dict)
            response = (render(request, template, params, **kwargs)
                        if should_render else params)
            if statuscode:
                response.status_code = statuscode
            return response
        return view
    return outer_wrapper
constant[
takes a template to render to and returns a function that
takes an object to render the data for this template.
If callable_or_dict is callable, it will be called with
the request and any additional arguments to produce the
template paramaters. This is useful for a view-like function
that returns a dict-like object instead of an HttpResponse.
Otherwise, callable_or_dict is used as the parameters for
the rendered response.
]
def function[outer_wrapper, parameter[callable_or_dict, statuscode]]:
def function[wrapper, parameter[request]]:
if call[name[callable], parameter[name[callable_or_dict]]] begin[:]
variable[params] assign[=] call[name[callable_or_dict], parameter[name[request], <ast.Starred object at 0x7da18fe918a0>]]
if <ast.BoolOp object at 0x7da1b141dea0> begin[:]
variable[resp] assign[=] call[name[render], parameter[name[request], name[template], name[params]]]
if name[statuscode] begin[:]
name[resp].status_code assign[=] name[statuscode]
return[name[resp]]
return[name[wrapper]]
return[name[outer_wrapper]] | keyword[def] identifier[render_template] ( identifier[template] ):
literal[string]
keyword[def] identifier[outer_wrapper] ( identifier[callable_or_dict] = keyword[None] , identifier[statuscode] = keyword[None] ,** identifier[kwargs] ):
keyword[def] identifier[wrapper] ( identifier[request] ,* identifier[args] ,** identifier[wrapper_kwargs] ):
keyword[if] identifier[callable] ( identifier[callable_or_dict] ):
identifier[params] = identifier[callable_or_dict] ( identifier[request] ,* identifier[args] ,** identifier[wrapper_kwargs] )
keyword[else] :
identifier[params] = identifier[callable_or_dict]
keyword[if] identifier[params] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[params] , identifier[dict] ):
identifier[resp] = identifier[render] ( identifier[request] , identifier[template] , identifier[params] ,** identifier[kwargs] )
keyword[else] :
identifier[resp] = identifier[params]
keyword[if] identifier[statuscode] :
identifier[resp] . identifier[status_code] = identifier[statuscode]
keyword[return] identifier[resp]
keyword[return] identifier[wrapper]
keyword[return] identifier[outer_wrapper] | def render_template(template):
"""
takes a template to render to and returns a function that
takes an object to render the data for this template.
If callable_or_dict is callable, it will be called with
the request and any additional arguments to produce the
template paramaters. This is useful for a view-like function
that returns a dict-like object instead of an HttpResponse.
Otherwise, callable_or_dict is used as the parameters for
the rendered response.
"""
def outer_wrapper(callable_or_dict=None, statuscode=None, **kwargs):
def wrapper(request, *args, **wrapper_kwargs):
if callable(callable_or_dict):
params = callable_or_dict(request, *args, **wrapper_kwargs) # depends on [control=['if'], data=[]]
else:
params = callable_or_dict
# If we want to return some other response type we can,
# that simply overrides the default behavior
if params is None or isinstance(params, dict):
resp = render(request, template, params, **kwargs) # depends on [control=['if'], data=[]]
else:
resp = params
if statuscode:
resp.status_code = statuscode # depends on [control=['if'], data=[]]
return resp
return wrapper
return outer_wrapper |
def _bibliography(doc, terms, converters=None, format='html'):
    """
    Render citations, from a document or a dict of dicts.

    If ``doc`` is a :class:`MetatabDoc`, the entries are built from *terms*.
    Otherwise ``doc`` is treated as a mapping whose keys name the citations
    and whose values are BibTex-formatted dicts.

    :param doc: A MetatabDoc, or a dict of BibTex dicts
    :param terms: Terms to cite when ``doc`` is a MetatabDoc
    :param converters: Optional callables tried in order to turn a value
        into a citation dict; a converter signals "not handled" by
        returning ``False``.
    :param format: ``'latex'`` for LaTeX output, anything else for HTML.
    :return: The formatted bibliography string.
    """
    converters = converters or []  # avoid mutable default argument

    output_backend = 'latex' if format == 'latex' else MetatabHtmlBackend

    def mk_cite(v):
        # First converter that handles the value wins; otherwise fall back
        # to the generic citation dict.
        for c in converters:
            r = c(v)
            if r is not False:
                return r
        return make_citation_dict(v)

    if isinstance(doc, MetatabDoc):
        # This doesn't work for LaTex, b/c the formatter adds the prologue
        # and epilogue to every entry
        d = [mk_cite(t) for t in terms]
        cd = {e['name_link']: e for e in d}
    else:
        # mk_cite() takes a single value; the old code also passed the
        # enumerate index, which always raised TypeError on this branch.
        cd = {k: mk_cite(v) for k, v in doc.items()}

    return PybtexEngine().format_from_string(safe_dump({'entries': cd}),
                                             style=MetatabStyle,
                                             output_backend=output_backend,
                                             bib_format='yaml')
constant[
Render citations, from a document or a doct of dicts
If the input is a dict, each key is the name of the citation, and the value is a BibTex
formatted dict
:param doc: A MetatabDoc, or a dict of BibTex dicts
:return:
]
variable[output_backend] assign[=] <ast.IfExp object at 0x7da1b19af910>
def function[mk_cite, parameter[v]]:
for taget[name[c]] in starred[name[converters]] begin[:]
variable[r] assign[=] call[name[c], parameter[name[v]]]
if compare[name[r] is_not constant[False]] begin[:]
return[name[r]]
return[call[name[make_citation_dict], parameter[name[v]]]]
if call[name[isinstance], parameter[name[doc], name[MetatabDoc]]] begin[:]
variable[d] assign[=] <ast.ListComp object at 0x7da1b18add20>
variable[cd] assign[=] <ast.DictComp object at 0x7da1b18ae380>
return[call[call[name[PybtexEngine], parameter[]].format_from_string, parameter[call[name[safe_dump], parameter[dictionary[[<ast.Constant object at 0x7da1b18acdc0>], [<ast.Name object at 0x7da1b18acd90>]]]]]]] | keyword[def] identifier[_bibliography] ( identifier[doc] , identifier[terms] , identifier[converters] =[], identifier[format] = literal[string] ):
literal[string]
identifier[output_backend] = literal[string] keyword[if] identifier[format] == literal[string] keyword[else] identifier[MetatabHtmlBackend]
keyword[def] identifier[mk_cite] ( identifier[v] ):
keyword[for] identifier[c] keyword[in] identifier[converters] :
identifier[r] = identifier[c] ( identifier[v] )
keyword[if] identifier[r] keyword[is] keyword[not] keyword[False] :
keyword[return] identifier[r]
keyword[return] identifier[make_citation_dict] ( identifier[v] )
keyword[if] identifier[isinstance] ( identifier[doc] , identifier[MetatabDoc] ):
identifier[d] =[ identifier[mk_cite] ( identifier[t] ) keyword[for] identifier[t] keyword[in] identifier[terms] ]
identifier[cd] ={ identifier[e] [ literal[string] ]: identifier[e] keyword[for] identifier[e] keyword[in] identifier[d] }
keyword[else] :
identifier[cd] ={ identifier[k] : identifier[mk_cite] ( identifier[v] , identifier[i] ) keyword[for] identifier[i] ,( identifier[k] , identifier[v] ) keyword[in] identifier[enumerate] ( identifier[doc] . identifier[items] ())}
keyword[return] identifier[PybtexEngine] (). identifier[format_from_string] ( identifier[safe_dump] ({ literal[string] : identifier[cd] }),
identifier[style] = identifier[MetatabStyle] ,
identifier[output_backend] = identifier[output_backend] ,
identifier[bib_format] = literal[string] ) | def _bibliography(doc, terms, converters=[], format='html'):
"""
Render citations, from a document or a doct of dicts
If the input is a dict, each key is the name of the citation, and the value is a BibTex
formatted dict
:param doc: A MetatabDoc, or a dict of BibTex dicts
:return:
"""
output_backend = 'latex' if format == 'latex' else MetatabHtmlBackend
def mk_cite(v):
for c in converters:
r = c(v)
if r is not False:
return r # depends on [control=['if'], data=['r']] # depends on [control=['for'], data=['c']]
return make_citation_dict(v)
if isinstance(doc, MetatabDoc):
# This doesn't work for LaTex, b/c the formatter adds the prologue and epilogue to eery entry
d = [mk_cite(t) for t in terms]
cd = {e['name_link']: e for e in d} # depends on [control=['if'], data=[]]
else:
cd = {k: mk_cite(v, i) for (i, (k, v)) in enumerate(doc.items())}
# for k, v in cd.items():
# print (k, v)
return PybtexEngine().format_from_string(safe_dump({'entries': cd}), style=MetatabStyle, output_backend=output_backend, bib_format='yaml') |
def tree(self, path):
    '''
    .. versionadded:: 2014.7.0
    Recurse through etcd and return all values
    '''
    try:
        node = self.read(path)
    except (etcd.EtcdKeyNotFound, ValueError):
        return None
    except etcd.EtcdConnectionFailed:
        log.error("etcd: failed to perform 'tree' operation on path %s due to connection error", path)
        return None

    result = {}
    for child in node.children:
        # Last path component is the key name in the returned mapping.
        leaf = six.text_type(child.key).split('/')[-1]
        if child.dir is True:
            # etcd reports the queried path itself among the children; skip it.
            if child.key == path:
                continue
            result[leaf] = self.tree(child.key)
        else:
            result[leaf] = child.value
    return result
constant[
.. versionadded:: 2014.7.0
Recurse through etcd and return all values
]
variable[ret] assign[=] dictionary[[], []]
<ast.Try object at 0x7da18ede58a0>
for taget[name[item]] in starred[name[items].children] begin[:]
variable[comps] assign[=] call[call[name[six].text_type, parameter[name[item].key]].split, parameter[constant[/]]]
if compare[name[item].dir is constant[True]] begin[:]
if compare[name[item].key equal[==] name[path]] begin[:]
continue
call[name[ret]][call[name[comps]][<ast.UnaryOp object at 0x7da18ede5d50>]] assign[=] call[name[self].tree, parameter[name[item].key]]
return[name[ret]] | keyword[def] identifier[tree] ( identifier[self] , identifier[path] ):
literal[string]
identifier[ret] ={}
keyword[try] :
identifier[items] = identifier[self] . identifier[read] ( identifier[path] )
keyword[except] ( identifier[etcd] . identifier[EtcdKeyNotFound] , identifier[ValueError] ):
keyword[return] keyword[None]
keyword[except] identifier[etcd] . identifier[EtcdConnectionFailed] :
identifier[log] . identifier[error] ( literal[string] , identifier[path] )
keyword[return] keyword[None]
keyword[for] identifier[item] keyword[in] identifier[items] . identifier[children] :
identifier[comps] = identifier[six] . identifier[text_type] ( identifier[item] . identifier[key] ). identifier[split] ( literal[string] )
keyword[if] identifier[item] . identifier[dir] keyword[is] keyword[True] :
keyword[if] identifier[item] . identifier[key] == identifier[path] :
keyword[continue]
identifier[ret] [ identifier[comps] [- literal[int] ]]= identifier[self] . identifier[tree] ( identifier[item] . identifier[key] )
keyword[else] :
identifier[ret] [ identifier[comps] [- literal[int] ]]= identifier[item] . identifier[value]
keyword[return] identifier[ret] | def tree(self, path):
"""
.. versionadded:: 2014.7.0
Recurse through etcd and return all values
"""
ret = {}
try:
items = self.read(path) # depends on [control=['try'], data=[]]
except (etcd.EtcdKeyNotFound, ValueError):
return None # depends on [control=['except'], data=[]]
except etcd.EtcdConnectionFailed:
log.error("etcd: failed to perform 'tree' operation on path %s due to connection error", path)
return None # depends on [control=['except'], data=[]]
for item in items.children:
comps = six.text_type(item.key).split('/')
if item.dir is True:
if item.key == path:
continue # depends on [control=['if'], data=[]]
ret[comps[-1]] = self.tree(item.key) # depends on [control=['if'], data=[]]
else:
ret[comps[-1]] = item.value # depends on [control=['for'], data=['item']]
return ret |
def read_csv(filename, keys=None, convert_types=False, **kwargs):
'''
Read a CSV in canonical form: ::
<agent_id, t_step, key, value, value_type>
'''
df = pd.read_csv(filename)
if convert_types:
df = convert_types_slow(df)
if keys:
df = df[df['key'].isin(keys)]
df = process_one(df)
return df | def function[read_csv, parameter[filename, keys, convert_types]]:
constant[
Read a CSV in canonical form: ::
<agent_id, t_step, key, value, value_type>
]
variable[df] assign[=] call[name[pd].read_csv, parameter[name[filename]]]
if name[convert_types] begin[:]
variable[df] assign[=] call[name[convert_types_slow], parameter[name[df]]]
if name[keys] begin[:]
variable[df] assign[=] call[name[df]][call[call[name[df]][constant[key]].isin, parameter[name[keys]]]]
variable[df] assign[=] call[name[process_one], parameter[name[df]]]
return[name[df]] | keyword[def] identifier[read_csv] ( identifier[filename] , identifier[keys] = keyword[None] , identifier[convert_types] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[df] = identifier[pd] . identifier[read_csv] ( identifier[filename] )
keyword[if] identifier[convert_types] :
identifier[df] = identifier[convert_types_slow] ( identifier[df] )
keyword[if] identifier[keys] :
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]. identifier[isin] ( identifier[keys] )]
identifier[df] = identifier[process_one] ( identifier[df] )
keyword[return] identifier[df] | def read_csv(filename, keys=None, convert_types=False, **kwargs):
"""
Read a CSV in canonical form: ::
<agent_id, t_step, key, value, value_type>
"""
df = pd.read_csv(filename)
if convert_types:
df = convert_types_slow(df) # depends on [control=['if'], data=[]]
if keys:
df = df[df['key'].isin(keys)] # depends on [control=['if'], data=[]]
df = process_one(df)
return df |
def set_stop_handler(self):
"""
Initializes functions that are invoked when the user or OS wants to kill this process.
:return:
"""
signal.signal(signal.SIGTERM, self.graceful_stop)
signal.signal(signal.SIGABRT, self.graceful_stop)
signal.signal(signal.SIGINT, self.graceful_stop) | def function[set_stop_handler, parameter[self]]:
constant[
Initializes functions that are invoked when the user or OS wants to kill this process.
:return:
]
call[name[signal].signal, parameter[name[signal].SIGTERM, name[self].graceful_stop]]
call[name[signal].signal, parameter[name[signal].SIGABRT, name[self].graceful_stop]]
call[name[signal].signal, parameter[name[signal].SIGINT, name[self].graceful_stop]] | keyword[def] identifier[set_stop_handler] ( identifier[self] ):
literal[string]
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGTERM] , identifier[self] . identifier[graceful_stop] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGABRT] , identifier[self] . identifier[graceful_stop] )
identifier[signal] . identifier[signal] ( identifier[signal] . identifier[SIGINT] , identifier[self] . identifier[graceful_stop] ) | def set_stop_handler(self):
"""
Initializes functions that are invoked when the user or OS wants to kill this process.
:return:
"""
signal.signal(signal.SIGTERM, self.graceful_stop)
signal.signal(signal.SIGABRT, self.graceful_stop)
signal.signal(signal.SIGINT, self.graceful_stop) |
def _read_packet(self):
"""
Reads and decodes a single packet
Reads a single packet from the device and
stores the data from it in the current Command
object
"""
# Grab command, send it and decode response
cmd = self._commands_to_read.popleft()
try:
raw_data = self._interface.read()
raw_data = bytearray(raw_data)
decoded_data = cmd.decode_data(raw_data)
except Exception as exception:
self._abort_all_transfers(exception)
raise
decoded_data = bytearray(decoded_data)
self._command_response_buf.extend(decoded_data)
# Attach data to transfers
pos = 0
while True:
size_left = len(self._command_response_buf) - pos
if size_left == 0:
# If size left is 0 then the transfer list might
# be empty, so don't try to access element 0
break
transfer = self._transfer_list[0]
size = transfer.get_data_size()
if size > size_left:
break
self._transfer_list.popleft()
data = self._command_response_buf[pos:pos + size]
pos += size
transfer.add_response(data)
# Remove used data from _command_response_buf
if pos > 0:
self._command_response_buf = self._command_response_buf[pos:] | def function[_read_packet, parameter[self]]:
constant[
Reads and decodes a single packet
Reads a single packet from the device and
stores the data from it in the current Command
object
]
variable[cmd] assign[=] call[name[self]._commands_to_read.popleft, parameter[]]
<ast.Try object at 0x7da1b18dd090>
variable[decoded_data] assign[=] call[name[bytearray], parameter[name[decoded_data]]]
call[name[self]._command_response_buf.extend, parameter[name[decoded_data]]]
variable[pos] assign[=] constant[0]
while constant[True] begin[:]
variable[size_left] assign[=] binary_operation[call[name[len], parameter[name[self]._command_response_buf]] - name[pos]]
if compare[name[size_left] equal[==] constant[0]] begin[:]
break
variable[transfer] assign[=] call[name[self]._transfer_list][constant[0]]
variable[size] assign[=] call[name[transfer].get_data_size, parameter[]]
if compare[name[size] greater[>] name[size_left]] begin[:]
break
call[name[self]._transfer_list.popleft, parameter[]]
variable[data] assign[=] call[name[self]._command_response_buf][<ast.Slice object at 0x7da1b18acdf0>]
<ast.AugAssign object at 0x7da1b18ac490>
call[name[transfer].add_response, parameter[name[data]]]
if compare[name[pos] greater[>] constant[0]] begin[:]
name[self]._command_response_buf assign[=] call[name[self]._command_response_buf][<ast.Slice object at 0x7da1b18ae7a0>] | keyword[def] identifier[_read_packet] ( identifier[self] ):
literal[string]
identifier[cmd] = identifier[self] . identifier[_commands_to_read] . identifier[popleft] ()
keyword[try] :
identifier[raw_data] = identifier[self] . identifier[_interface] . identifier[read] ()
identifier[raw_data] = identifier[bytearray] ( identifier[raw_data] )
identifier[decoded_data] = identifier[cmd] . identifier[decode_data] ( identifier[raw_data] )
keyword[except] identifier[Exception] keyword[as] identifier[exception] :
identifier[self] . identifier[_abort_all_transfers] ( identifier[exception] )
keyword[raise]
identifier[decoded_data] = identifier[bytearray] ( identifier[decoded_data] )
identifier[self] . identifier[_command_response_buf] . identifier[extend] ( identifier[decoded_data] )
identifier[pos] = literal[int]
keyword[while] keyword[True] :
identifier[size_left] = identifier[len] ( identifier[self] . identifier[_command_response_buf] )- identifier[pos]
keyword[if] identifier[size_left] == literal[int] :
keyword[break]
identifier[transfer] = identifier[self] . identifier[_transfer_list] [ literal[int] ]
identifier[size] = identifier[transfer] . identifier[get_data_size] ()
keyword[if] identifier[size] > identifier[size_left] :
keyword[break]
identifier[self] . identifier[_transfer_list] . identifier[popleft] ()
identifier[data] = identifier[self] . identifier[_command_response_buf] [ identifier[pos] : identifier[pos] + identifier[size] ]
identifier[pos] += identifier[size]
identifier[transfer] . identifier[add_response] ( identifier[data] )
keyword[if] identifier[pos] > literal[int] :
identifier[self] . identifier[_command_response_buf] = identifier[self] . identifier[_command_response_buf] [ identifier[pos] :] | def _read_packet(self):
"""
Reads and decodes a single packet
Reads a single packet from the device and
stores the data from it in the current Command
object
"""
# Grab command, send it and decode response
cmd = self._commands_to_read.popleft()
try:
raw_data = self._interface.read()
raw_data = bytearray(raw_data)
decoded_data = cmd.decode_data(raw_data) # depends on [control=['try'], data=[]]
except Exception as exception:
self._abort_all_transfers(exception)
raise # depends on [control=['except'], data=['exception']]
decoded_data = bytearray(decoded_data)
self._command_response_buf.extend(decoded_data)
# Attach data to transfers
pos = 0
while True:
size_left = len(self._command_response_buf) - pos
if size_left == 0:
# If size left is 0 then the transfer list might
# be empty, so don't try to access element 0
break # depends on [control=['if'], data=[]]
transfer = self._transfer_list[0]
size = transfer.get_data_size()
if size > size_left:
break # depends on [control=['if'], data=[]]
self._transfer_list.popleft()
data = self._command_response_buf[pos:pos + size]
pos += size
transfer.add_response(data) # depends on [control=['while'], data=[]]
# Remove used data from _command_response_buf
if pos > 0:
self._command_response_buf = self._command_response_buf[pos:] # depends on [control=['if'], data=['pos']] |
def store(self, result, commit=None, **kwargs):
"""
Store a result in a JSON file attaching git meta information.
Parameters
----------
result : memote.MemoteResult
The dictionary structure of results.
commit : str, optional
Unique hexsha of the desired commit.
kwargs :
Passed to parent function.
"""
git_info = self.record_git_info(commit)
self.add_git(result.meta, git_info)
filename = self.get_filename(git_info)
super(RepoResultManager, self).store(
result, filename=filename, **kwargs) | def function[store, parameter[self, result, commit]]:
constant[
Store a result in a JSON file attaching git meta information.
Parameters
----------
result : memote.MemoteResult
The dictionary structure of results.
commit : str, optional
Unique hexsha of the desired commit.
kwargs :
Passed to parent function.
]
variable[git_info] assign[=] call[name[self].record_git_info, parameter[name[commit]]]
call[name[self].add_git, parameter[name[result].meta, name[git_info]]]
variable[filename] assign[=] call[name[self].get_filename, parameter[name[git_info]]]
call[call[name[super], parameter[name[RepoResultManager], name[self]]].store, parameter[name[result]]] | keyword[def] identifier[store] ( identifier[self] , identifier[result] , identifier[commit] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[git_info] = identifier[self] . identifier[record_git_info] ( identifier[commit] )
identifier[self] . identifier[add_git] ( identifier[result] . identifier[meta] , identifier[git_info] )
identifier[filename] = identifier[self] . identifier[get_filename] ( identifier[git_info] )
identifier[super] ( identifier[RepoResultManager] , identifier[self] ). identifier[store] (
identifier[result] , identifier[filename] = identifier[filename] ,** identifier[kwargs] ) | def store(self, result, commit=None, **kwargs):
"""
Store a result in a JSON file attaching git meta information.
Parameters
----------
result : memote.MemoteResult
The dictionary structure of results.
commit : str, optional
Unique hexsha of the desired commit.
kwargs :
Passed to parent function.
"""
git_info = self.record_git_info(commit)
self.add_git(result.meta, git_info)
filename = self.get_filename(git_info)
super(RepoResultManager, self).store(result, filename=filename, **kwargs) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.