code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def secure_boot(self):
"""Property to provide reference to `SecureBoot` instance
It is calculated once when the first time it is queried. On refresh,
this property gets reset.
"""
return secure_boot.SecureBoot(
self._conn, utils.get_subresource_path_by(self, 'SecureBoot'),
redfish_version=self.redfish_version) | def function[secure_boot, parameter[self]]:
constant[Property to provide reference to `SecureBoot` instance
It is calculated once when the first time it is queried. On refresh,
this property gets reset.
]
return[call[name[secure_boot].SecureBoot, parameter[name[self]._conn, call[name[utils].get_subresource_path_by, parameter[name[self], constant[SecureBoot]]]]]] | keyword[def] identifier[secure_boot] ( identifier[self] ):
literal[string]
keyword[return] identifier[secure_boot] . identifier[SecureBoot] (
identifier[self] . identifier[_conn] , identifier[utils] . identifier[get_subresource_path_by] ( identifier[self] , literal[string] ),
identifier[redfish_version] = identifier[self] . identifier[redfish_version] ) | def secure_boot(self):
"""Property to provide reference to `SecureBoot` instance
It is calculated once when the first time it is queried. On refresh,
this property gets reset.
"""
return secure_boot.SecureBoot(self._conn, utils.get_subresource_path_by(self, 'SecureBoot'), redfish_version=self.redfish_version) |
def _find_keys(self, identity='image'):
"""
Finds and returns all keys for identity,
"""
prefix = add_prefix('', identity)
raw_keys = self._find_keys_raw(prefix) or []
for raw_key in raw_keys:
yield del_prefix(raw_key) | def function[_find_keys, parameter[self, identity]]:
constant[
Finds and returns all keys for identity,
]
variable[prefix] assign[=] call[name[add_prefix], parameter[constant[], name[identity]]]
variable[raw_keys] assign[=] <ast.BoolOp object at 0x7da1b1d54e50>
for taget[name[raw_key]] in starred[name[raw_keys]] begin[:]
<ast.Yield object at 0x7da1b1d55d80> | keyword[def] identifier[_find_keys] ( identifier[self] , identifier[identity] = literal[string] ):
literal[string]
identifier[prefix] = identifier[add_prefix] ( literal[string] , identifier[identity] )
identifier[raw_keys] = identifier[self] . identifier[_find_keys_raw] ( identifier[prefix] ) keyword[or] []
keyword[for] identifier[raw_key] keyword[in] identifier[raw_keys] :
keyword[yield] identifier[del_prefix] ( identifier[raw_key] ) | def _find_keys(self, identity='image'):
"""
Finds and returns all keys for identity,
"""
prefix = add_prefix('', identity)
raw_keys = self._find_keys_raw(prefix) or []
for raw_key in raw_keys:
yield del_prefix(raw_key) # depends on [control=['for'], data=['raw_key']] |
def draw_pdf(buffer, invoice):
""" Draws the invoice """
canvas = Canvas(buffer, pagesize=A4)
canvas.translate(0, 29.7 * cm)
canvas.setFont('Helvetica', 10)
canvas.saveState()
header_func(canvas)
canvas.restoreState()
canvas.saveState()
footer_func(canvas)
canvas.restoreState()
canvas.saveState()
address_func(canvas)
canvas.restoreState()
# Client address
textobject = canvas.beginText(1.5 * cm, -2.5 * cm)
try:
if invoice.address.invoice_contact_name:
textobject.textLine(invoice.address.invoice_contact_name)
textobject.textLine(invoice.address.invoice_address_one)
if invoice.address.invoice_address_two:
textobject.textLine(invoice.address.invoice_address_two)
textobject.textLine(invoice.address.invoice_town)
if invoice.address.invoice_county:
textobject.textLine(invoice.address.invoice_county)
textobject.textLine(invoice.address.invoice_postcode)
textobject.textLine(invoice.address.country.invoice_name)
except:
pass
canvas.drawText(textobject)
# Info
textobject = canvas.beginText(1.5 * cm, -6.75 * cm)
textobject.textLine(u'Invoice ID: %s' % invoice.invoice_id)
textobject.textLine(u'Invoice Date: %s' % invoice.invoice_date.strftime(
'%d %b %Y'))
canvas.drawText(textobject)
# Items
data = [[u'Quantity', u'Description', u'Amount', u'Total'], ]
for item in invoice.items.all():
data.append([
item.quantity,
item.description,
format_currency(item.unit_price, invoice.currency),
format_currency(item.total(), invoice.currency)
])
data.append([u'', u'', u'Total:', format_currency(invoice.total(),
invoice.currency)])
table = Table(data, colWidths=[2 * cm, 11 * cm, 3 * cm, 3 * cm])
table.setStyle([
('FONT', (0, 0), (-1, -1), 'Helvetica'),
('FONTSIZE', (0, 0), (-1, -1), 10),
('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)),
('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)),
('GRID', (-2, -1), (-1, -1), 1, (0.7, 0.7, 0.7)),
('ALIGN', (-2, 0), (-1, -1), 'RIGHT'),
('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8)),
])
tw, th, = table.wrapOn(canvas, 15 * cm, 19 * cm)
table.drawOn(canvas, 1 * cm, -8 * cm - th)
canvas.showPage()
canvas.save()
return canvas | def function[draw_pdf, parameter[buffer, invoice]]:
constant[ Draws the invoice ]
variable[canvas] assign[=] call[name[Canvas], parameter[name[buffer]]]
call[name[canvas].translate, parameter[constant[0], binary_operation[constant[29.7] * name[cm]]]]
call[name[canvas].setFont, parameter[constant[Helvetica], constant[10]]]
call[name[canvas].saveState, parameter[]]
call[name[header_func], parameter[name[canvas]]]
call[name[canvas].restoreState, parameter[]]
call[name[canvas].saveState, parameter[]]
call[name[footer_func], parameter[name[canvas]]]
call[name[canvas].restoreState, parameter[]]
call[name[canvas].saveState, parameter[]]
call[name[address_func], parameter[name[canvas]]]
call[name[canvas].restoreState, parameter[]]
variable[textobject] assign[=] call[name[canvas].beginText, parameter[binary_operation[constant[1.5] * name[cm]], binary_operation[<ast.UnaryOp object at 0x7da204620fd0> * name[cm]]]]
<ast.Try object at 0x7da204620910>
call[name[canvas].drawText, parameter[name[textobject]]]
variable[textobject] assign[=] call[name[canvas].beginText, parameter[binary_operation[constant[1.5] * name[cm]], binary_operation[<ast.UnaryOp object at 0x7da204623b50> * name[cm]]]]
call[name[textobject].textLine, parameter[binary_operation[constant[Invoice ID: %s] <ast.Mod object at 0x7da2590d6920> name[invoice].invoice_id]]]
call[name[textobject].textLine, parameter[binary_operation[constant[Invoice Date: %s] <ast.Mod object at 0x7da2590d6920> call[name[invoice].invoice_date.strftime, parameter[constant[%d %b %Y]]]]]]
call[name[canvas].drawText, parameter[name[textobject]]]
variable[data] assign[=] list[[<ast.List object at 0x7da18f811960>]]
for taget[name[item]] in starred[call[name[invoice].items.all, parameter[]]] begin[:]
call[name[data].append, parameter[list[[<ast.Attribute object at 0x7da18f811f00>, <ast.Attribute object at 0x7da18f810e50>, <ast.Call object at 0x7da18f8100d0>, <ast.Call object at 0x7da18f812fe0>]]]]
call[name[data].append, parameter[list[[<ast.Constant object at 0x7da1b254dcf0>, <ast.Constant object at 0x7da1b254e8f0>, <ast.Constant object at 0x7da1b254ce20>, <ast.Call object at 0x7da1b254caf0>]]]]
variable[table] assign[=] call[name[Table], parameter[name[data]]]
call[name[table].setStyle, parameter[list[[<ast.Tuple object at 0x7da1b254f520>, <ast.Tuple object at 0x7da1b254d690>, <ast.Tuple object at 0x7da18f810a30>, <ast.Tuple object at 0x7da18f811840>, <ast.Tuple object at 0x7da18f811600>, <ast.Tuple object at 0x7da18f812f20>, <ast.Tuple object at 0x7da18f813070>]]]]
<ast.Tuple object at 0x7da18f812260> assign[=] call[name[table].wrapOn, parameter[name[canvas], binary_operation[constant[15] * name[cm]], binary_operation[constant[19] * name[cm]]]]
call[name[table].drawOn, parameter[name[canvas], binary_operation[constant[1] * name[cm]], binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f813c40> * name[cm]] - name[th]]]]
call[name[canvas].showPage, parameter[]]
call[name[canvas].save, parameter[]]
return[name[canvas]] | keyword[def] identifier[draw_pdf] ( identifier[buffer] , identifier[invoice] ):
literal[string]
identifier[canvas] = identifier[Canvas] ( identifier[buffer] , identifier[pagesize] = identifier[A4] )
identifier[canvas] . identifier[translate] ( literal[int] , literal[int] * identifier[cm] )
identifier[canvas] . identifier[setFont] ( literal[string] , literal[int] )
identifier[canvas] . identifier[saveState] ()
identifier[header_func] ( identifier[canvas] )
identifier[canvas] . identifier[restoreState] ()
identifier[canvas] . identifier[saveState] ()
identifier[footer_func] ( identifier[canvas] )
identifier[canvas] . identifier[restoreState] ()
identifier[canvas] . identifier[saveState] ()
identifier[address_func] ( identifier[canvas] )
identifier[canvas] . identifier[restoreState] ()
identifier[textobject] = identifier[canvas] . identifier[beginText] ( literal[int] * identifier[cm] ,- literal[int] * identifier[cm] )
keyword[try] :
keyword[if] identifier[invoice] . identifier[address] . identifier[invoice_contact_name] :
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_contact_name] )
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_address_one] )
keyword[if] identifier[invoice] . identifier[address] . identifier[invoice_address_two] :
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_address_two] )
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_town] )
keyword[if] identifier[invoice] . identifier[address] . identifier[invoice_county] :
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_county] )
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[invoice_postcode] )
identifier[textobject] . identifier[textLine] ( identifier[invoice] . identifier[address] . identifier[country] . identifier[invoice_name] )
keyword[except] :
keyword[pass]
identifier[canvas] . identifier[drawText] ( identifier[textobject] )
identifier[textobject] = identifier[canvas] . identifier[beginText] ( literal[int] * identifier[cm] ,- literal[int] * identifier[cm] )
identifier[textobject] . identifier[textLine] ( literal[string] % identifier[invoice] . identifier[invoice_id] )
identifier[textobject] . identifier[textLine] ( literal[string] % identifier[invoice] . identifier[invoice_date] . identifier[strftime] (
literal[string] ))
identifier[canvas] . identifier[drawText] ( identifier[textobject] )
identifier[data] =[[ literal[string] , literal[string] , literal[string] , literal[string] ],]
keyword[for] identifier[item] keyword[in] identifier[invoice] . identifier[items] . identifier[all] ():
identifier[data] . identifier[append] ([
identifier[item] . identifier[quantity] ,
identifier[item] . identifier[description] ,
identifier[format_currency] ( identifier[item] . identifier[unit_price] , identifier[invoice] . identifier[currency] ),
identifier[format_currency] ( identifier[item] . identifier[total] (), identifier[invoice] . identifier[currency] )
])
identifier[data] . identifier[append] ([ literal[string] , literal[string] , literal[string] , identifier[format_currency] ( identifier[invoice] . identifier[total] (),
identifier[invoice] . identifier[currency] )])
identifier[table] = identifier[Table] ( identifier[data] , identifier[colWidths] =[ literal[int] * identifier[cm] , literal[int] * identifier[cm] , literal[int] * identifier[cm] , literal[int] * identifier[cm] ])
identifier[table] . identifier[setStyle] ([
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[string] ),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ),( literal[int] , literal[int] , literal[int] )),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ,( literal[int] , literal[int] , literal[int] )),
( literal[string] ,(- literal[int] ,- literal[int] ),(- literal[int] ,- literal[int] ), literal[int] ,( literal[int] , literal[int] , literal[int] )),
( literal[string] ,(- literal[int] , literal[int] ),(- literal[int] ,- literal[int] ), literal[string] ),
( literal[string] ,( literal[int] , literal[int] ),(- literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] )),
])
identifier[tw] , identifier[th] ,= identifier[table] . identifier[wrapOn] ( identifier[canvas] , literal[int] * identifier[cm] , literal[int] * identifier[cm] )
identifier[table] . identifier[drawOn] ( identifier[canvas] , literal[int] * identifier[cm] ,- literal[int] * identifier[cm] - identifier[th] )
identifier[canvas] . identifier[showPage] ()
identifier[canvas] . identifier[save] ()
keyword[return] identifier[canvas] | def draw_pdf(buffer, invoice):
""" Draws the invoice """
canvas = Canvas(buffer, pagesize=A4)
canvas.translate(0, 29.7 * cm)
canvas.setFont('Helvetica', 10)
canvas.saveState()
header_func(canvas)
canvas.restoreState()
canvas.saveState()
footer_func(canvas)
canvas.restoreState()
canvas.saveState()
address_func(canvas)
canvas.restoreState()
# Client address
textobject = canvas.beginText(1.5 * cm, -2.5 * cm)
try:
if invoice.address.invoice_contact_name:
textobject.textLine(invoice.address.invoice_contact_name) # depends on [control=['if'], data=[]]
textobject.textLine(invoice.address.invoice_address_one)
if invoice.address.invoice_address_two:
textobject.textLine(invoice.address.invoice_address_two) # depends on [control=['if'], data=[]]
textobject.textLine(invoice.address.invoice_town)
if invoice.address.invoice_county:
textobject.textLine(invoice.address.invoice_county) # depends on [control=['if'], data=[]]
textobject.textLine(invoice.address.invoice_postcode)
textobject.textLine(invoice.address.country.invoice_name) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
canvas.drawText(textobject)
# Info
textobject = canvas.beginText(1.5 * cm, -6.75 * cm)
textobject.textLine(u'Invoice ID: %s' % invoice.invoice_id)
textobject.textLine(u'Invoice Date: %s' % invoice.invoice_date.strftime('%d %b %Y'))
canvas.drawText(textobject)
# Items
data = [[u'Quantity', u'Description', u'Amount', u'Total']]
for item in invoice.items.all():
data.append([item.quantity, item.description, format_currency(item.unit_price, invoice.currency), format_currency(item.total(), invoice.currency)]) # depends on [control=['for'], data=['item']]
data.append([u'', u'', u'Total:', format_currency(invoice.total(), invoice.currency)])
table = Table(data, colWidths=[2 * cm, 11 * cm, 3 * cm, 3 * cm])
table.setStyle([('FONT', (0, 0), (-1, -1), 'Helvetica'), ('FONTSIZE', (0, 0), (-1, -1), 10), ('TEXTCOLOR', (0, 0), (-1, -1), (0.2, 0.2, 0.2)), ('GRID', (0, 0), (-1, -2), 1, (0.7, 0.7, 0.7)), ('GRID', (-2, -1), (-1, -1), 1, (0.7, 0.7, 0.7)), ('ALIGN', (-2, 0), (-1, -1), 'RIGHT'), ('BACKGROUND', (0, 0), (-1, 0), (0.8, 0.8, 0.8))])
(tw, th) = table.wrapOn(canvas, 15 * cm, 19 * cm)
table.drawOn(canvas, 1 * cm, -8 * cm - th)
canvas.showPage()
canvas.save()
return canvas |
def close(self):
"""
Saves the model (of saver dir is given) and closes the session.
"""
if self.flush_summarizer is not None:
self.monitored_session.run(fetches=self.flush_summarizer)
if self.saver_directory is not None:
self.save(append_timestep=True)
self.monitored_session.__exit__(None, None, None) | def function[close, parameter[self]]:
constant[
Saves the model (of saver dir is given) and closes the session.
]
if compare[name[self].flush_summarizer is_not constant[None]] begin[:]
call[name[self].monitored_session.run, parameter[]]
if compare[name[self].saver_directory is_not constant[None]] begin[:]
call[name[self].save, parameter[]]
call[name[self].monitored_session.__exit__, parameter[constant[None], constant[None], constant[None]]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[flush_summarizer] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[monitored_session] . identifier[run] ( identifier[fetches] = identifier[self] . identifier[flush_summarizer] )
keyword[if] identifier[self] . identifier[saver_directory] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[save] ( identifier[append_timestep] = keyword[True] )
identifier[self] . identifier[monitored_session] . identifier[__exit__] ( keyword[None] , keyword[None] , keyword[None] ) | def close(self):
"""
Saves the model (of saver dir is given) and closes the session.
"""
if self.flush_summarizer is not None:
self.monitored_session.run(fetches=self.flush_summarizer) # depends on [control=['if'], data=[]]
if self.saver_directory is not None:
self.save(append_timestep=True) # depends on [control=['if'], data=[]]
self.monitored_session.__exit__(None, None, None) |
def delete_subnetwork(kwargs=None, call=None):
'''
... versionadded:: 2017.7.0
Delete a GCE Subnetwork. Must specify name and region.
CLI Example:
.. code-block:: bash
salt-cloud -f delete_subnetwork gce name=mysubnet network=mynet1 region=us-west1
'''
if call != 'function':
raise SaltCloudSystemExit(
'The delete_subnet function must be called with -f or --function.'
)
if not kwargs or 'name' not in kwargs:
log.error(
'Must specify name of subnet.'
)
return False
if 'region' not in kwargs:
log.error(
'Must specify region of subnet.'
)
return False
name = kwargs['name']
region = kwargs['region']
conn = get_conn()
__utils__['cloud.fire_event'](
'event',
'deleting subnetwork',
'salt/cloud/subnet/deleting',
args={
'name': name,
'region': region
},
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
try:
result = conn.ex_destroy_subnetwork(name, region)
except ResourceNotFoundError as exc:
log.error(
'Subnetwork %s was not found. Exception was: %s',
name, exc, exc_info_on_loglevel=logging.DEBUG
)
return False
__utils__['cloud.fire_event'](
'event',
'deleted subnetwork',
'salt/cloud/subnet/deleted',
args={
'name': name,
'region': region
},
sock_dir=__opts__['sock_dir'],
transport=__opts__['transport']
)
return result | def function[delete_subnetwork, parameter[kwargs, call]]:
constant[
... versionadded:: 2017.7.0
Delete a GCE Subnetwork. Must specify name and region.
CLI Example:
.. code-block:: bash
salt-cloud -f delete_subnetwork gce name=mysubnet network=mynet1 region=us-west1
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da1b1c46ce0>
if <ast.BoolOp object at 0x7da1b1c478b0> begin[:]
call[name[log].error, parameter[constant[Must specify name of subnet.]]]
return[constant[False]]
if compare[constant[region] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[log].error, parameter[constant[Must specify region of subnet.]]]
return[constant[False]]
variable[name] assign[=] call[name[kwargs]][constant[name]]
variable[region] assign[=] call[name[kwargs]][constant[region]]
variable[conn] assign[=] call[name[get_conn], parameter[]]
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[deleting subnetwork], constant[salt/cloud/subnet/deleting]]]
<ast.Try object at 0x7da1b1c14250>
call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[deleted subnetwork], constant[salt/cloud/subnet/deleted]]]
return[name[result]] | keyword[def] identifier[delete_subnetwork] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[kwargs] keyword[or] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
identifier[name] = identifier[kwargs] [ literal[string] ]
identifier[region] = identifier[kwargs] [ literal[string] ]
identifier[conn] = identifier[get_conn] ()
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] ,
identifier[args] ={
literal[string] : identifier[name] ,
literal[string] : identifier[region]
},
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
keyword[try] :
identifier[result] = identifier[conn] . identifier[ex_destroy_subnetwork] ( identifier[name] , identifier[region] )
keyword[except] identifier[ResourceNotFoundError] keyword[as] identifier[exc] :
identifier[log] . identifier[error] (
literal[string] ,
identifier[name] , identifier[exc] , identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] keyword[False]
identifier[__utils__] [ literal[string] ](
literal[string] ,
literal[string] ,
literal[string] ,
identifier[args] ={
literal[string] : identifier[name] ,
literal[string] : identifier[region]
},
identifier[sock_dir] = identifier[__opts__] [ literal[string] ],
identifier[transport] = identifier[__opts__] [ literal[string] ]
)
keyword[return] identifier[result] | def delete_subnetwork(kwargs=None, call=None):
"""
... versionadded:: 2017.7.0
Delete a GCE Subnetwork. Must specify name and region.
CLI Example:
.. code-block:: bash
salt-cloud -f delete_subnetwork gce name=mysubnet network=mynet1 region=us-west1
"""
if call != 'function':
raise SaltCloudSystemExit('The delete_subnet function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if not kwargs or 'name' not in kwargs:
log.error('Must specify name of subnet.')
return False # depends on [control=['if'], data=[]]
if 'region' not in kwargs:
log.error('Must specify region of subnet.')
return False # depends on [control=['if'], data=[]]
name = kwargs['name']
region = kwargs['region']
conn = get_conn()
__utils__['cloud.fire_event']('event', 'deleting subnetwork', 'salt/cloud/subnet/deleting', args={'name': name, 'region': region}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
try:
result = conn.ex_destroy_subnetwork(name, region) # depends on [control=['try'], data=[]]
except ResourceNotFoundError as exc:
log.error('Subnetwork %s was not found. Exception was: %s', name, exc, exc_info_on_loglevel=logging.DEBUG)
return False # depends on [control=['except'], data=['exc']]
__utils__['cloud.fire_event']('event', 'deleted subnetwork', 'salt/cloud/subnet/deleted', args={'name': name, 'region': region}, sock_dir=__opts__['sock_dir'], transport=__opts__['transport'])
return result |
def isoutside(coords, shape):
r"""
Identifies points that lie outside the specified region.
Parameters
----------
domain_size : array_like
The size and shape of the domain beyond which points should be
trimmed. The argument is treated as follows:
**sphere** : If a scalar or single element list is received, it's
treated as the radius [r] of a sphere centered on [0, 0, 0].
**cylinder** : If a two-element list is received it's treated as
the radius and height of a cylinder [r, z] whose central axis
starts at [0, 0, 0] and extends in the positive z-direction.
**rectangle** : If a three element list is received, it's treated
as the outer corner of rectangle [x, y, z] whose opposite corner
lies at [0, 0, 0].
Returns
-------
An Np-long mask of True values indicating pores that lie outside the
domain.
"""
# Label external pores for trimming below
if len(shape) == 1: # Spherical
# Find external points
r = sp.sqrt(sp.sum(coords**2, axis=1))
Ps = r > shape[0]
elif len(shape) == 2: # Cylindrical
# Find external pores outside radius
r = sp.sqrt(sp.sum(coords[:, [0, 1]]**2, axis=1))
Ps = r > shape[0]
# Find external pores above and below cylinder
if shape[1] > 0:
Ps = Ps + (coords[:, 2] > shape[1])
Ps = Ps + (coords[:, 2] < 0)
else:
pass
elif len(shape) == 3: # Rectilinear
shape = sp.array(shape, dtype=float)
try:
lo_lim = shape[:, 0]
hi_lim = shape[:, 1]
except IndexError:
lo_lim = sp.array([0, 0, 0])
hi_lim = shape
Ps1 = sp.any(coords > hi_lim, axis=1)
Ps2 = sp.any(coords < lo_lim, axis=1)
Ps = Ps1 + Ps2
return Ps | def function[isoutside, parameter[coords, shape]]:
constant[
Identifies points that lie outside the specified region.
Parameters
----------
domain_size : array_like
The size and shape of the domain beyond which points should be
trimmed. The argument is treated as follows:
**sphere** : If a scalar or single element list is received, it's
treated as the radius [r] of a sphere centered on [0, 0, 0].
**cylinder** : If a two-element list is received it's treated as
the radius and height of a cylinder [r, z] whose central axis
starts at [0, 0, 0] and extends in the positive z-direction.
**rectangle** : If a three element list is received, it's treated
as the outer corner of rectangle [x, y, z] whose opposite corner
lies at [0, 0, 0].
Returns
-------
An Np-long mask of True values indicating pores that lie outside the
domain.
]
if compare[call[name[len], parameter[name[shape]]] equal[==] constant[1]] begin[:]
variable[r] assign[=] call[name[sp].sqrt, parameter[call[name[sp].sum, parameter[binary_operation[name[coords] ** constant[2]]]]]]
variable[Ps] assign[=] compare[name[r] greater[>] call[name[shape]][constant[0]]]
return[name[Ps]] | keyword[def] identifier[isoutside] ( identifier[coords] , identifier[shape] ):
literal[string]
keyword[if] identifier[len] ( identifier[shape] )== literal[int] :
identifier[r] = identifier[sp] . identifier[sqrt] ( identifier[sp] . identifier[sum] ( identifier[coords] ** literal[int] , identifier[axis] = literal[int] ))
identifier[Ps] = identifier[r] > identifier[shape] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[shape] )== literal[int] :
identifier[r] = identifier[sp] . identifier[sqrt] ( identifier[sp] . identifier[sum] ( identifier[coords] [:,[ literal[int] , literal[int] ]]** literal[int] , identifier[axis] = literal[int] ))
identifier[Ps] = identifier[r] > identifier[shape] [ literal[int] ]
keyword[if] identifier[shape] [ literal[int] ]> literal[int] :
identifier[Ps] = identifier[Ps] +( identifier[coords] [:, literal[int] ]> identifier[shape] [ literal[int] ])
identifier[Ps] = identifier[Ps] +( identifier[coords] [:, literal[int] ]< literal[int] )
keyword[else] :
keyword[pass]
keyword[elif] identifier[len] ( identifier[shape] )== literal[int] :
identifier[shape] = identifier[sp] . identifier[array] ( identifier[shape] , identifier[dtype] = identifier[float] )
keyword[try] :
identifier[lo_lim] = identifier[shape] [:, literal[int] ]
identifier[hi_lim] = identifier[shape] [:, literal[int] ]
keyword[except] identifier[IndexError] :
identifier[lo_lim] = identifier[sp] . identifier[array] ([ literal[int] , literal[int] , literal[int] ])
identifier[hi_lim] = identifier[shape]
identifier[Ps1] = identifier[sp] . identifier[any] ( identifier[coords] > identifier[hi_lim] , identifier[axis] = literal[int] )
identifier[Ps2] = identifier[sp] . identifier[any] ( identifier[coords] < identifier[lo_lim] , identifier[axis] = literal[int] )
identifier[Ps] = identifier[Ps1] + identifier[Ps2]
keyword[return] identifier[Ps] | def isoutside(coords, shape):
"""
Identifies points that lie outside the specified region.
Parameters
----------
domain_size : array_like
The size and shape of the domain beyond which points should be
trimmed. The argument is treated as follows:
**sphere** : If a scalar or single element list is received, it's
treated as the radius [r] of a sphere centered on [0, 0, 0].
**cylinder** : If a two-element list is received it's treated as
the radius and height of a cylinder [r, z] whose central axis
starts at [0, 0, 0] and extends in the positive z-direction.
**rectangle** : If a three element list is received, it's treated
as the outer corner of rectangle [x, y, z] whose opposite corner
lies at [0, 0, 0].
Returns
-------
An Np-long mask of True values indicating pores that lie outside the
domain.
"""
# Label external pores for trimming below
if len(shape) == 1: # Spherical
# Find external points
r = sp.sqrt(sp.sum(coords ** 2, axis=1))
Ps = r > shape[0] # depends on [control=['if'], data=[]]
elif len(shape) == 2: # Cylindrical
# Find external pores outside radius
r = sp.sqrt(sp.sum(coords[:, [0, 1]] ** 2, axis=1))
Ps = r > shape[0]
# Find external pores above and below cylinder
if shape[1] > 0:
Ps = Ps + (coords[:, 2] > shape[1])
Ps = Ps + (coords[:, 2] < 0) # depends on [control=['if'], data=[]]
else:
pass # depends on [control=['if'], data=[]]
elif len(shape) == 3: # Rectilinear
shape = sp.array(shape, dtype=float)
try:
lo_lim = shape[:, 0]
hi_lim = shape[:, 1] # depends on [control=['try'], data=[]]
except IndexError:
lo_lim = sp.array([0, 0, 0])
hi_lim = shape # depends on [control=['except'], data=[]]
Ps1 = sp.any(coords > hi_lim, axis=1)
Ps2 = sp.any(coords < lo_lim, axis=1)
Ps = Ps1 + Ps2 # depends on [control=['if'], data=[]]
return Ps |
def bandpass_order(f_stop1, f_pass1, f_pass2, f_stop2, dpass_dB, dstop_dB, fsamp = 1):
"""
Optimal FIR (equal ripple) Bandpass Order Determination
Text reference: Ifeachor, Digital Signal Processing a Practical Approach,
second edition, Prentice Hall, 2002.
Journal paper reference: F. Mintzer & B. Liu, Practical Design Rules for Optimum
FIR Bandpass Digital Filters, IEEE Transactions on Acoustics and Speech, pp.
204-206, April,1979.
"""
dpass = 1 - 10**(-dpass_dB/20)
dstop = 10**(-dstop_dB/20)
Df1 = (f_pass1 - f_stop1)/fsamp
Df2 = (f_stop2 - f_pass2)/fsamp
b1 = 0.01201
b2 = 0.09664
b3 = -0.51325
b4 = 0.00203
b5 = -0.5705
b6 = -0.44314
Df = min(Df1, Df2)
Cinf = np.log10(dstop)*(b1*np.log10(dpass)**2 + b2*np.log10(dpass) + b3) \
+ (b4*np.log10(dpass)**2 + b5*np.log10(dpass) + b6)
g = -14.6*np.log10(dpass/dstop) - 16.9
N = Cinf/Df + g*Df + 1
ff = 2*np.array([0, f_stop1, f_pass1, f_pass2, f_stop2, fsamp/2])/fsamp
aa = np.array([0, 0, 1, 1, 0, 0])
wts = np.array([dpass/dstop, 1, dpass/dstop])
return int(N), ff, aa, wts | def function[bandpass_order, parameter[f_stop1, f_pass1, f_pass2, f_stop2, dpass_dB, dstop_dB, fsamp]]:
constant[
Optimal FIR (equal ripple) Bandpass Order Determination
Text reference: Ifeachor, Digital Signal Processing a Practical Approach,
second edition, Prentice Hall, 2002.
Journal paper reference: F. Mintzer & B. Liu, Practical Design Rules for Optimum
FIR Bandpass Digital Filters, IEEE Transactions on Acoustics and Speech, pp.
204-206, April,1979.
]
variable[dpass] assign[=] binary_operation[constant[1] - binary_operation[constant[10] ** binary_operation[<ast.UnaryOp object at 0x7da18dc9afb0> / constant[20]]]]
variable[dstop] assign[=] binary_operation[constant[10] ** binary_operation[<ast.UnaryOp object at 0x7da18dc9b6a0> / constant[20]]]
variable[Df1] assign[=] binary_operation[binary_operation[name[f_pass1] - name[f_stop1]] / name[fsamp]]
variable[Df2] assign[=] binary_operation[binary_operation[name[f_stop2] - name[f_pass2]] / name[fsamp]]
variable[b1] assign[=] constant[0.01201]
variable[b2] assign[=] constant[0.09664]
variable[b3] assign[=] <ast.UnaryOp object at 0x7da18dc9b250>
variable[b4] assign[=] constant[0.00203]
variable[b5] assign[=] <ast.UnaryOp object at 0x7da18dc98340>
variable[b6] assign[=] <ast.UnaryOp object at 0x7da18dc9a3b0>
variable[Df] assign[=] call[name[min], parameter[name[Df1], name[Df2]]]
variable[Cinf] assign[=] binary_operation[binary_operation[call[name[np].log10, parameter[name[dstop]]] * binary_operation[binary_operation[binary_operation[name[b1] * binary_operation[call[name[np].log10, parameter[name[dpass]]] ** constant[2]]] + binary_operation[name[b2] * call[name[np].log10, parameter[name[dpass]]]]] + name[b3]]] + binary_operation[binary_operation[binary_operation[name[b4] * binary_operation[call[name[np].log10, parameter[name[dpass]]] ** constant[2]]] + binary_operation[name[b5] * call[name[np].log10, parameter[name[dpass]]]]] + name[b6]]]
variable[g] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18dc9bbb0> * call[name[np].log10, parameter[binary_operation[name[dpass] / name[dstop]]]]] - constant[16.9]]
variable[N] assign[=] binary_operation[binary_operation[binary_operation[name[Cinf] / name[Df]] + binary_operation[name[g] * name[Df]]] + constant[1]]
variable[ff] assign[=] binary_operation[binary_operation[constant[2] * call[name[np].array, parameter[list[[<ast.Constant object at 0x7da18dc9ace0>, <ast.Name object at 0x7da18dc98610>, <ast.Name object at 0x7da18dc9b490>, <ast.Name object at 0x7da18dc9a620>, <ast.Name object at 0x7da18dc981c0>, <ast.BinOp object at 0x7da18dc9abf0>]]]]] / name[fsamp]]
variable[aa] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da18dc9bb80>, <ast.Constant object at 0x7da18dc993c0>, <ast.Constant object at 0x7da18dc98a00>, <ast.Constant object at 0x7da18dc9b430>, <ast.Constant object at 0x7da18dc9bca0>, <ast.Constant object at 0x7da18dc9a740>]]]]
variable[wts] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da18dc99de0>, <ast.Constant object at 0x7da18dc989d0>, <ast.BinOp object at 0x7da18dc9a6b0>]]]]
return[tuple[[<ast.Call object at 0x7da18dc99c60>, <ast.Name object at 0x7da18dc9a7a0>, <ast.Name object at 0x7da18dc9a1d0>, <ast.Name object at 0x7da18dc99120>]]] | keyword[def] identifier[bandpass_order] ( identifier[f_stop1] , identifier[f_pass1] , identifier[f_pass2] , identifier[f_stop2] , identifier[dpass_dB] , identifier[dstop_dB] , identifier[fsamp] = literal[int] ):
literal[string]
identifier[dpass] = literal[int] - literal[int] **(- identifier[dpass_dB] / literal[int] )
identifier[dstop] = literal[int] **(- identifier[dstop_dB] / literal[int] )
identifier[Df1] =( identifier[f_pass1] - identifier[f_stop1] )/ identifier[fsamp]
identifier[Df2] =( identifier[f_stop2] - identifier[f_pass2] )/ identifier[fsamp]
identifier[b1] = literal[int]
identifier[b2] = literal[int]
identifier[b3] =- literal[int]
identifier[b4] = literal[int]
identifier[b5] =- literal[int]
identifier[b6] =- literal[int]
identifier[Df] = identifier[min] ( identifier[Df1] , identifier[Df2] )
identifier[Cinf] = identifier[np] . identifier[log10] ( identifier[dstop] )*( identifier[b1] * identifier[np] . identifier[log10] ( identifier[dpass] )** literal[int] + identifier[b2] * identifier[np] . identifier[log10] ( identifier[dpass] )+ identifier[b3] )+( identifier[b4] * identifier[np] . identifier[log10] ( identifier[dpass] )** literal[int] + identifier[b5] * identifier[np] . identifier[log10] ( identifier[dpass] )+ identifier[b6] )
identifier[g] =- literal[int] * identifier[np] . identifier[log10] ( identifier[dpass] / identifier[dstop] )- literal[int]
identifier[N] = identifier[Cinf] / identifier[Df] + identifier[g] * identifier[Df] + literal[int]
identifier[ff] = literal[int] * identifier[np] . identifier[array] ([ literal[int] , identifier[f_stop1] , identifier[f_pass1] , identifier[f_pass2] , identifier[f_stop2] , identifier[fsamp] / literal[int] ])/ identifier[fsamp]
identifier[aa] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[wts] = identifier[np] . identifier[array] ([ identifier[dpass] / identifier[dstop] , literal[int] , identifier[dpass] / identifier[dstop] ])
keyword[return] identifier[int] ( identifier[N] ), identifier[ff] , identifier[aa] , identifier[wts] | def bandpass_order(f_stop1, f_pass1, f_pass2, f_stop2, dpass_dB, dstop_dB, fsamp=1):
"""
Optimal FIR (equal ripple) Bandpass Order Determination
Text reference: Ifeachor, Digital Signal Processing a Practical Approach,
second edition, Prentice Hall, 2002.
Journal paper reference: F. Mintzer & B. Liu, Practical Design Rules for Optimum
FIR Bandpass Digital Filters, IEEE Transactions on Acoustics and Speech, pp.
204-206, April,1979.
"""
dpass = 1 - 10 ** (-dpass_dB / 20)
dstop = 10 ** (-dstop_dB / 20)
Df1 = (f_pass1 - f_stop1) / fsamp
Df2 = (f_stop2 - f_pass2) / fsamp
b1 = 0.01201
b2 = 0.09664
b3 = -0.51325
b4 = 0.00203
b5 = -0.5705
b6 = -0.44314
Df = min(Df1, Df2)
Cinf = np.log10(dstop) * (b1 * np.log10(dpass) ** 2 + b2 * np.log10(dpass) + b3) + (b4 * np.log10(dpass) ** 2 + b5 * np.log10(dpass) + b6)
g = -14.6 * np.log10(dpass / dstop) - 16.9
N = Cinf / Df + g * Df + 1
ff = 2 * np.array([0, f_stop1, f_pass1, f_pass2, f_stop2, fsamp / 2]) / fsamp
aa = np.array([0, 0, 1, 1, 0, 0])
wts = np.array([dpass / dstop, 1, dpass / dstop])
return (int(N), ff, aa, wts) |
def _to_fields(self, *values):
"""
Take a list of values, which must be primary keys of the model linked
to the related collection, and return a list of related fields.
"""
result = []
for related_instance in values:
if not isinstance(related_instance, model.RedisModel):
related_instance = self.related_field._model(related_instance)
result.append(getattr(related_instance, self.related_field.name))
return result | def function[_to_fields, parameter[self]]:
constant[
Take a list of values, which must be primary keys of the model linked
to the related collection, and return a list of related fields.
]
variable[result] assign[=] list[[]]
for taget[name[related_instance]] in starred[name[values]] begin[:]
if <ast.UnaryOp object at 0x7da1b135f070> begin[:]
variable[related_instance] assign[=] call[name[self].related_field._model, parameter[name[related_instance]]]
call[name[result].append, parameter[call[name[getattr], parameter[name[related_instance], name[self].related_field.name]]]]
return[name[result]] | keyword[def] identifier[_to_fields] ( identifier[self] ,* identifier[values] ):
literal[string]
identifier[result] =[]
keyword[for] identifier[related_instance] keyword[in] identifier[values] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[related_instance] , identifier[model] . identifier[RedisModel] ):
identifier[related_instance] = identifier[self] . identifier[related_field] . identifier[_model] ( identifier[related_instance] )
identifier[result] . identifier[append] ( identifier[getattr] ( identifier[related_instance] , identifier[self] . identifier[related_field] . identifier[name] ))
keyword[return] identifier[result] | def _to_fields(self, *values):
"""
Take a list of values, which must be primary keys of the model linked
to the related collection, and return a list of related fields.
"""
result = []
for related_instance in values:
if not isinstance(related_instance, model.RedisModel):
related_instance = self.related_field._model(related_instance) # depends on [control=['if'], data=[]]
result.append(getattr(related_instance, self.related_field.name)) # depends on [control=['for'], data=['related_instance']]
return result |
def change_password(self, current_password, new_password):
"""
Changes account password.
@param current_password: current password
@param new_password: new password
"""
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=self.auth_token.account_name,
attrs=attrs)
params = {sconstant.E_ACCOUNT: account,
sconstant.E_OLD_PASSWORD: current_password,
sconstant.E_PASSWORD: new_password}
self.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.ChangePasswordRequest,
params) | def function[change_password, parameter[self, current_password, new_password]]:
constant[
Changes account password.
@param current_password: current password
@param new_password: new password
]
variable[attrs] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0948910>], [<ast.Attribute object at 0x7da1b094bac0>]]
variable[account] assign[=] call[name[SOAPpy].Types.stringType, parameter[]]
variable[params] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0949c30>, <ast.Attribute object at 0x7da1b094bc10>, <ast.Attribute object at 0x7da1b09493c0>], [<ast.Name object at 0x7da1b094af20>, <ast.Name object at 0x7da1b0948b20>, <ast.Name object at 0x7da1b0949d50>]]
call[name[self].invoke, parameter[name[zconstant].NS_ZIMBRA_ACC_URL, name[sconstant].ChangePasswordRequest, name[params]]] | keyword[def] identifier[change_password] ( identifier[self] , identifier[current_password] , identifier[new_password] ):
literal[string]
identifier[attrs] ={ identifier[sconstant] . identifier[A_BY] : identifier[sconstant] . identifier[V_NAME] }
identifier[account] = identifier[SOAPpy] . identifier[Types] . identifier[stringType] ( identifier[data] = identifier[self] . identifier[auth_token] . identifier[account_name] ,
identifier[attrs] = identifier[attrs] )
identifier[params] ={ identifier[sconstant] . identifier[E_ACCOUNT] : identifier[account] ,
identifier[sconstant] . identifier[E_OLD_PASSWORD] : identifier[current_password] ,
identifier[sconstant] . identifier[E_PASSWORD] : identifier[new_password] }
identifier[self] . identifier[invoke] ( identifier[zconstant] . identifier[NS_ZIMBRA_ACC_URL] ,
identifier[sconstant] . identifier[ChangePasswordRequest] ,
identifier[params] ) | def change_password(self, current_password, new_password):
"""
Changes account password.
@param current_password: current password
@param new_password: new password
"""
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=self.auth_token.account_name, attrs=attrs)
params = {sconstant.E_ACCOUNT: account, sconstant.E_OLD_PASSWORD: current_password, sconstant.E_PASSWORD: new_password}
self.invoke(zconstant.NS_ZIMBRA_ACC_URL, sconstant.ChangePasswordRequest, params) |
def _process_ups(ups): # pragma: no cover
"""This function processes the UpdateInfo instances of the two
undo stacks (clustering and cluster metadata) and concatenates them
into a single UpdateInfo instance."""
if len(ups) == 0:
return
elif len(ups) == 1:
return ups[0]
elif len(ups) == 2:
up = ups[0]
up.update(ups[1])
return up
else:
raise NotImplementedError() | def function[_process_ups, parameter[ups]]:
constant[This function processes the UpdateInfo instances of the two
undo stacks (clustering and cluster metadata) and concatenates them
into a single UpdateInfo instance.]
if compare[call[name[len], parameter[name[ups]]] equal[==] constant[0]] begin[:]
return[None] | keyword[def] identifier[_process_ups] ( identifier[ups] ):
literal[string]
keyword[if] identifier[len] ( identifier[ups] )== literal[int] :
keyword[return]
keyword[elif] identifier[len] ( identifier[ups] )== literal[int] :
keyword[return] identifier[ups] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[ups] )== literal[int] :
identifier[up] = identifier[ups] [ literal[int] ]
identifier[up] . identifier[update] ( identifier[ups] [ literal[int] ])
keyword[return] identifier[up]
keyword[else] :
keyword[raise] identifier[NotImplementedError] () | def _process_ups(ups): # pragma: no cover
'This function processes the UpdateInfo instances of the two\n undo stacks (clustering and cluster metadata) and concatenates them\n into a single UpdateInfo instance.'
if len(ups) == 0:
return # depends on [control=['if'], data=[]]
elif len(ups) == 1:
return ups[0] # depends on [control=['if'], data=[]]
elif len(ups) == 2:
up = ups[0]
up.update(ups[1])
return up # depends on [control=['if'], data=[]]
else:
raise NotImplementedError() |
def _psplit(self):
"""
Split `self` at both north and south poles.
:return: A list of split StridedIntervals
"""
nsplit_list = self._nsplit()
psplit_list = [ ]
for si in nsplit_list:
psplit_list.extend(si._ssplit())
return psplit_list | def function[_psplit, parameter[self]]:
constant[
Split `self` at both north and south poles.
:return: A list of split StridedIntervals
]
variable[nsplit_list] assign[=] call[name[self]._nsplit, parameter[]]
variable[psplit_list] assign[=] list[[]]
for taget[name[si]] in starred[name[nsplit_list]] begin[:]
call[name[psplit_list].extend, parameter[call[name[si]._ssplit, parameter[]]]]
return[name[psplit_list]] | keyword[def] identifier[_psplit] ( identifier[self] ):
literal[string]
identifier[nsplit_list] = identifier[self] . identifier[_nsplit] ()
identifier[psplit_list] =[]
keyword[for] identifier[si] keyword[in] identifier[nsplit_list] :
identifier[psplit_list] . identifier[extend] ( identifier[si] . identifier[_ssplit] ())
keyword[return] identifier[psplit_list] | def _psplit(self):
"""
Split `self` at both north and south poles.
:return: A list of split StridedIntervals
"""
nsplit_list = self._nsplit()
psplit_list = []
for si in nsplit_list:
psplit_list.extend(si._ssplit()) # depends on [control=['for'], data=['si']]
return psplit_list |
def integrate(datasets_full, genes_list, batch_size=BATCH_SIZE,
verbose=VERBOSE, ds_names=None, dimred=DIMRED, approx=APPROX,
sigma=SIGMA, alpha=ALPHA, knn=KNN, geosketch=False,
geosketch_max=20000, n_iter=1, union=False, hvg=None):
"""Integrate a list of data sets.
Parameters
----------
datasets_full : `list` of `scipy.sparse.csr_matrix` or of `numpy.ndarray`
Data sets to integrate and correct.
genes_list: `list` of `list` of `string`
List of genes for each data set.
batch_size: `int`, optional (default: `5000`)
The batch size used in the alignment vector computation. Useful when
correcting very large (>100k samples) data sets. Set to large value
that runs within available memory.
verbose: `bool` or `int`, optional (default: 2)
When `True` or not equal to 0, prints logging output.
ds_names: `list` of `string`, optional
When `verbose=True`, reports data set names in logging output.
dimred: `int`, optional (default: 100)
Dimensionality of integrated embedding.
approx: `bool`, optional (default: `True`)
Use approximate nearest neighbors, greatly speeds up matching runtime.
sigma: `float`, optional (default: 15)
Correction smoothing parameter on Gaussian kernel.
alpha: `float`, optional (default: 0.10)
Alignment score minimum cutoff.
knn: `int`, optional (default: 20)
Number of nearest neighbors to use for matching.
hvg: `int`, optional (default: None)
Use this number of top highly variable genes based on dispersion.
Returns
-------
integrated, genes
Returns a two-tuple containing a list of `numpy.ndarray` with
integrated low dimensional embeddings and a single list of genes
containing the intersection of inputted genes.
"""
datasets_full = check_datasets(datasets_full)
datasets, genes = merge_datasets(datasets_full, genes_list,
ds_names=ds_names, union=union)
datasets_dimred, genes = process_data(datasets, genes, hvg=hvg,
dimred=dimred)
for _ in range(n_iter):
datasets_dimred = assemble(
datasets_dimred, # Assemble in low dimensional space.
verbose=verbose, knn=knn, sigma=sigma, approx=approx,
alpha=alpha, ds_names=ds_names, batch_size=batch_size,
geosketch=geosketch, geosketch_max=geosketch_max,
)
return datasets_dimred, genes | def function[integrate, parameter[datasets_full, genes_list, batch_size, verbose, ds_names, dimred, approx, sigma, alpha, knn, geosketch, geosketch_max, n_iter, union, hvg]]:
constant[Integrate a list of data sets.
Parameters
----------
datasets_full : `list` of `scipy.sparse.csr_matrix` or of `numpy.ndarray`
Data sets to integrate and correct.
genes_list: `list` of `list` of `string`
List of genes for each data set.
batch_size: `int`, optional (default: `5000`)
The batch size used in the alignment vector computation. Useful when
correcting very large (>100k samples) data sets. Set to large value
that runs within available memory.
verbose: `bool` or `int`, optional (default: 2)
When `True` or not equal to 0, prints logging output.
ds_names: `list` of `string`, optional
When `verbose=True`, reports data set names in logging output.
dimred: `int`, optional (default: 100)
Dimensionality of integrated embedding.
approx: `bool`, optional (default: `True`)
Use approximate nearest neighbors, greatly speeds up matching runtime.
sigma: `float`, optional (default: 15)
Correction smoothing parameter on Gaussian kernel.
alpha: `float`, optional (default: 0.10)
Alignment score minimum cutoff.
knn: `int`, optional (default: 20)
Number of nearest neighbors to use for matching.
hvg: `int`, optional (default: None)
Use this number of top highly variable genes based on dispersion.
Returns
-------
integrated, genes
Returns a two-tuple containing a list of `numpy.ndarray` with
integrated low dimensional embeddings and a single list of genes
containing the intersection of inputted genes.
]
variable[datasets_full] assign[=] call[name[check_datasets], parameter[name[datasets_full]]]
<ast.Tuple object at 0x7da1b11a5330> assign[=] call[name[merge_datasets], parameter[name[datasets_full], name[genes_list]]]
<ast.Tuple object at 0x7da1b11a5210> assign[=] call[name[process_data], parameter[name[datasets], name[genes]]]
for taget[name[_]] in starred[call[name[range], parameter[name[n_iter]]]] begin[:]
variable[datasets_dimred] assign[=] call[name[assemble], parameter[name[datasets_dimred]]]
return[tuple[[<ast.Name object at 0x7da1b11a7c40>, <ast.Name object at 0x7da1b11a51e0>]]] | keyword[def] identifier[integrate] ( identifier[datasets_full] , identifier[genes_list] , identifier[batch_size] = identifier[BATCH_SIZE] ,
identifier[verbose] = identifier[VERBOSE] , identifier[ds_names] = keyword[None] , identifier[dimred] = identifier[DIMRED] , identifier[approx] = identifier[APPROX] ,
identifier[sigma] = identifier[SIGMA] , identifier[alpha] = identifier[ALPHA] , identifier[knn] = identifier[KNN] , identifier[geosketch] = keyword[False] ,
identifier[geosketch_max] = literal[int] , identifier[n_iter] = literal[int] , identifier[union] = keyword[False] , identifier[hvg] = keyword[None] ):
literal[string]
identifier[datasets_full] = identifier[check_datasets] ( identifier[datasets_full] )
identifier[datasets] , identifier[genes] = identifier[merge_datasets] ( identifier[datasets_full] , identifier[genes_list] ,
identifier[ds_names] = identifier[ds_names] , identifier[union] = identifier[union] )
identifier[datasets_dimred] , identifier[genes] = identifier[process_data] ( identifier[datasets] , identifier[genes] , identifier[hvg] = identifier[hvg] ,
identifier[dimred] = identifier[dimred] )
keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[n_iter] ):
identifier[datasets_dimred] = identifier[assemble] (
identifier[datasets_dimred] ,
identifier[verbose] = identifier[verbose] , identifier[knn] = identifier[knn] , identifier[sigma] = identifier[sigma] , identifier[approx] = identifier[approx] ,
identifier[alpha] = identifier[alpha] , identifier[ds_names] = identifier[ds_names] , identifier[batch_size] = identifier[batch_size] ,
identifier[geosketch] = identifier[geosketch] , identifier[geosketch_max] = identifier[geosketch_max] ,
)
keyword[return] identifier[datasets_dimred] , identifier[genes] | def integrate(datasets_full, genes_list, batch_size=BATCH_SIZE, verbose=VERBOSE, ds_names=None, dimred=DIMRED, approx=APPROX, sigma=SIGMA, alpha=ALPHA, knn=KNN, geosketch=False, geosketch_max=20000, n_iter=1, union=False, hvg=None):
"""Integrate a list of data sets.
Parameters
----------
datasets_full : `list` of `scipy.sparse.csr_matrix` or of `numpy.ndarray`
Data sets to integrate and correct.
genes_list: `list` of `list` of `string`
List of genes for each data set.
batch_size: `int`, optional (default: `5000`)
The batch size used in the alignment vector computation. Useful when
correcting very large (>100k samples) data sets. Set to large value
that runs within available memory.
verbose: `bool` or `int`, optional (default: 2)
When `True` or not equal to 0, prints logging output.
ds_names: `list` of `string`, optional
When `verbose=True`, reports data set names in logging output.
dimred: `int`, optional (default: 100)
Dimensionality of integrated embedding.
approx: `bool`, optional (default: `True`)
Use approximate nearest neighbors, greatly speeds up matching runtime.
sigma: `float`, optional (default: 15)
Correction smoothing parameter on Gaussian kernel.
alpha: `float`, optional (default: 0.10)
Alignment score minimum cutoff.
knn: `int`, optional (default: 20)
Number of nearest neighbors to use for matching.
hvg: `int`, optional (default: None)
Use this number of top highly variable genes based on dispersion.
Returns
-------
integrated, genes
Returns a two-tuple containing a list of `numpy.ndarray` with
integrated low dimensional embeddings and a single list of genes
containing the intersection of inputted genes.
"""
datasets_full = check_datasets(datasets_full)
(datasets, genes) = merge_datasets(datasets_full, genes_list, ds_names=ds_names, union=union)
(datasets_dimred, genes) = process_data(datasets, genes, hvg=hvg, dimred=dimred)
for _ in range(n_iter): # Assemble in low dimensional space.
datasets_dimred = assemble(datasets_dimred, verbose=verbose, knn=knn, sigma=sigma, approx=approx, alpha=alpha, ds_names=ds_names, batch_size=batch_size, geosketch=geosketch, geosketch_max=geosketch_max) # depends on [control=['for'], data=[]]
return (datasets_dimred, genes) |
def call_for_each_tower(self, tower_fn):
"""
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
"""
# if tower_fn returns [(grad, var), ...], this returns #GPU x #VAR x 2
return DataParallelBuilder.build_on_towers(
self.towers,
tower_fn,
# use no variable scope for the first tower
use_vs=[False] + [True] * (len(self.towers) - 1)) | def function[call_for_each_tower, parameter[self, tower_fn]]:
constant[
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
]
return[call[name[DataParallelBuilder].build_on_towers, parameter[name[self].towers, name[tower_fn]]]] | keyword[def] identifier[call_for_each_tower] ( identifier[self] , identifier[tower_fn] ):
literal[string]
keyword[return] identifier[DataParallelBuilder] . identifier[build_on_towers] (
identifier[self] . identifier[towers] ,
identifier[tower_fn] ,
identifier[use_vs] =[ keyword[False] ]+[ keyword[True] ]*( identifier[len] ( identifier[self] . identifier[towers] )- literal[int] )) | def call_for_each_tower(self, tower_fn):
"""
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
"""
# if tower_fn returns [(grad, var), ...], this returns #GPU x #VAR x 2
# use no variable scope for the first tower
return DataParallelBuilder.build_on_towers(self.towers, tower_fn, use_vs=[False] + [True] * (len(self.towers) - 1)) |
def get_street_from_xy(self, **kwargs):
"""Obtain a list of streets around the specified point.
Args:
latitude (double): Latitude in decimal degrees.
longitude (double): Longitude in decimal degrees.
radius (int): Radius (in meters) of the search.
lang (str): Language code (*es* or *en*).
Returns:
Status boolean and parsed response (list[Street]), or message string
in case of error.
"""
# Endpoint parameters
params = {
'coordinateX': kwargs.get('longitude'),
'coordinateY': kwargs.get('latitude'),
'Radius': kwargs.get('radius'),
'cultureInfo': util.language_code(kwargs.get('lang'))
}
# Request
result = self.make_request('geo', 'get_street_from_xy', **params)
# Funny endpoint, no status code
if not util.check_result(result, 'site'):
return False, 'UNKNOWN ERROR'
# Parse
values = util.response_list(result, 'site')
return True, [emtype.Street(**a) for a in values] | def function[get_street_from_xy, parameter[self]]:
constant[Obtain a list of streets around the specified point.
Args:
latitude (double): Latitude in decimal degrees.
longitude (double): Longitude in decimal degrees.
radius (int): Radius (in meters) of the search.
lang (str): Language code (*es* or *en*).
Returns:
Status boolean and parsed response (list[Street]), or message string
in case of error.
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bd4b80>, <ast.Constant object at 0x7da1b0bd5f90>, <ast.Constant object at 0x7da1b0bd5900>, <ast.Constant object at 0x7da1b0bd6440>], [<ast.Call object at 0x7da1b0bd4a30>, <ast.Call object at 0x7da1b0bd7c70>, <ast.Call object at 0x7da1b0bd4d60>, <ast.Call object at 0x7da1b0bd40d0>]]
variable[result] assign[=] call[name[self].make_request, parameter[constant[geo], constant[get_street_from_xy]]]
if <ast.UnaryOp object at 0x7da1b0bd9cc0> begin[:]
return[tuple[[<ast.Constant object at 0x7da1b0bd99c0>, <ast.Constant object at 0x7da1b0bd87c0>]]]
variable[values] assign[=] call[name[util].response_list, parameter[name[result], constant[site]]]
return[tuple[[<ast.Constant object at 0x7da1b0bd9f00>, <ast.ListComp object at 0x7da1b0bd8370>]]] | keyword[def] identifier[get_street_from_xy] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[params] ={
literal[string] : identifier[kwargs] . identifier[get] ( literal[string] ),
literal[string] : identifier[kwargs] . identifier[get] ( literal[string] ),
literal[string] : identifier[kwargs] . identifier[get] ( literal[string] ),
literal[string] : identifier[util] . identifier[language_code] ( identifier[kwargs] . identifier[get] ( literal[string] ))
}
identifier[result] = identifier[self] . identifier[make_request] ( literal[string] , literal[string] ,** identifier[params] )
keyword[if] keyword[not] identifier[util] . identifier[check_result] ( identifier[result] , literal[string] ):
keyword[return] keyword[False] , literal[string]
identifier[values] = identifier[util] . identifier[response_list] ( identifier[result] , literal[string] )
keyword[return] keyword[True] ,[ identifier[emtype] . identifier[Street] (** identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[values] ] | def get_street_from_xy(self, **kwargs):
"""Obtain a list of streets around the specified point.
Args:
latitude (double): Latitude in decimal degrees.
longitude (double): Longitude in decimal degrees.
radius (int): Radius (in meters) of the search.
lang (str): Language code (*es* or *en*).
Returns:
Status boolean and parsed response (list[Street]), or message string
in case of error.
"""
# Endpoint parameters
params = {'coordinateX': kwargs.get('longitude'), 'coordinateY': kwargs.get('latitude'), 'Radius': kwargs.get('radius'), 'cultureInfo': util.language_code(kwargs.get('lang'))}
# Request
result = self.make_request('geo', 'get_street_from_xy', **params)
# Funny endpoint, no status code
if not util.check_result(result, 'site'):
return (False, 'UNKNOWN ERROR') # depends on [control=['if'], data=[]]
# Parse
values = util.response_list(result, 'site')
return (True, [emtype.Street(**a) for a in values]) |
def nla_put_msecs(msg, attrtype, msecs):
"""Add msecs Netlink attribute to Netlink message.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L737
Positional arguments:
msg -- Netlink message (nl_msg class instance).
attrtype -- attribute type (integer).
msecs -- number of msecs (int(), c_uint64(), or c_ulong()).
Returns:
0 on success or a negative error code.
"""
if isinstance(msecs, c_uint64):
pass
elif isinstance(msecs, c_ulong):
msecs = c_uint64(msecs.value)
else:
msecs = c_uint64(msecs)
return nla_put_u64(msg, attrtype, msecs) | def function[nla_put_msecs, parameter[msg, attrtype, msecs]]:
constant[Add msecs Netlink attribute to Netlink message.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L737
Positional arguments:
msg -- Netlink message (nl_msg class instance).
attrtype -- attribute type (integer).
msecs -- number of msecs (int(), c_uint64(), or c_ulong()).
Returns:
0 on success or a negative error code.
]
if call[name[isinstance], parameter[name[msecs], name[c_uint64]]] begin[:]
pass
return[call[name[nla_put_u64], parameter[name[msg], name[attrtype], name[msecs]]]] | keyword[def] identifier[nla_put_msecs] ( identifier[msg] , identifier[attrtype] , identifier[msecs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[msecs] , identifier[c_uint64] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[msecs] , identifier[c_ulong] ):
identifier[msecs] = identifier[c_uint64] ( identifier[msecs] . identifier[value] )
keyword[else] :
identifier[msecs] = identifier[c_uint64] ( identifier[msecs] )
keyword[return] identifier[nla_put_u64] ( identifier[msg] , identifier[attrtype] , identifier[msecs] ) | def nla_put_msecs(msg, attrtype, msecs):
"""Add msecs Netlink attribute to Netlink message.
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L737
Positional arguments:
msg -- Netlink message (nl_msg class instance).
attrtype -- attribute type (integer).
msecs -- number of msecs (int(), c_uint64(), or c_ulong()).
Returns:
0 on success or a negative error code.
"""
if isinstance(msecs, c_uint64):
pass # depends on [control=['if'], data=[]]
elif isinstance(msecs, c_ulong):
msecs = c_uint64(msecs.value) # depends on [control=['if'], data=[]]
else:
msecs = c_uint64(msecs)
return nla_put_u64(msg, attrtype, msecs) |
def check_token(self, token=None):
"""
ADMIN ONLY. Returns True or False, depending on whether the current
token is valid.
"""
if token is None:
token = self.token
resp, resp_body = self.method_head("tokens/%s" % token, admin=True)
if resp.status_code in (401, 403):
raise exc.AuthorizationFailure("You must be an admin to make this "
"call.")
return 200 <= resp.status_code < 300 | def function[check_token, parameter[self, token]]:
constant[
ADMIN ONLY. Returns True or False, depending on whether the current
token is valid.
]
if compare[name[token] is constant[None]] begin[:]
variable[token] assign[=] name[self].token
<ast.Tuple object at 0x7da2054a6ef0> assign[=] call[name[self].method_head, parameter[binary_operation[constant[tokens/%s] <ast.Mod object at 0x7da2590d6920> name[token]]]]
if compare[name[resp].status_code in tuple[[<ast.Constant object at 0x7da2054a6410>, <ast.Constant object at 0x7da2054a5f60>]]] begin[:]
<ast.Raise object at 0x7da2054a59c0>
return[compare[constant[200] less_or_equal[<=] name[resp].status_code]] | keyword[def] identifier[check_token] ( identifier[self] , identifier[token] = keyword[None] ):
literal[string]
keyword[if] identifier[token] keyword[is] keyword[None] :
identifier[token] = identifier[self] . identifier[token]
identifier[resp] , identifier[resp_body] = identifier[self] . identifier[method_head] ( literal[string] % identifier[token] , identifier[admin] = keyword[True] )
keyword[if] identifier[resp] . identifier[status_code] keyword[in] ( literal[int] , literal[int] ):
keyword[raise] identifier[exc] . identifier[AuthorizationFailure] ( literal[string]
literal[string] )
keyword[return] literal[int] <= identifier[resp] . identifier[status_code] < literal[int] | def check_token(self, token=None):
"""
ADMIN ONLY. Returns True or False, depending on whether the current
token is valid.
"""
if token is None:
token = self.token # depends on [control=['if'], data=['token']]
(resp, resp_body) = self.method_head('tokens/%s' % token, admin=True)
if resp.status_code in (401, 403):
raise exc.AuthorizationFailure('You must be an admin to make this call.') # depends on [control=['if'], data=[]]
return 200 <= resp.status_code < 300 |
def reset(name, soft=False, call=None):
'''
To reset a VM using its name
.. note::
If ``soft=True`` then issues a command to the guest operating system
asking it to perform a reboot. Otherwise hypervisor will terminate VM and start it again.
Default is soft=False
For ``soft=True`` vmtools should be installed on guest system.
CLI Example:
.. code-block:: bash
salt-cloud -a reset vmname
salt-cloud -a reset vmname soft=True
'''
if call != 'action':
raise SaltCloudSystemExit(
'The reset action must be called with '
'-a or --action.'
)
vm_properties = [
"name",
"summary.runtime.powerState"
]
vm_list = salt.utils.vmware.get_mors_with_properties(_get_si(), vim.VirtualMachine, vm_properties)
for vm in vm_list:
if vm["name"] == name:
if vm["summary.runtime.powerState"] == "suspended" or vm["summary.runtime.powerState"] == "poweredOff":
ret = 'cannot reset in suspended/powered off state'
log.info('VM %s %s', name, ret)
return ret
try:
log.info('Resetting VM %s', name)
if soft:
vm["object"].RebootGuest()
else:
task = vm["object"].ResetVM_Task()
salt.utils.vmware.wait_for_task(task, name, 'reset')
except Exception as exc:
log.error(
'Error while resetting VM %s: %s',
name, exc,
# Show the traceback if the debug logging level is enabled
exc_info_on_loglevel=logging.DEBUG
)
return 'failed to reset'
return 'reset' | def function[reset, parameter[name, soft, call]]:
constant[
To reset a VM using its name
.. note::
If ``soft=True`` then issues a command to the guest operating system
asking it to perform a reboot. Otherwise hypervisor will terminate VM and start it again.
Default is soft=False
For ``soft=True`` vmtools should be installed on guest system.
CLI Example:
.. code-block:: bash
salt-cloud -a reset vmname
salt-cloud -a reset vmname soft=True
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da18f811e10>
variable[vm_properties] assign[=] list[[<ast.Constant object at 0x7da18f812b30>, <ast.Constant object at 0x7da18f810340>]]
variable[vm_list] assign[=] call[name[salt].utils.vmware.get_mors_with_properties, parameter[call[name[_get_si], parameter[]], name[vim].VirtualMachine, name[vm_properties]]]
for taget[name[vm]] in starred[name[vm_list]] begin[:]
if compare[call[name[vm]][constant[name]] equal[==] name[name]] begin[:]
if <ast.BoolOp object at 0x7da18f812650> begin[:]
variable[ret] assign[=] constant[cannot reset in suspended/powered off state]
call[name[log].info, parameter[constant[VM %s %s], name[name], name[ret]]]
return[name[ret]]
<ast.Try object at 0x7da18f8126b0>
return[constant[reset]] | keyword[def] identifier[reset] ( identifier[name] , identifier[soft] = keyword[False] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
identifier[vm_properties] =[
literal[string] ,
literal[string]
]
identifier[vm_list] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_mors_with_properties] ( identifier[_get_si] (), identifier[vim] . identifier[VirtualMachine] , identifier[vm_properties] )
keyword[for] identifier[vm] keyword[in] identifier[vm_list] :
keyword[if] identifier[vm] [ literal[string] ]== identifier[name] :
keyword[if] identifier[vm] [ literal[string] ]== literal[string] keyword[or] identifier[vm] [ literal[string] ]== literal[string] :
identifier[ret] = literal[string]
identifier[log] . identifier[info] ( literal[string] , identifier[name] , identifier[ret] )
keyword[return] identifier[ret]
keyword[try] :
identifier[log] . identifier[info] ( literal[string] , identifier[name] )
keyword[if] identifier[soft] :
identifier[vm] [ literal[string] ]. identifier[RebootGuest] ()
keyword[else] :
identifier[task] = identifier[vm] [ literal[string] ]. identifier[ResetVM_Task] ()
identifier[salt] . identifier[utils] . identifier[vmware] . identifier[wait_for_task] ( identifier[task] , identifier[name] , literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
identifier[log] . identifier[error] (
literal[string] ,
identifier[name] , identifier[exc] ,
identifier[exc_info_on_loglevel] = identifier[logging] . identifier[DEBUG]
)
keyword[return] literal[string]
keyword[return] literal[string] | def reset(name, soft=False, call=None):
"""
To reset a VM using its name
.. note::
If ``soft=True`` then issues a command to the guest operating system
asking it to perform a reboot. Otherwise hypervisor will terminate VM and start it again.
Default is soft=False
For ``soft=True`` vmtools should be installed on guest system.
CLI Example:
.. code-block:: bash
salt-cloud -a reset vmname
salt-cloud -a reset vmname soft=True
"""
if call != 'action':
raise SaltCloudSystemExit('The reset action must be called with -a or --action.') # depends on [control=['if'], data=[]]
vm_properties = ['name', 'summary.runtime.powerState']
vm_list = salt.utils.vmware.get_mors_with_properties(_get_si(), vim.VirtualMachine, vm_properties)
for vm in vm_list:
if vm['name'] == name:
if vm['summary.runtime.powerState'] == 'suspended' or vm['summary.runtime.powerState'] == 'poweredOff':
ret = 'cannot reset in suspended/powered off state'
log.info('VM %s %s', name, ret)
return ret # depends on [control=['if'], data=[]]
try:
log.info('Resetting VM %s', name)
if soft:
vm['object'].RebootGuest() # depends on [control=['if'], data=[]]
else:
task = vm['object'].ResetVM_Task()
salt.utils.vmware.wait_for_task(task, name, 'reset') # depends on [control=['try'], data=[]]
except Exception as exc:
# Show the traceback if the debug logging level is enabled
log.error('Error while resetting VM %s: %s', name, exc, exc_info_on_loglevel=logging.DEBUG)
return 'failed to reset' # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=['name']] # depends on [control=['for'], data=['vm']]
return 'reset' |
def default_signal_map():
""" Create the default signal map for this system.
:return: dict
"""
name_map = {
'SIGTSTP': None,
'SIGTTIN': None,
'SIGTTOU': None,
'SIGTERM': 'terminate'}
signal_map = {}
for name, target in list(name_map.items()):
if hasattr(signal, name):
signal_map[getattr(signal, name)] = target
return signal_map | def function[default_signal_map, parameter[]]:
constant[ Create the default signal map for this system.
:return: dict
]
variable[name_map] assign[=] dictionary[[<ast.Constant object at 0x7da1b2344c70>, <ast.Constant object at 0x7da1b2345db0>, <ast.Constant object at 0x7da1b2345180>, <ast.Constant object at 0x7da1b2346140>], [<ast.Constant object at 0x7da1b2344d90>, <ast.Constant object at 0x7da1b2344a60>, <ast.Constant object at 0x7da1b2346470>, <ast.Constant object at 0x7da1b23458a0>]]
variable[signal_map] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2347c70>, <ast.Name object at 0x7da1b2345a20>]]] in starred[call[name[list], parameter[call[name[name_map].items, parameter[]]]]] begin[:]
if call[name[hasattr], parameter[name[signal], name[name]]] begin[:]
call[name[signal_map]][call[name[getattr], parameter[name[signal], name[name]]]] assign[=] name[target]
return[name[signal_map]] | keyword[def] identifier[default_signal_map] ():
literal[string]
identifier[name_map] ={
literal[string] : keyword[None] ,
literal[string] : keyword[None] ,
literal[string] : keyword[None] ,
literal[string] : literal[string] }
identifier[signal_map] ={}
keyword[for] identifier[name] , identifier[target] keyword[in] identifier[list] ( identifier[name_map] . identifier[items] ()):
keyword[if] identifier[hasattr] ( identifier[signal] , identifier[name] ):
identifier[signal_map] [ identifier[getattr] ( identifier[signal] , identifier[name] )]= identifier[target]
keyword[return] identifier[signal_map] | def default_signal_map():
""" Create the default signal map for this system.
:return: dict
"""
name_map = {'SIGTSTP': None, 'SIGTTIN': None, 'SIGTTOU': None, 'SIGTERM': 'terminate'}
signal_map = {}
for (name, target) in list(name_map.items()):
if hasattr(signal, name):
signal_map[getattr(signal, name)] = target # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return signal_map |
def _make_signature(self, header_b64, payload_b64, signing_key):
"""
Sign a serialized header and payload.
Return the urlsafe-base64-encoded signature.
"""
token_segments = [header_b64, payload_b64]
signing_input = b'.'.join(token_segments)
signer = self._get_signer(signing_key)
signer.update(signing_input)
signature = signer.finalize()
raw_signature = der_to_raw_signature(signature, signing_key.curve)
return base64url_encode(raw_signature) | def function[_make_signature, parameter[self, header_b64, payload_b64, signing_key]]:
constant[
Sign a serialized header and payload.
Return the urlsafe-base64-encoded signature.
]
variable[token_segments] assign[=] list[[<ast.Name object at 0x7da18f810c10>, <ast.Name object at 0x7da18f810850>]]
variable[signing_input] assign[=] call[constant[b'.'].join, parameter[name[token_segments]]]
variable[signer] assign[=] call[name[self]._get_signer, parameter[name[signing_key]]]
call[name[signer].update, parameter[name[signing_input]]]
variable[signature] assign[=] call[name[signer].finalize, parameter[]]
variable[raw_signature] assign[=] call[name[der_to_raw_signature], parameter[name[signature], name[signing_key].curve]]
return[call[name[base64url_encode], parameter[name[raw_signature]]]] | keyword[def] identifier[_make_signature] ( identifier[self] , identifier[header_b64] , identifier[payload_b64] , identifier[signing_key] ):
literal[string]
identifier[token_segments] =[ identifier[header_b64] , identifier[payload_b64] ]
identifier[signing_input] = literal[string] . identifier[join] ( identifier[token_segments] )
identifier[signer] = identifier[self] . identifier[_get_signer] ( identifier[signing_key] )
identifier[signer] . identifier[update] ( identifier[signing_input] )
identifier[signature] = identifier[signer] . identifier[finalize] ()
identifier[raw_signature] = identifier[der_to_raw_signature] ( identifier[signature] , identifier[signing_key] . identifier[curve] )
keyword[return] identifier[base64url_encode] ( identifier[raw_signature] ) | def _make_signature(self, header_b64, payload_b64, signing_key):
"""
Sign a serialized header and payload.
Return the urlsafe-base64-encoded signature.
"""
token_segments = [header_b64, payload_b64]
signing_input = b'.'.join(token_segments)
signer = self._get_signer(signing_key)
signer.update(signing_input)
signature = signer.finalize()
raw_signature = der_to_raw_signature(signature, signing_key.curve)
return base64url_encode(raw_signature) |
def _write_coco_results(self, _coco, detections):
""" example results
[{"image_id": 42,
"category_id": 18,
"bbox": [258.15,41.29,348.26,243.78],
"score": 0.236}, ...]
"""
cats = [cat['name'] for cat in _coco.loadCats(_coco.getCatIds())]
class_to_coco_ind = dict(zip(cats, _coco.getCatIds()))
results = []
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
logger.info('collecting %s results (%d/%d)' % (cls, cls_ind, self.num_classes - 1))
coco_cat_id = class_to_coco_ind[cls]
results.extend(self._coco_results_one_category(detections[cls_ind], coco_cat_id))
logger.info('writing results json to %s' % self._result_file)
with open(self._result_file, 'w') as f:
json.dump(results, f, sort_keys=True, indent=4) | def function[_write_coco_results, parameter[self, _coco, detections]]:
constant[ example results
[{"image_id": 42,
"category_id": 18,
"bbox": [258.15,41.29,348.26,243.78],
"score": 0.236}, ...]
]
variable[cats] assign[=] <ast.ListComp object at 0x7da1b1fb9d50>
variable[class_to_coco_ind] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[cats], call[name[_coco].getCatIds, parameter[]]]]]]
variable[results] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b1fbb250>, <ast.Name object at 0x7da1b1fbb280>]]] in starred[call[name[enumerate], parameter[name[self].classes]]] begin[:]
if compare[name[cls] equal[==] constant[__background__]] begin[:]
continue
call[name[logger].info, parameter[binary_operation[constant[collecting %s results (%d/%d)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1fb97e0>, <ast.Name object at 0x7da1b1fb98d0>, <ast.BinOp object at 0x7da1b1fb9840>]]]]]
variable[coco_cat_id] assign[=] call[name[class_to_coco_ind]][name[cls]]
call[name[results].extend, parameter[call[name[self]._coco_results_one_category, parameter[call[name[detections]][name[cls_ind]], name[coco_cat_id]]]]]
call[name[logger].info, parameter[binary_operation[constant[writing results json to %s] <ast.Mod object at 0x7da2590d6920> name[self]._result_file]]]
with call[name[open], parameter[name[self]._result_file, constant[w]]] begin[:]
call[name[json].dump, parameter[name[results], name[f]]] | keyword[def] identifier[_write_coco_results] ( identifier[self] , identifier[_coco] , identifier[detections] ):
literal[string]
identifier[cats] =[ identifier[cat] [ literal[string] ] keyword[for] identifier[cat] keyword[in] identifier[_coco] . identifier[loadCats] ( identifier[_coco] . identifier[getCatIds] ())]
identifier[class_to_coco_ind] = identifier[dict] ( identifier[zip] ( identifier[cats] , identifier[_coco] . identifier[getCatIds] ()))
identifier[results] =[]
keyword[for] identifier[cls_ind] , identifier[cls] keyword[in] identifier[enumerate] ( identifier[self] . identifier[classes] ):
keyword[if] identifier[cls] == literal[string] :
keyword[continue]
identifier[logger] . identifier[info] ( literal[string] %( identifier[cls] , identifier[cls_ind] , identifier[self] . identifier[num_classes] - literal[int] ))
identifier[coco_cat_id] = identifier[class_to_coco_ind] [ identifier[cls] ]
identifier[results] . identifier[extend] ( identifier[self] . identifier[_coco_results_one_category] ( identifier[detections] [ identifier[cls_ind] ], identifier[coco_cat_id] ))
identifier[logger] . identifier[info] ( literal[string] % identifier[self] . identifier[_result_file] )
keyword[with] identifier[open] ( identifier[self] . identifier[_result_file] , literal[string] ) keyword[as] identifier[f] :
identifier[json] . identifier[dump] ( identifier[results] , identifier[f] , identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] ) | def _write_coco_results(self, _coco, detections):
""" example results
[{"image_id": 42,
"category_id": 18,
"bbox": [258.15,41.29,348.26,243.78],
"score": 0.236}, ...]
"""
cats = [cat['name'] for cat in _coco.loadCats(_coco.getCatIds())]
class_to_coco_ind = dict(zip(cats, _coco.getCatIds()))
results = []
for (cls_ind, cls) in enumerate(self.classes):
if cls == '__background__':
continue # depends on [control=['if'], data=[]]
logger.info('collecting %s results (%d/%d)' % (cls, cls_ind, self.num_classes - 1))
coco_cat_id = class_to_coco_ind[cls]
results.extend(self._coco_results_one_category(detections[cls_ind], coco_cat_id)) # depends on [control=['for'], data=[]]
logger.info('writing results json to %s' % self._result_file)
with open(self._result_file, 'w') as f:
json.dump(results, f, sort_keys=True, indent=4) # depends on [control=['with'], data=['f']] |
def ck_portf_001(self):
''' 3-6負乖離且向上,三日內最大量,成交量大於 1000 張,收盤價大於 10 元。(較嚴謹的選股)'''
return self.a.MAO(3,6)[1] == '↑'.decode('utf-8') and (self.a.MAO(3,6)[0][1][-1] < 0 or ( self.a.MAO(3,6)[0][1][-1] < 1 and self.a.MAO(3,6)[0][1][-1] > 0 and self.a.MAO(3,6)[0][1][-2] < 0 and self.a.MAO(3,6)[0][0] == 3)) and self.a.VOLMAX3 and self.a.stock_vol[-1] > 1000*1000 and self.a.raw_data[-1] > 10 | def function[ck_portf_001, parameter[self]]:
constant[ 3-6負乖離且向上,三日內最大量,成交量大於 1000 張,收盤價大於 10 元。(較嚴謹的選股)]
return[<ast.BoolOp object at 0x7da1b197ff10>] | keyword[def] identifier[ck_portf_001] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ]== literal[string] . identifier[decode] ( literal[string] ) keyword[and] ( identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ][ literal[int] ][- literal[int] ]< literal[int] keyword[or] ( identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ][ literal[int] ][- literal[int] ]< literal[int] keyword[and] identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ][ literal[int] ][- literal[int] ]> literal[int] keyword[and] identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ][ literal[int] ][- literal[int] ]< literal[int] keyword[and] identifier[self] . identifier[a] . identifier[MAO] ( literal[int] , literal[int] )[ literal[int] ][ literal[int] ]== literal[int] )) keyword[and] identifier[self] . identifier[a] . identifier[VOLMAX3] keyword[and] identifier[self] . identifier[a] . identifier[stock_vol] [- literal[int] ]> literal[int] * literal[int] keyword[and] identifier[self] . identifier[a] . identifier[raw_data] [- literal[int] ]> literal[int] | def ck_portf_001(self):
""" 3-6負乖離且向上,三日內最大量,成交量大於 1000 張,收盤價大於 10 元。(較嚴謹的選股)"""
return self.a.MAO(3, 6)[1] == '↑'.decode('utf-8') and (self.a.MAO(3, 6)[0][1][-1] < 0 or (self.a.MAO(3, 6)[0][1][-1] < 1 and self.a.MAO(3, 6)[0][1][-1] > 0 and (self.a.MAO(3, 6)[0][1][-2] < 0) and (self.a.MAO(3, 6)[0][0] == 3))) and self.a.VOLMAX3 and (self.a.stock_vol[-1] > 1000 * 1000) and (self.a.raw_data[-1] > 10) |
def xml_output(f):
"""
Set content-type for response to WEB-REQUEST to 'text/xml'
"""
@wraps(f)
def xml_output_inner_fn(*args, **kwargs):
ret_val = f(*args, **kwargs)
if isinstance(JobContext.get_current_context(), WebJobContext):
JobContext.get_current_context().add_responder(
MimeSetterWebTaskResponder('text/xml'))
return ret_val
return xml_output_inner_fn | def function[xml_output, parameter[f]]:
constant[
Set content-type for response to WEB-REQUEST to 'text/xml'
]
def function[xml_output_inner_fn, parameter[]]:
variable[ret_val] assign[=] call[name[f], parameter[<ast.Starred object at 0x7da20c6e5ba0>]]
if call[name[isinstance], parameter[call[name[JobContext].get_current_context, parameter[]], name[WebJobContext]]] begin[:]
call[call[name[JobContext].get_current_context, parameter[]].add_responder, parameter[call[name[MimeSetterWebTaskResponder], parameter[constant[text/xml]]]]]
return[name[ret_val]]
return[name[xml_output_inner_fn]] | keyword[def] identifier[xml_output] ( identifier[f] ):
literal[string]
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[xml_output_inner_fn] (* identifier[args] ,** identifier[kwargs] ):
identifier[ret_val] = identifier[f] (* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[JobContext] . identifier[get_current_context] (), identifier[WebJobContext] ):
identifier[JobContext] . identifier[get_current_context] (). identifier[add_responder] (
identifier[MimeSetterWebTaskResponder] ( literal[string] ))
keyword[return] identifier[ret_val]
keyword[return] identifier[xml_output_inner_fn] | def xml_output(f):
"""
Set content-type for response to WEB-REQUEST to 'text/xml'
"""
@wraps(f)
def xml_output_inner_fn(*args, **kwargs):
ret_val = f(*args, **kwargs)
if isinstance(JobContext.get_current_context(), WebJobContext):
JobContext.get_current_context().add_responder(MimeSetterWebTaskResponder('text/xml')) # depends on [control=['if'], data=[]]
return ret_val
return xml_output_inner_fn |
def get(cls, **kwargs):
"""Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2)
"""
fields = {}
for field in cls.url_fields:
value = kwargs.pop(field, None)
if value is None:
cls._handle_wrong_field(field, ATTR_TYPE_URL)
fields[field] = value
# Create an instance of the model class and make the GET request
model = cls(**fields)
model._populate(**kwargs)
return model | def function[get, parameter[cls]]:
constant[Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2)
]
variable[fields] assign[=] dictionary[[], []]
for taget[name[field]] in starred[name[cls].url_fields] begin[:]
variable[value] assign[=] call[name[kwargs].pop, parameter[name[field], constant[None]]]
if compare[name[value] is constant[None]] begin[:]
call[name[cls]._handle_wrong_field, parameter[name[field], name[ATTR_TYPE_URL]]]
call[name[fields]][name[field]] assign[=] name[value]
variable[model] assign[=] call[name[cls], parameter[]]
call[name[model]._populate, parameter[]]
return[name[model]] | keyword[def] identifier[get] ( identifier[cls] ,** identifier[kwargs] ):
literal[string]
identifier[fields] ={}
keyword[for] identifier[field] keyword[in] identifier[cls] . identifier[url_fields] :
identifier[value] = identifier[kwargs] . identifier[pop] ( identifier[field] , keyword[None] )
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[cls] . identifier[_handle_wrong_field] ( identifier[field] , identifier[ATTR_TYPE_URL] )
identifier[fields] [ identifier[field] ]= identifier[value]
identifier[model] = identifier[cls] (** identifier[fields] )
identifier[model] . identifier[_populate] (** identifier[kwargs] )
keyword[return] identifier[model] | def get(cls, **kwargs):
"""Retrieve an object by making a GET request to Transifex.
Each value in `kwargs` that corresponds to a field
defined in `self.url_fields` will be used in the URL path
of the request, so that a particular entry of this model
is identified and retrieved.
Raises:
AttributeError: if not all values for parameters in `url_fields`
are passed as kwargs
txlib.http.exceptions.NotFoundError: if the object with these
attributes is not found on the remote server
txlib.http.exceptions.ServerError subclass: depending on
the particular server response
Example:
# Note: also catch exceptions
>>> obj = MyModel.get(attr1=value1, attr2=value2)
"""
fields = {}
for field in cls.url_fields:
value = kwargs.pop(field, None)
if value is None:
cls._handle_wrong_field(field, ATTR_TYPE_URL) # depends on [control=['if'], data=[]]
fields[field] = value # depends on [control=['for'], data=['field']]
# Create an instance of the model class and make the GET request
model = cls(**fields)
model._populate(**kwargs)
return model |
def query_dqsegdb(cls, flags, *args, **kwargs):
"""Query the advanced LIGO DQSegDB for a list of flags.
Parameters
----------
flags : `iterable`
A list of flag names for which to query.
*args
Either, two `float`-like numbers indicating the
GPS [start, stop) interval, or a `SegmentList`
defining a number of summary segments.
on_error : `str`
how to handle an error querying for one flag, one of
- `'raise'` (default): raise the Exception
- `'warn'`: print a warning
- `'ignore'`: move onto the next flag as if nothing happened
url : `str`, optional
URL of the segment database, defaults to
``$DEFAULT_SEGMENT_SERVER`` environment variable, or
``'https://segments.ligo.org'``
Returns
-------
flagdict : `DataQualityDict`
An ordered `DataQualityDict` of (name, `DataQualityFlag`)
pairs.
"""
# check on_error flag
on_error = kwargs.pop('on_error', 'raise').lower()
if on_error not in ['raise', 'warn', 'ignore']:
raise ValueError("on_error must be one of 'raise', 'warn', "
"or 'ignore'")
# parse segments
qsegs = _parse_query_segments(args, cls.query_dqsegdb)
# set up threading
inq = Queue()
outq = Queue()
for i in range(len(flags)):
t = _QueryDQSegDBThread(inq, outq, qsegs, **kwargs)
t.setDaemon(True)
t.start()
for i, flag in enumerate(flags):
inq.put((i, flag))
# capture output
inq.join()
outq.join()
new = cls()
results = list(zip(*sorted([outq.get() for i in range(len(flags))],
key=lambda x: x[0])))[1]
for result, flag in zip(results, flags):
if isinstance(result, Exception):
result.args = ('%s [%s]' % (str(result), str(flag)),)
if on_error == 'ignore':
pass
elif on_error == 'warn':
warnings.warn(str(result))
else:
raise result
else:
new[flag] = result
return new | def function[query_dqsegdb, parameter[cls, flags]]:
constant[Query the advanced LIGO DQSegDB for a list of flags.
Parameters
----------
flags : `iterable`
A list of flag names for which to query.
*args
Either, two `float`-like numbers indicating the
GPS [start, stop) interval, or a `SegmentList`
defining a number of summary segments.
on_error : `str`
how to handle an error querying for one flag, one of
- `'raise'` (default): raise the Exception
- `'warn'`: print a warning
- `'ignore'`: move onto the next flag as if nothing happened
url : `str`, optional
URL of the segment database, defaults to
``$DEFAULT_SEGMENT_SERVER`` environment variable, or
``'https://segments.ligo.org'``
Returns
-------
flagdict : `DataQualityDict`
An ordered `DataQualityDict` of (name, `DataQualityFlag`)
pairs.
]
variable[on_error] assign[=] call[call[name[kwargs].pop, parameter[constant[on_error], constant[raise]]].lower, parameter[]]
if compare[name[on_error] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18f810e80>, <ast.Constant object at 0x7da18f8107c0>, <ast.Constant object at 0x7da18f813550>]]] begin[:]
<ast.Raise object at 0x7da18f810bb0>
variable[qsegs] assign[=] call[name[_parse_query_segments], parameter[name[args], name[cls].query_dqsegdb]]
variable[inq] assign[=] call[name[Queue], parameter[]]
variable[outq] assign[=] call[name[Queue], parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[flags]]]]]] begin[:]
variable[t] assign[=] call[name[_QueryDQSegDBThread], parameter[name[inq], name[outq], name[qsegs]]]
call[name[t].setDaemon, parameter[constant[True]]]
call[name[t].start, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f813f70>, <ast.Name object at 0x7da18f812d70>]]] in starred[call[name[enumerate], parameter[name[flags]]]] begin[:]
call[name[inq].put, parameter[tuple[[<ast.Name object at 0x7da18f810ee0>, <ast.Name object at 0x7da18f8110c0>]]]]
call[name[inq].join, parameter[]]
call[name[outq].join, parameter[]]
variable[new] assign[=] call[name[cls], parameter[]]
variable[results] assign[=] call[call[name[list], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da18f812110>]]]]][constant[1]]
for taget[tuple[[<ast.Name object at 0x7da18f813430>, <ast.Name object at 0x7da18f812bf0>]]] in starred[call[name[zip], parameter[name[results], name[flags]]]] begin[:]
if call[name[isinstance], parameter[name[result], name[Exception]]] begin[:]
name[result].args assign[=] tuple[[<ast.BinOp object at 0x7da18f812590>]]
if compare[name[on_error] equal[==] constant[ignore]] begin[:]
pass
return[name[new]] | keyword[def] identifier[query_dqsegdb] ( identifier[cls] , identifier[flags] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[on_error] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] ). identifier[lower] ()
keyword[if] identifier[on_error] keyword[not] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
identifier[qsegs] = identifier[_parse_query_segments] ( identifier[args] , identifier[cls] . identifier[query_dqsegdb] )
identifier[inq] = identifier[Queue] ()
identifier[outq] = identifier[Queue] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[flags] )):
identifier[t] = identifier[_QueryDQSegDBThread] ( identifier[inq] , identifier[outq] , identifier[qsegs] ,** identifier[kwargs] )
identifier[t] . identifier[setDaemon] ( keyword[True] )
identifier[t] . identifier[start] ()
keyword[for] identifier[i] , identifier[flag] keyword[in] identifier[enumerate] ( identifier[flags] ):
identifier[inq] . identifier[put] (( identifier[i] , identifier[flag] ))
identifier[inq] . identifier[join] ()
identifier[outq] . identifier[join] ()
identifier[new] = identifier[cls] ()
identifier[results] = identifier[list] ( identifier[zip] (* identifier[sorted] ([ identifier[outq] . identifier[get] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[flags] ))],
identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ])))[ literal[int] ]
keyword[for] identifier[result] , identifier[flag] keyword[in] identifier[zip] ( identifier[results] , identifier[flags] ):
keyword[if] identifier[isinstance] ( identifier[result] , identifier[Exception] ):
identifier[result] . identifier[args] =( literal[string] %( identifier[str] ( identifier[result] ), identifier[str] ( identifier[flag] )),)
keyword[if] identifier[on_error] == literal[string] :
keyword[pass]
keyword[elif] identifier[on_error] == literal[string] :
identifier[warnings] . identifier[warn] ( identifier[str] ( identifier[result] ))
keyword[else] :
keyword[raise] identifier[result]
keyword[else] :
identifier[new] [ identifier[flag] ]= identifier[result]
keyword[return] identifier[new] | def query_dqsegdb(cls, flags, *args, **kwargs):
"""Query the advanced LIGO DQSegDB for a list of flags.
Parameters
----------
flags : `iterable`
A list of flag names for which to query.
*args
Either, two `float`-like numbers indicating the
GPS [start, stop) interval, or a `SegmentList`
defining a number of summary segments.
on_error : `str`
how to handle an error querying for one flag, one of
- `'raise'` (default): raise the Exception
- `'warn'`: print a warning
- `'ignore'`: move onto the next flag as if nothing happened
url : `str`, optional
URL of the segment database, defaults to
``$DEFAULT_SEGMENT_SERVER`` environment variable, or
``'https://segments.ligo.org'``
Returns
-------
flagdict : `DataQualityDict`
An ordered `DataQualityDict` of (name, `DataQualityFlag`)
pairs.
"""
# check on_error flag
on_error = kwargs.pop('on_error', 'raise').lower()
if on_error not in ['raise', 'warn', 'ignore']:
raise ValueError("on_error must be one of 'raise', 'warn', or 'ignore'") # depends on [control=['if'], data=[]]
# parse segments
qsegs = _parse_query_segments(args, cls.query_dqsegdb)
# set up threading
inq = Queue()
outq = Queue()
for i in range(len(flags)):
t = _QueryDQSegDBThread(inq, outq, qsegs, **kwargs)
t.setDaemon(True)
t.start() # depends on [control=['for'], data=[]]
for (i, flag) in enumerate(flags):
inq.put((i, flag)) # depends on [control=['for'], data=[]]
# capture output
inq.join()
outq.join()
new = cls()
results = list(zip(*sorted([outq.get() for i in range(len(flags))], key=lambda x: x[0])))[1]
for (result, flag) in zip(results, flags):
if isinstance(result, Exception):
result.args = ('%s [%s]' % (str(result), str(flag)),)
if on_error == 'ignore':
pass # depends on [control=['if'], data=[]]
elif on_error == 'warn':
warnings.warn(str(result)) # depends on [control=['if'], data=[]]
else:
raise result # depends on [control=['if'], data=[]]
else:
new[flag] = result # depends on [control=['for'], data=[]]
return new |
def get_carrier_concentration(self):
"""
gives the carrier concentration (in cm^-3)
Returns
a dictionary {temp:[]} with an array of carrier concentration
(in cm^-3) at each temperature
The array relates to each step of electron chemical potential
"""
return {temp: [1e24 * i / self.vol for i in self._carrier_conc[temp]]
for temp in self._carrier_conc} | def function[get_carrier_concentration, parameter[self]]:
constant[
gives the carrier concentration (in cm^-3)
Returns
a dictionary {temp:[]} with an array of carrier concentration
(in cm^-3) at each temperature
The array relates to each step of electron chemical potential
]
return[<ast.DictComp object at 0x7da20e957040>] | keyword[def] identifier[get_carrier_concentration] ( identifier[self] ):
literal[string]
keyword[return] { identifier[temp] :[ literal[int] * identifier[i] / identifier[self] . identifier[vol] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_carrier_conc] [ identifier[temp] ]]
keyword[for] identifier[temp] keyword[in] identifier[self] . identifier[_carrier_conc] } | def get_carrier_concentration(self):
"""
gives the carrier concentration (in cm^-3)
Returns
a dictionary {temp:[]} with an array of carrier concentration
(in cm^-3) at each temperature
The array relates to each step of electron chemical potential
"""
return {temp: [1e+24 * i / self.vol for i in self._carrier_conc[temp]] for temp in self._carrier_conc} |
def get_urls(self):
"""Prepend `get_urls` with our own patterns."""
urls = super(BaseDjangoObjectActions, self).get_urls()
return self._get_action_urls() + urls | def function[get_urls, parameter[self]]:
constant[Prepend `get_urls` with our own patterns.]
variable[urls] assign[=] call[call[name[super], parameter[name[BaseDjangoObjectActions], name[self]]].get_urls, parameter[]]
return[binary_operation[call[name[self]._get_action_urls, parameter[]] + name[urls]]] | keyword[def] identifier[get_urls] ( identifier[self] ):
literal[string]
identifier[urls] = identifier[super] ( identifier[BaseDjangoObjectActions] , identifier[self] ). identifier[get_urls] ()
keyword[return] identifier[self] . identifier[_get_action_urls] ()+ identifier[urls] | def get_urls(self):
"""Prepend `get_urls` with our own patterns."""
urls = super(BaseDjangoObjectActions, self).get_urls()
return self._get_action_urls() + urls |
def read(path, default=None, encoding='utf8'):
"""Read encoded contents from specified path or return default."""
if not path:
return default
try:
with io.open(path, mode='r', encoding=encoding) as contents:
return contents.read()
except IOError:
if default is not None:
return default
raise | def function[read, parameter[path, default, encoding]]:
constant[Read encoded contents from specified path or return default.]
if <ast.UnaryOp object at 0x7da20cabef50> begin[:]
return[name[default]]
<ast.Try object at 0x7da20cabc880> | keyword[def] identifier[read] ( identifier[path] , identifier[default] = keyword[None] , identifier[encoding] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[path] :
keyword[return] identifier[default]
keyword[try] :
keyword[with] identifier[io] . identifier[open] ( identifier[path] , identifier[mode] = literal[string] , identifier[encoding] = identifier[encoding] ) keyword[as] identifier[contents] :
keyword[return] identifier[contents] . identifier[read] ()
keyword[except] identifier[IOError] :
keyword[if] identifier[default] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[default]
keyword[raise] | def read(path, default=None, encoding='utf8'):
"""Read encoded contents from specified path or return default."""
if not path:
return default # depends on [control=['if'], data=[]]
try:
with io.open(path, mode='r', encoding=encoding) as contents:
return contents.read() # depends on [control=['with'], data=['contents']] # depends on [control=['try'], data=[]]
except IOError:
if default is not None:
return default # depends on [control=['if'], data=['default']]
raise # depends on [control=['except'], data=[]] |
def new_profile(self):
"""Create a new profile by name.
"""
# noinspection PyCallByClass,PyTypeChecker
dir = os.path.join(QgsApplication.qgisSettingsDirPath(),
'inasafe', 'minimum_needs')
file_name, __ = QFileDialog.getSaveFileName(
self,
self.tr('Create a minimum needs profile'),
expanduser(dir),
self.tr('JSON files (*.json *.JSON)'),
options=QFileDialog.DontUseNativeDialog)
if not file_name:
return
file_name = basename(file_name)
if self.profile_combo.findText(file_name) == -1:
minimum_needs = {
'resources': [], 'provenance': '', 'profile': file_name}
self.minimum_needs.update_minimum_needs(minimum_needs)
self.minimum_needs.save_profile(file_name)
self.profile_combo.addItem(file_name)
self.clear_resource_list()
self.profile_combo.setCurrentIndex(
self.profile_combo.findText(file_name))
else:
self.profile_combo.setCurrentIndex(
self.profile_combo.findText(file_name))
self.select_profile_by_name(file_name) | def function[new_profile, parameter[self]]:
constant[Create a new profile by name.
]
variable[dir] assign[=] call[name[os].path.join, parameter[call[name[QgsApplication].qgisSettingsDirPath, parameter[]], constant[inasafe], constant[minimum_needs]]]
<ast.Tuple object at 0x7da204963fd0> assign[=] call[name[QFileDialog].getSaveFileName, parameter[name[self], call[name[self].tr, parameter[constant[Create a minimum needs profile]]], call[name[expanduser], parameter[name[dir]]], call[name[self].tr, parameter[constant[JSON files (*.json *.JSON)]]]]]
if <ast.UnaryOp object at 0x7da2049616c0> begin[:]
return[None]
variable[file_name] assign[=] call[name[basename], parameter[name[file_name]]]
if compare[call[name[self].profile_combo.findText, parameter[name[file_name]]] equal[==] <ast.UnaryOp object at 0x7da204961300>] begin[:]
variable[minimum_needs] assign[=] dictionary[[<ast.Constant object at 0x7da204962980>, <ast.Constant object at 0x7da204961f30>, <ast.Constant object at 0x7da204961810>], [<ast.List object at 0x7da204960760>, <ast.Constant object at 0x7da2049608b0>, <ast.Name object at 0x7da204963610>]]
call[name[self].minimum_needs.update_minimum_needs, parameter[name[minimum_needs]]]
call[name[self].minimum_needs.save_profile, parameter[name[file_name]]]
call[name[self].profile_combo.addItem, parameter[name[file_name]]]
call[name[self].clear_resource_list, parameter[]]
call[name[self].profile_combo.setCurrentIndex, parameter[call[name[self].profile_combo.findText, parameter[name[file_name]]]]] | keyword[def] identifier[new_profile] ( identifier[self] ):
literal[string]
identifier[dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[QgsApplication] . identifier[qgisSettingsDirPath] (),
literal[string] , literal[string] )
identifier[file_name] , identifier[__] = identifier[QFileDialog] . identifier[getSaveFileName] (
identifier[self] ,
identifier[self] . identifier[tr] ( literal[string] ),
identifier[expanduser] ( identifier[dir] ),
identifier[self] . identifier[tr] ( literal[string] ),
identifier[options] = identifier[QFileDialog] . identifier[DontUseNativeDialog] )
keyword[if] keyword[not] identifier[file_name] :
keyword[return]
identifier[file_name] = identifier[basename] ( identifier[file_name] )
keyword[if] identifier[self] . identifier[profile_combo] . identifier[findText] ( identifier[file_name] )==- literal[int] :
identifier[minimum_needs] ={
literal[string] :[], literal[string] : literal[string] , literal[string] : identifier[file_name] }
identifier[self] . identifier[minimum_needs] . identifier[update_minimum_needs] ( identifier[minimum_needs] )
identifier[self] . identifier[minimum_needs] . identifier[save_profile] ( identifier[file_name] )
identifier[self] . identifier[profile_combo] . identifier[addItem] ( identifier[file_name] )
identifier[self] . identifier[clear_resource_list] ()
identifier[self] . identifier[profile_combo] . identifier[setCurrentIndex] (
identifier[self] . identifier[profile_combo] . identifier[findText] ( identifier[file_name] ))
keyword[else] :
identifier[self] . identifier[profile_combo] . identifier[setCurrentIndex] (
identifier[self] . identifier[profile_combo] . identifier[findText] ( identifier[file_name] ))
identifier[self] . identifier[select_profile_by_name] ( identifier[file_name] ) | def new_profile(self):
"""Create a new profile by name.
"""
# noinspection PyCallByClass,PyTypeChecker
dir = os.path.join(QgsApplication.qgisSettingsDirPath(), 'inasafe', 'minimum_needs')
(file_name, __) = QFileDialog.getSaveFileName(self, self.tr('Create a minimum needs profile'), expanduser(dir), self.tr('JSON files (*.json *.JSON)'), options=QFileDialog.DontUseNativeDialog)
if not file_name:
return # depends on [control=['if'], data=[]]
file_name = basename(file_name)
if self.profile_combo.findText(file_name) == -1:
minimum_needs = {'resources': [], 'provenance': '', 'profile': file_name}
self.minimum_needs.update_minimum_needs(minimum_needs)
self.minimum_needs.save_profile(file_name)
self.profile_combo.addItem(file_name)
self.clear_resource_list()
self.profile_combo.setCurrentIndex(self.profile_combo.findText(file_name)) # depends on [control=['if'], data=[]]
else:
self.profile_combo.setCurrentIndex(self.profile_combo.findText(file_name))
self.select_profile_by_name(file_name) |
def mmi_to_raster(self, force_flag=False, algorithm=USE_ASCII):
"""Convert the grid.xml's mmi column to a raster using gdal_grid.
A geotiff file will be created.
Unfortunately no python bindings exist for doing this so we are
going to do it using a shell call.
.. see also:: http://www.gdal.org/gdal_grid.html
Example of the gdal_grid call we generate::
gdal_grid -zfield "mmi" -a invdist:power=2.0:smoothing=1.0 \
-txe 126.29 130.29 -tye 0.802 4.798 -outsize 400 400 -of GTiff \
-ot Float16 -l mmi mmi.vrt mmi.tif
.. note:: It is assumed that gdal_grid is in your path.
:param force_flag: Whether to force the regeneration of the output
file. Defaults to False.
:type force_flag: bool
:param algorithm: Which re-sampling algorithm to use.
valid options are 'nearest' (for nearest neighbour), 'invdist'
(for inverse distance), 'average' (for moving average). Defaults
to 'nearest' if not specified. Note that passing re-sampling alg
parameters is currently not supported. If None is passed it will
be replaced with 'use_ascii'.
'use_ascii' algorithm will convert the mmi grid to ascii file
then convert it to raster using gdal_translate.
:type algorithm: str
:returns: Path to the resulting tif file.
:rtype: str
.. note:: For interest you can also make quite beautiful smoothed
raster using this:
gdal_grid -zfield "mmi" -a_srs EPSG:4326
-a invdist:power=2.0:smoothing=1.0 -txe 122.45 126.45
-tye -2.21 1.79 -outsize 400 400 -of GTiff
-ot Float16 -l mmi mmi.vrt mmi-trippy.tif
"""
LOGGER.debug('mmi_to_raster requested.')
if algorithm is None:
algorithm = USE_ASCII
if self.algorithm_name:
tif_path = os.path.join(
self.output_dir, '%s-%s.tif' % (
self.output_basename, algorithm))
else:
tif_path = os.path.join(
self.output_dir, '%s.tif' % self.output_basename)
# short circuit if the tif is already created.
if os.path.exists(tif_path) and force_flag is not True:
return tif_path
if algorithm == USE_ASCII:
# Convert to ascii
ascii_path = self.mmi_to_ascii(True)
# Creating command to convert to tif
command = (
(
'%(gdal_translate)s -a_srs EPSG:4326 '
'"%(ascii)s" "%(tif)s"'
) % {
'gdal_translate': which('gdal_translate')[0],
'ascii': ascii_path,
'tif': tif_path
}
)
LOGGER.info('Created this gdal command:\n%s' % command)
# Now run GDAL warp scottie...
self._run_command(command)
else:
# Ensure the vrt mmi file exists (it will generate csv too if
# needed)
vrt_path = self.mmi_to_vrt(force_flag)
# now generate the tif using default nearest neighbour
# interpolation options. This gives us the same output as the
# mmi.grd generated by the earthquake server.
if INVDIST in algorithm:
algorithm = 'invdist:power=2.0:smoothing=1.0'
command = (
(
'%(gdal_grid)s -a %(alg)s -zfield "mmi" -txe %(xMin)s '
'%(xMax)s -tye %(yMin)s %(yMax)s -outsize %(dimX)i '
'%(dimY)i -of GTiff -ot Float16 -a_srs EPSG:4326 -l mmi '
'"%(vrt)s" "%(tif)s"'
) % {
'gdal_grid': which('gdal_grid')[0],
'alg': algorithm,
'xMin': self.x_minimum,
'xMax': self.x_maximum,
'yMin': self.y_minimum,
'yMax': self.y_maximum,
'dimX': self.columns,
'dimY': self.rows,
'vrt': vrt_path,
'tif': tif_path
}
)
LOGGER.info('Created this gdal command:\n%s' % command)
# Now run GDAL warp scottie...
self._run_command(command)
# We will use keywords file name with simple algorithm name since
# it will raise an error in windows related to having double
# colon in path
if INVDIST in algorithm:
algorithm = 'invdist'
# copy the keywords file from fixtures for this layer
self.create_keyword_file(algorithm)
# Lastly copy over the standard qml (QGIS Style file) for the mmi.tif
if self.algorithm_name:
qml_path = os.path.join(
self.output_dir, '%s-%s.qml' % (
self.output_basename, algorithm))
else:
qml_path = os.path.join(
self.output_dir, '%s.qml' % self.output_basename)
qml_source_path = resources_path('converter_data', 'mmi.qml')
shutil.copyfile(qml_source_path, qml_path)
return tif_path | def function[mmi_to_raster, parameter[self, force_flag, algorithm]]:
constant[Convert the grid.xml's mmi column to a raster using gdal_grid.
A geotiff file will be created.
Unfortunately no python bindings exist for doing this so we are
going to do it using a shell call.
.. see also:: http://www.gdal.org/gdal_grid.html
Example of the gdal_grid call we generate::
gdal_grid -zfield "mmi" -a invdist:power=2.0:smoothing=1.0 -txe 126.29 130.29 -tye 0.802 4.798 -outsize 400 400 -of GTiff -ot Float16 -l mmi mmi.vrt mmi.tif
.. note:: It is assumed that gdal_grid is in your path.
:param force_flag: Whether to force the regeneration of the output
file. Defaults to False.
:type force_flag: bool
:param algorithm: Which re-sampling algorithm to use.
valid options are 'nearest' (for nearest neighbour), 'invdist'
(for inverse distance), 'average' (for moving average). Defaults
to 'nearest' if not specified. Note that passing re-sampling alg
parameters is currently not supported. If None is passed it will
be replaced with 'use_ascii'.
'use_ascii' algorithm will convert the mmi grid to ascii file
then convert it to raster using gdal_translate.
:type algorithm: str
:returns: Path to the resulting tif file.
:rtype: str
.. note:: For interest you can also make quite beautiful smoothed
raster using this:
gdal_grid -zfield "mmi" -a_srs EPSG:4326
-a invdist:power=2.0:smoothing=1.0 -txe 122.45 126.45
-tye -2.21 1.79 -outsize 400 400 -of GTiff
-ot Float16 -l mmi mmi.vrt mmi-trippy.tif
]
call[name[LOGGER].debug, parameter[constant[mmi_to_raster requested.]]]
if compare[name[algorithm] is constant[None]] begin[:]
variable[algorithm] assign[=] name[USE_ASCII]
if name[self].algorithm_name begin[:]
variable[tif_path] assign[=] call[name[os].path.join, parameter[name[self].output_dir, binary_operation[constant[%s-%s.tif] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0ca2230>, <ast.Name object at 0x7da1b0ca22f0>]]]]]
if <ast.BoolOp object at 0x7da1b0ca1f90> begin[:]
return[name[tif_path]]
if compare[name[algorithm] equal[==] name[USE_ASCII]] begin[:]
variable[ascii_path] assign[=] call[name[self].mmi_to_ascii, parameter[constant[True]]]
variable[command] assign[=] binary_operation[constant[%(gdal_translate)s -a_srs EPSG:4326 "%(ascii)s" "%(tif)s"] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b0ca2b90>, <ast.Constant object at 0x7da1b0ca38b0>, <ast.Constant object at 0x7da1b0ca3820>], [<ast.Subscript object at 0x7da1b0ca2b30>, <ast.Name object at 0x7da1b0ca2ec0>, <ast.Name object at 0x7da1b0ca3eb0>]]]
call[name[LOGGER].info, parameter[binary_operation[constant[Created this gdal command:
%s] <ast.Mod object at 0x7da2590d6920> name[command]]]]
call[name[self]._run_command, parameter[name[command]]]
call[name[self].create_keyword_file, parameter[name[algorithm]]]
if name[self].algorithm_name begin[:]
variable[qml_path] assign[=] call[name[os].path.join, parameter[name[self].output_dir, binary_operation[constant[%s-%s.qml] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0ca3280>, <ast.Name object at 0x7da1b0ca0a60>]]]]]
variable[qml_source_path] assign[=] call[name[resources_path], parameter[constant[converter_data], constant[mmi.qml]]]
call[name[shutil].copyfile, parameter[name[qml_source_path], name[qml_path]]]
return[name[tif_path]] | keyword[def] identifier[mmi_to_raster] ( identifier[self] , identifier[force_flag] = keyword[False] , identifier[algorithm] = identifier[USE_ASCII] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] identifier[algorithm] keyword[is] keyword[None] :
identifier[algorithm] = identifier[USE_ASCII]
keyword[if] identifier[self] . identifier[algorithm_name] :
identifier[tif_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[output_dir] , literal[string] %(
identifier[self] . identifier[output_basename] , identifier[algorithm] ))
keyword[else] :
identifier[tif_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[output_dir] , literal[string] % identifier[self] . identifier[output_basename] )
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[tif_path] ) keyword[and] identifier[force_flag] keyword[is] keyword[not] keyword[True] :
keyword[return] identifier[tif_path]
keyword[if] identifier[algorithm] == identifier[USE_ASCII] :
identifier[ascii_path] = identifier[self] . identifier[mmi_to_ascii] ( keyword[True] )
identifier[command] =(
(
literal[string]
literal[string]
)%{
literal[string] : identifier[which] ( literal[string] )[ literal[int] ],
literal[string] : identifier[ascii_path] ,
literal[string] : identifier[tif_path]
}
)
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[command] )
identifier[self] . identifier[_run_command] ( identifier[command] )
keyword[else] :
identifier[vrt_path] = identifier[self] . identifier[mmi_to_vrt] ( identifier[force_flag] )
keyword[if] identifier[INVDIST] keyword[in] identifier[algorithm] :
identifier[algorithm] = literal[string]
identifier[command] =(
(
literal[string]
literal[string]
literal[string]
literal[string]
)%{
literal[string] : identifier[which] ( literal[string] )[ literal[int] ],
literal[string] : identifier[algorithm] ,
literal[string] : identifier[self] . identifier[x_minimum] ,
literal[string] : identifier[self] . identifier[x_maximum] ,
literal[string] : identifier[self] . identifier[y_minimum] ,
literal[string] : identifier[self] . identifier[y_maximum] ,
literal[string] : identifier[self] . identifier[columns] ,
literal[string] : identifier[self] . identifier[rows] ,
literal[string] : identifier[vrt_path] ,
literal[string] : identifier[tif_path]
}
)
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[command] )
identifier[self] . identifier[_run_command] ( identifier[command] )
keyword[if] identifier[INVDIST] keyword[in] identifier[algorithm] :
identifier[algorithm] = literal[string]
identifier[self] . identifier[create_keyword_file] ( identifier[algorithm] )
keyword[if] identifier[self] . identifier[algorithm_name] :
identifier[qml_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[output_dir] , literal[string] %(
identifier[self] . identifier[output_basename] , identifier[algorithm] ))
keyword[else] :
identifier[qml_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[output_dir] , literal[string] % identifier[self] . identifier[output_basename] )
identifier[qml_source_path] = identifier[resources_path] ( literal[string] , literal[string] )
identifier[shutil] . identifier[copyfile] ( identifier[qml_source_path] , identifier[qml_path] )
keyword[return] identifier[tif_path] | def mmi_to_raster(self, force_flag=False, algorithm=USE_ASCII):
"""Convert the grid.xml's mmi column to a raster using gdal_grid.
A geotiff file will be created.
Unfortunately no python bindings exist for doing this so we are
going to do it using a shell call.
.. see also:: http://www.gdal.org/gdal_grid.html
Example of the gdal_grid call we generate::
gdal_grid -zfield "mmi" -a invdist:power=2.0:smoothing=1.0 -txe 126.29 130.29 -tye 0.802 4.798 -outsize 400 400 -of GTiff -ot Float16 -l mmi mmi.vrt mmi.tif
.. note:: It is assumed that gdal_grid is in your path.
:param force_flag: Whether to force the regeneration of the output
file. Defaults to False.
:type force_flag: bool
:param algorithm: Which re-sampling algorithm to use.
valid options are 'nearest' (for nearest neighbour), 'invdist'
(for inverse distance), 'average' (for moving average). Defaults
to 'nearest' if not specified. Note that passing re-sampling alg
parameters is currently not supported. If None is passed it will
be replaced with 'use_ascii'.
'use_ascii' algorithm will convert the mmi grid to ascii file
then convert it to raster using gdal_translate.
:type algorithm: str
:returns: Path to the resulting tif file.
:rtype: str
.. note:: For interest you can also make quite beautiful smoothed
raster using this:
gdal_grid -zfield "mmi" -a_srs EPSG:4326
-a invdist:power=2.0:smoothing=1.0 -txe 122.45 126.45
-tye -2.21 1.79 -outsize 400 400 -of GTiff
-ot Float16 -l mmi mmi.vrt mmi-trippy.tif
"""
LOGGER.debug('mmi_to_raster requested.')
if algorithm is None:
algorithm = USE_ASCII # depends on [control=['if'], data=['algorithm']]
if self.algorithm_name:
tif_path = os.path.join(self.output_dir, '%s-%s.tif' % (self.output_basename, algorithm)) # depends on [control=['if'], data=[]]
else:
tif_path = os.path.join(self.output_dir, '%s.tif' % self.output_basename)
# short circuit if the tif is already created.
if os.path.exists(tif_path) and force_flag is not True:
return tif_path # depends on [control=['if'], data=[]]
if algorithm == USE_ASCII:
# Convert to ascii
ascii_path = self.mmi_to_ascii(True)
# Creating command to convert to tif
command = '%(gdal_translate)s -a_srs EPSG:4326 "%(ascii)s" "%(tif)s"' % {'gdal_translate': which('gdal_translate')[0], 'ascii': ascii_path, 'tif': tif_path}
LOGGER.info('Created this gdal command:\n%s' % command)
# Now run GDAL warp scottie...
self._run_command(command) # depends on [control=['if'], data=[]]
else:
# Ensure the vrt mmi file exists (it will generate csv too if
# needed)
vrt_path = self.mmi_to_vrt(force_flag)
# now generate the tif using default nearest neighbour
# interpolation options. This gives us the same output as the
# mmi.grd generated by the earthquake server.
if INVDIST in algorithm:
algorithm = 'invdist:power=2.0:smoothing=1.0' # depends on [control=['if'], data=['algorithm']]
command = '%(gdal_grid)s -a %(alg)s -zfield "mmi" -txe %(xMin)s %(xMax)s -tye %(yMin)s %(yMax)s -outsize %(dimX)i %(dimY)i -of GTiff -ot Float16 -a_srs EPSG:4326 -l mmi "%(vrt)s" "%(tif)s"' % {'gdal_grid': which('gdal_grid')[0], 'alg': algorithm, 'xMin': self.x_minimum, 'xMax': self.x_maximum, 'yMin': self.y_minimum, 'yMax': self.y_maximum, 'dimX': self.columns, 'dimY': self.rows, 'vrt': vrt_path, 'tif': tif_path}
LOGGER.info('Created this gdal command:\n%s' % command)
# Now run GDAL warp scottie...
self._run_command(command)
# We will use keywords file name with simple algorithm name since
# it will raise an error in windows related to having double
# colon in path
if INVDIST in algorithm:
algorithm = 'invdist' # depends on [control=['if'], data=['algorithm']]
# copy the keywords file from fixtures for this layer
self.create_keyword_file(algorithm)
# Lastly copy over the standard qml (QGIS Style file) for the mmi.tif
if self.algorithm_name:
qml_path = os.path.join(self.output_dir, '%s-%s.qml' % (self.output_basename, algorithm)) # depends on [control=['if'], data=[]]
else:
qml_path = os.path.join(self.output_dir, '%s.qml' % self.output_basename)
qml_source_path = resources_path('converter_data', 'mmi.qml')
shutil.copyfile(qml_source_path, qml_path)
return tif_path |
def get_or_create_ap_election_meta(self, row, election):
"""
Gets or creates the APElectionMeta object for the given row of
AP data.
"""
APElectionMeta.objects.get_or_create(
ap_election_id=row["raceid"], election=election
) | def function[get_or_create_ap_election_meta, parameter[self, row, election]]:
constant[
Gets or creates the APElectionMeta object for the given row of
AP data.
]
call[name[APElectionMeta].objects.get_or_create, parameter[]] | keyword[def] identifier[get_or_create_ap_election_meta] ( identifier[self] , identifier[row] , identifier[election] ):
literal[string]
identifier[APElectionMeta] . identifier[objects] . identifier[get_or_create] (
identifier[ap_election_id] = identifier[row] [ literal[string] ], identifier[election] = identifier[election]
) | def get_or_create_ap_election_meta(self, row, election):
"""
Gets or creates the APElectionMeta object for the given row of
AP data.
"""
APElectionMeta.objects.get_or_create(ap_election_id=row['raceid'], election=election) |
def add_constraint(self, con):
""" Adds a constraint to the model.
"""
if isinstance(con, LinearConstraint):
N, M = con.A.shape
if con.name in [c.name for c in self.lin_constraints]:
logger.error("Constraint set named '%s' already exists."
% con.name)
return False
else:
con.i1 = self.lin_N# + 1
con.iN = self.lin_N + N - 1
nv = 0
for vs in con.vs:
nv = nv + self.get_var_N(vs)
if M != nv:
logger.error("Number of columns of A does not match number"
" of variables, A is %d x %d, nv = %d", N, M, nv)
self.lin_constraints.append(con)
elif isinstance(con, NonLinearConstraint):
N = con.N
if con.name in [c.name for c in self.nln_constraints]:
logger.error("Constraint set named '%s' already exists."
% con.name)
return False
else:
con.i1 = self.nln_N# + 1
con.iN = self.nln_N + N
self.nln_constraints.append(con)
else:
raise ValueError
return True | def function[add_constraint, parameter[self, con]]:
constant[ Adds a constraint to the model.
]
if call[name[isinstance], parameter[name[con], name[LinearConstraint]]] begin[:]
<ast.Tuple object at 0x7da1b2492980> assign[=] name[con].A.shape
if compare[name[con].name in <ast.ListComp object at 0x7da1b2490f40>] begin[:]
call[name[logger].error, parameter[binary_operation[constant[Constraint set named '%s' already exists.] <ast.Mod object at 0x7da2590d6920> name[con].name]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[add_constraint] ( identifier[self] , identifier[con] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[con] , identifier[LinearConstraint] ):
identifier[N] , identifier[M] = identifier[con] . identifier[A] . identifier[shape]
keyword[if] identifier[con] . identifier[name] keyword[in] [ identifier[c] . identifier[name] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[lin_constraints] ]:
identifier[logger] . identifier[error] ( literal[string]
% identifier[con] . identifier[name] )
keyword[return] keyword[False]
keyword[else] :
identifier[con] . identifier[i1] = identifier[self] . identifier[lin_N]
identifier[con] . identifier[iN] = identifier[self] . identifier[lin_N] + identifier[N] - literal[int]
identifier[nv] = literal[int]
keyword[for] identifier[vs] keyword[in] identifier[con] . identifier[vs] :
identifier[nv] = identifier[nv] + identifier[self] . identifier[get_var_N] ( identifier[vs] )
keyword[if] identifier[M] != identifier[nv] :
identifier[logger] . identifier[error] ( literal[string]
literal[string] , identifier[N] , identifier[M] , identifier[nv] )
identifier[self] . identifier[lin_constraints] . identifier[append] ( identifier[con] )
keyword[elif] identifier[isinstance] ( identifier[con] , identifier[NonLinearConstraint] ):
identifier[N] = identifier[con] . identifier[N]
keyword[if] identifier[con] . identifier[name] keyword[in] [ identifier[c] . identifier[name] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[nln_constraints] ]:
identifier[logger] . identifier[error] ( literal[string]
% identifier[con] . identifier[name] )
keyword[return] keyword[False]
keyword[else] :
identifier[con] . identifier[i1] = identifier[self] . identifier[nln_N]
identifier[con] . identifier[iN] = identifier[self] . identifier[nln_N] + identifier[N]
identifier[self] . identifier[nln_constraints] . identifier[append] ( identifier[con] )
keyword[else] :
keyword[raise] identifier[ValueError]
keyword[return] keyword[True] | def add_constraint(self, con):
""" Adds a constraint to the model.
"""
if isinstance(con, LinearConstraint):
(N, M) = con.A.shape
if con.name in [c.name for c in self.lin_constraints]:
logger.error("Constraint set named '%s' already exists." % con.name)
return False # depends on [control=['if'], data=[]]
else:
con.i1 = self.lin_N # + 1
con.iN = self.lin_N + N - 1
nv = 0
for vs in con.vs:
nv = nv + self.get_var_N(vs) # depends on [control=['for'], data=['vs']]
if M != nv:
logger.error('Number of columns of A does not match number of variables, A is %d x %d, nv = %d', N, M, nv) # depends on [control=['if'], data=['M', 'nv']]
self.lin_constraints.append(con) # depends on [control=['if'], data=[]]
elif isinstance(con, NonLinearConstraint):
N = con.N
if con.name in [c.name for c in self.nln_constraints]:
logger.error("Constraint set named '%s' already exists." % con.name)
return False # depends on [control=['if'], data=[]]
else:
con.i1 = self.nln_N # + 1
con.iN = self.nln_N + N
self.nln_constraints.append(con) # depends on [control=['if'], data=[]]
else:
raise ValueError
return True |
async def on_raw_part(self, message):
""" PART command. """
nick, metadata = self._parse_user(message.source)
channels = message.params[0].split(',')
if len(message.params) > 1:
reason = message.params[1]
else:
reason = None
self._sync_user(nick, metadata)
if self.is_same_nick(self.nickname, nick):
# We left the channel. Remove from channel list. :(
for channel in channels:
if self.in_channel(channel):
self._destroy_channel(channel)
await self.on_part(channel, nick, reason)
else:
# Someone else left. Remove them.
for channel in channels:
self._destroy_user(nick, channel)
await self.on_part(channel, nick, reason) | <ast.AsyncFunctionDef object at 0x7da20c6e6aa0> | keyword[async] keyword[def] identifier[on_raw_part] ( identifier[self] , identifier[message] ):
literal[string]
identifier[nick] , identifier[metadata] = identifier[self] . identifier[_parse_user] ( identifier[message] . identifier[source] )
identifier[channels] = identifier[message] . identifier[params] [ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[message] . identifier[params] )> literal[int] :
identifier[reason] = identifier[message] . identifier[params] [ literal[int] ]
keyword[else] :
identifier[reason] = keyword[None]
identifier[self] . identifier[_sync_user] ( identifier[nick] , identifier[metadata] )
keyword[if] identifier[self] . identifier[is_same_nick] ( identifier[self] . identifier[nickname] , identifier[nick] ):
keyword[for] identifier[channel] keyword[in] identifier[channels] :
keyword[if] identifier[self] . identifier[in_channel] ( identifier[channel] ):
identifier[self] . identifier[_destroy_channel] ( identifier[channel] )
keyword[await] identifier[self] . identifier[on_part] ( identifier[channel] , identifier[nick] , identifier[reason] )
keyword[else] :
keyword[for] identifier[channel] keyword[in] identifier[channels] :
identifier[self] . identifier[_destroy_user] ( identifier[nick] , identifier[channel] )
keyword[await] identifier[self] . identifier[on_part] ( identifier[channel] , identifier[nick] , identifier[reason] ) | async def on_raw_part(self, message):
""" PART command. """
(nick, metadata) = self._parse_user(message.source)
channels = message.params[0].split(',')
if len(message.params) > 1:
reason = message.params[1] # depends on [control=['if'], data=[]]
else:
reason = None
self._sync_user(nick, metadata)
if self.is_same_nick(self.nickname, nick):
# We left the channel. Remove from channel list. :(
for channel in channels:
if self.in_channel(channel):
self._destroy_channel(channel)
await self.on_part(channel, nick, reason) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['channel']] # depends on [control=['if'], data=[]]
else:
# Someone else left. Remove them.
for channel in channels:
self._destroy_user(nick, channel)
await self.on_part(channel, nick, reason) # depends on [control=['for'], data=['channel']] |
def check_lazy_load_sectie(f):
'''
Decorator function to lazy load a :class:`Sectie`.
'''
def wrapper(self):
sectie = self
if (getattr(sectie, '_%s' % f.__name__, None) is None):
log.debug('Lazy loading Sectie %s in Afdeling %d', sectie.id, sectie.afdeling.id)
sectie.check_gateway()
s = sectie.gateway.get_sectie_by_id_and_afdeling(
sectie.id, sectie.afdeling.id
)
sectie._centroid = s._centroid
sectie._bounding_box = s._bounding_box
return f(self)
return wrapper | def function[check_lazy_load_sectie, parameter[f]]:
constant[
Decorator function to lazy load a :class:`Sectie`.
]
def function[wrapper, parameter[self]]:
variable[sectie] assign[=] name[self]
if compare[call[name[getattr], parameter[name[sectie], binary_operation[constant[_%s] <ast.Mod object at 0x7da2590d6920> name[f].__name__], constant[None]]] is constant[None]] begin[:]
call[name[log].debug, parameter[constant[Lazy loading Sectie %s in Afdeling %d], name[sectie].id, name[sectie].afdeling.id]]
call[name[sectie].check_gateway, parameter[]]
variable[s] assign[=] call[name[sectie].gateway.get_sectie_by_id_and_afdeling, parameter[name[sectie].id, name[sectie].afdeling.id]]
name[sectie]._centroid assign[=] name[s]._centroid
name[sectie]._bounding_box assign[=] name[s]._bounding_box
return[call[name[f], parameter[name[self]]]]
return[name[wrapper]] | keyword[def] identifier[check_lazy_load_sectie] ( identifier[f] ):
literal[string]
keyword[def] identifier[wrapper] ( identifier[self] ):
identifier[sectie] = identifier[self]
keyword[if] ( identifier[getattr] ( identifier[sectie] , literal[string] % identifier[f] . identifier[__name__] , keyword[None] ) keyword[is] keyword[None] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[sectie] . identifier[id] , identifier[sectie] . identifier[afdeling] . identifier[id] )
identifier[sectie] . identifier[check_gateway] ()
identifier[s] = identifier[sectie] . identifier[gateway] . identifier[get_sectie_by_id_and_afdeling] (
identifier[sectie] . identifier[id] , identifier[sectie] . identifier[afdeling] . identifier[id]
)
identifier[sectie] . identifier[_centroid] = identifier[s] . identifier[_centroid]
identifier[sectie] . identifier[_bounding_box] = identifier[s] . identifier[_bounding_box]
keyword[return] identifier[f] ( identifier[self] )
keyword[return] identifier[wrapper] | def check_lazy_load_sectie(f):
"""
Decorator function to lazy load a :class:`Sectie`.
"""
def wrapper(self):
sectie = self
if getattr(sectie, '_%s' % f.__name__, None) is None:
log.debug('Lazy loading Sectie %s in Afdeling %d', sectie.id, sectie.afdeling.id)
sectie.check_gateway()
s = sectie.gateway.get_sectie_by_id_and_afdeling(sectie.id, sectie.afdeling.id)
sectie._centroid = s._centroid
sectie._bounding_box = s._bounding_box # depends on [control=['if'], data=[]]
return f(self)
return wrapper |
def UQRatio(s1, s2, full_process=True):
"""
Unicode quick ratio
Calls QRatio with force_ascii set to False
:param s1:
:param s2:
:return: similarity ratio
"""
return QRatio(s1, s2, force_ascii=False, full_process=full_process) | def function[UQRatio, parameter[s1, s2, full_process]]:
constant[
Unicode quick ratio
Calls QRatio with force_ascii set to False
:param s1:
:param s2:
:return: similarity ratio
]
return[call[name[QRatio], parameter[name[s1], name[s2]]]] | keyword[def] identifier[UQRatio] ( identifier[s1] , identifier[s2] , identifier[full_process] = keyword[True] ):
literal[string]
keyword[return] identifier[QRatio] ( identifier[s1] , identifier[s2] , identifier[force_ascii] = keyword[False] , identifier[full_process] = identifier[full_process] ) | def UQRatio(s1, s2, full_process=True):
"""
Unicode quick ratio
Calls QRatio with force_ascii set to False
:param s1:
:param s2:
:return: similarity ratio
"""
return QRatio(s1, s2, force_ascii=False, full_process=full_process) |
def _batches(self, request, points_per_request):
"""
Generator for creating 'request batches'. Each batch contains a maximum of "points_per_request"
points to read.
:params: request a list of point_name as a list
:params: (int) points_per_request
:returns: (iter) list of point_name of size <= points_per_request
"""
for i in range(0, len(request), points_per_request):
yield request[i : i + points_per_request] | def function[_batches, parameter[self, request, points_per_request]]:
constant[
Generator for creating 'request batches'. Each batch contains a maximum of "points_per_request"
points to read.
:params: request a list of point_name as a list
:params: (int) points_per_request
:returns: (iter) list of point_name of size <= points_per_request
]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[request]]], name[points_per_request]]]] begin[:]
<ast.Yield object at 0x7da1b0409ea0> | keyword[def] identifier[_batches] ( identifier[self] , identifier[request] , identifier[points_per_request] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[request] ), identifier[points_per_request] ):
keyword[yield] identifier[request] [ identifier[i] : identifier[i] + identifier[points_per_request] ] | def _batches(self, request, points_per_request):
"""
Generator for creating 'request batches'. Each batch contains a maximum of "points_per_request"
points to read.
:params: request a list of point_name as a list
:params: (int) points_per_request
:returns: (iter) list of point_name of size <= points_per_request
"""
for i in range(0, len(request), points_per_request):
yield request[i:i + points_per_request] # depends on [control=['for'], data=['i']] |
def logout(self):
"""
Logout from the backend
:return: return True if logout is successfull, otherwise False
:rtype: bool
"""
logger.debug("request backend logout")
if not self.authenticated:
logger.warning("Unnecessary logout ...")
return True
endpoint = 'logout'
_ = self.get_response(method='POST', endpoint=endpoint)
self.session.close()
self.set_token(token=None)
return True | def function[logout, parameter[self]]:
constant[
Logout from the backend
:return: return True if logout is successfull, otherwise False
:rtype: bool
]
call[name[logger].debug, parameter[constant[request backend logout]]]
if <ast.UnaryOp object at 0x7da18f09cfd0> begin[:]
call[name[logger].warning, parameter[constant[Unnecessary logout ...]]]
return[constant[True]]
variable[endpoint] assign[=] constant[logout]
variable[_] assign[=] call[name[self].get_response, parameter[]]
call[name[self].session.close, parameter[]]
call[name[self].set_token, parameter[]]
return[constant[True]] | keyword[def] identifier[logout] ( identifier[self] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[authenticated] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return] keyword[True]
identifier[endpoint] = literal[string]
identifier[_] = identifier[self] . identifier[get_response] ( identifier[method] = literal[string] , identifier[endpoint] = identifier[endpoint] )
identifier[self] . identifier[session] . identifier[close] ()
identifier[self] . identifier[set_token] ( identifier[token] = keyword[None] )
keyword[return] keyword[True] | def logout(self):
"""
Logout from the backend
:return: return True if logout is successfull, otherwise False
:rtype: bool
"""
logger.debug('request backend logout')
if not self.authenticated:
logger.warning('Unnecessary logout ...')
return True # depends on [control=['if'], data=[]]
endpoint = 'logout'
_ = self.get_response(method='POST', endpoint=endpoint)
self.session.close()
self.set_token(token=None)
return True |
def shared_cost(self):
"""
Access the shared_cost
:returns: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
:rtype: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
"""
if self._shared_cost is None:
self._shared_cost = SharedCostList(
self._version,
account_sid=self._solution['account_sid'],
country_code=self._solution['country_code'],
)
return self._shared_cost | def function[shared_cost, parameter[self]]:
constant[
Access the shared_cost
:returns: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
:rtype: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
]
if compare[name[self]._shared_cost is constant[None]] begin[:]
name[self]._shared_cost assign[=] call[name[SharedCostList], parameter[name[self]._version]]
return[name[self]._shared_cost] | keyword[def] identifier[shared_cost] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_shared_cost] keyword[is] keyword[None] :
identifier[self] . identifier[_shared_cost] = identifier[SharedCostList] (
identifier[self] . identifier[_version] ,
identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],
identifier[country_code] = identifier[self] . identifier[_solution] [ literal[string] ],
)
keyword[return] identifier[self] . identifier[_shared_cost] | def shared_cost(self):
"""
Access the shared_cost
:returns: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
:rtype: twilio.rest.api.v2010.account.available_phone_number.shared_cost.SharedCostList
"""
if self._shared_cost is None:
self._shared_cost = SharedCostList(self._version, account_sid=self._solution['account_sid'], country_code=self._solution['country_code']) # depends on [control=['if'], data=[]]
return self._shared_cost |
def save_plots(Figs, filenames, **kwargs):
"""
Parameters
----------
Figs : dict
dictionary of plots, e.g. {'eqarea': 1, ...}
filenames : dict
dictionary of filenames, e.g. {'eqarea': 'mc01a_eqarea.svg', ...}
dict keys should correspond with Figs
"""
saved = []
for key in list(Figs.keys()):
try:
plt.figure(num=Figs[key])
fname = filenames[key]
if set_env.IS_WIN: # always truncate filenames if on Windows
fname = os.path.split(fname)[1]
if not isServer: # remove illegal ':' character for windows
fname = fname.replace(':', '_')
if 'incl_directory' in kwargs.keys() and not set_env.IS_WIN:
if kwargs['incl_directory']:
pass # do not flatten file name
else:
fname = fname.replace('/', '-') # flatten file name
else:
fname = fname.replace('/', '-') # flatten file name
if 'dpi' in list(kwargs.keys()):
plt.savefig(fname, dpi=kwargs['dpi'])
elif isServer:
plt.savefig(fname, dpi=240)
else:
plt.savefig(fname)
if verbose:
print(Figs[key], " saved in ", fname)
saved.append(fname)
plt.close(Figs[key])
except Exception as ex:
print(type(ex), ex)
print('could not save: ', Figs[key], filenames[key])
print("output file format not supported ")
return saved | def function[save_plots, parameter[Figs, filenames]]:
constant[
Parameters
----------
Figs : dict
dictionary of plots, e.g. {'eqarea': 1, ...}
filenames : dict
dictionary of filenames, e.g. {'eqarea': 'mc01a_eqarea.svg', ...}
dict keys should correspond with Figs
]
variable[saved] assign[=] list[[]]
for taget[name[key]] in starred[call[name[list], parameter[call[name[Figs].keys, parameter[]]]]] begin[:]
<ast.Try object at 0x7da2041da0b0>
return[name[saved]] | keyword[def] identifier[save_plots] ( identifier[Figs] , identifier[filenames] ,** identifier[kwargs] ):
literal[string]
identifier[saved] =[]
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[Figs] . identifier[keys] ()):
keyword[try] :
identifier[plt] . identifier[figure] ( identifier[num] = identifier[Figs] [ identifier[key] ])
identifier[fname] = identifier[filenames] [ identifier[key] ]
keyword[if] identifier[set_env] . identifier[IS_WIN] :
identifier[fname] = identifier[os] . identifier[path] . identifier[split] ( identifier[fname] )[ literal[int] ]
keyword[if] keyword[not] identifier[isServer] :
identifier[fname] = identifier[fname] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[kwargs] . identifier[keys] () keyword[and] keyword[not] identifier[set_env] . identifier[IS_WIN] :
keyword[if] identifier[kwargs] [ literal[string] ]:
keyword[pass]
keyword[else] :
identifier[fname] = identifier[fname] . identifier[replace] ( literal[string] , literal[string] )
keyword[else] :
identifier[fname] = identifier[fname] . identifier[replace] ( literal[string] , literal[string] )
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[kwargs] . identifier[keys] ()):
identifier[plt] . identifier[savefig] ( identifier[fname] , identifier[dpi] = identifier[kwargs] [ literal[string] ])
keyword[elif] identifier[isServer] :
identifier[plt] . identifier[savefig] ( identifier[fname] , identifier[dpi] = literal[int] )
keyword[else] :
identifier[plt] . identifier[savefig] ( identifier[fname] )
keyword[if] identifier[verbose] :
identifier[print] ( identifier[Figs] [ identifier[key] ], literal[string] , identifier[fname] )
identifier[saved] . identifier[append] ( identifier[fname] )
identifier[plt] . identifier[close] ( identifier[Figs] [ identifier[key] ])
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[print] ( identifier[type] ( identifier[ex] ), identifier[ex] )
identifier[print] ( literal[string] , identifier[Figs] [ identifier[key] ], identifier[filenames] [ identifier[key] ])
identifier[print] ( literal[string] )
keyword[return] identifier[saved] | def save_plots(Figs, filenames, **kwargs):
"""
Parameters
----------
Figs : dict
dictionary of plots, e.g. {'eqarea': 1, ...}
filenames : dict
dictionary of filenames, e.g. {'eqarea': 'mc01a_eqarea.svg', ...}
dict keys should correspond with Figs
"""
saved = []
for key in list(Figs.keys()):
try:
plt.figure(num=Figs[key])
fname = filenames[key]
if set_env.IS_WIN: # always truncate filenames if on Windows
fname = os.path.split(fname)[1] # depends on [control=['if'], data=[]]
if not isServer: # remove illegal ':' character for windows
fname = fname.replace(':', '_') # depends on [control=['if'], data=[]]
if 'incl_directory' in kwargs.keys() and (not set_env.IS_WIN):
if kwargs['incl_directory']:
pass # do not flatten file name # depends on [control=['if'], data=[]]
else:
fname = fname.replace('/', '-') # flatten file name # depends on [control=['if'], data=[]]
else:
fname = fname.replace('/', '-') # flatten file name
if 'dpi' in list(kwargs.keys()):
plt.savefig(fname, dpi=kwargs['dpi']) # depends on [control=['if'], data=[]]
elif isServer:
plt.savefig(fname, dpi=240) # depends on [control=['if'], data=[]]
else:
plt.savefig(fname)
if verbose:
print(Figs[key], ' saved in ', fname) # depends on [control=['if'], data=[]]
saved.append(fname)
plt.close(Figs[key]) # depends on [control=['try'], data=[]]
except Exception as ex:
print(type(ex), ex)
print('could not save: ', Figs[key], filenames[key])
print('output file format not supported ') # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=['key']]
return saved |
def reindex_all(self, model, batch_size=1000):
"""
Reindex all the records.
By default, this method use Model.objects.all() but you can implement
a method `get_queryset` in your subclass. This can be used to optimize
the performance (for example with select_related or prefetch_related).
"""
adapter = self.get_adapter(model)
return adapter.reindex_all(batch_size) | def function[reindex_all, parameter[self, model, batch_size]]:
constant[
Reindex all the records.
By default, this method use Model.objects.all() but you can implement
a method `get_queryset` in your subclass. This can be used to optimize
the performance (for example with select_related or prefetch_related).
]
variable[adapter] assign[=] call[name[self].get_adapter, parameter[name[model]]]
return[call[name[adapter].reindex_all, parameter[name[batch_size]]]] | keyword[def] identifier[reindex_all] ( identifier[self] , identifier[model] , identifier[batch_size] = literal[int] ):
literal[string]
identifier[adapter] = identifier[self] . identifier[get_adapter] ( identifier[model] )
keyword[return] identifier[adapter] . identifier[reindex_all] ( identifier[batch_size] ) | def reindex_all(self, model, batch_size=1000):
"""
Reindex all the records.
By default, this method use Model.objects.all() but you can implement
a method `get_queryset` in your subclass. This can be used to optimize
the performance (for example with select_related or prefetch_related).
"""
adapter = self.get_adapter(model)
return adapter.reindex_all(batch_size) |
def replaceText(self, pos, length, text):
"""Replace length symbols from ``pos`` with new text.
If ``pos`` is an integer, it is interpreted as absolute position, if a tuple - as ``(line, column)``
"""
if isinstance(pos, tuple):
pos = self.mapToAbsPosition(*pos)
endPos = pos + length
if not self.document().findBlock(pos).isValid():
raise IndexError('Invalid start position %d' % pos)
if not self.document().findBlock(endPos).isValid():
raise IndexError('Invalid end position %d' % endPos)
cursor = QTextCursor(self.document())
cursor.setPosition(pos)
cursor.setPosition(endPos, QTextCursor.KeepAnchor)
cursor.insertText(text) | def function[replaceText, parameter[self, pos, length, text]]:
constant[Replace length symbols from ``pos`` with new text.
If ``pos`` is an integer, it is interpreted as absolute position, if a tuple - as ``(line, column)``
]
if call[name[isinstance], parameter[name[pos], name[tuple]]] begin[:]
variable[pos] assign[=] call[name[self].mapToAbsPosition, parameter[<ast.Starred object at 0x7da207f03e50>]]
variable[endPos] assign[=] binary_operation[name[pos] + name[length]]
if <ast.UnaryOp object at 0x7da207f01240> begin[:]
<ast.Raise object at 0x7da207f00df0>
if <ast.UnaryOp object at 0x7da207f03fd0> begin[:]
<ast.Raise object at 0x7da207f00fd0>
variable[cursor] assign[=] call[name[QTextCursor], parameter[call[name[self].document, parameter[]]]]
call[name[cursor].setPosition, parameter[name[pos]]]
call[name[cursor].setPosition, parameter[name[endPos], name[QTextCursor].KeepAnchor]]
call[name[cursor].insertText, parameter[name[text]]] | keyword[def] identifier[replaceText] ( identifier[self] , identifier[pos] , identifier[length] , identifier[text] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[pos] , identifier[tuple] ):
identifier[pos] = identifier[self] . identifier[mapToAbsPosition] (* identifier[pos] )
identifier[endPos] = identifier[pos] + identifier[length]
keyword[if] keyword[not] identifier[self] . identifier[document] (). identifier[findBlock] ( identifier[pos] ). identifier[isValid] ():
keyword[raise] identifier[IndexError] ( literal[string] % identifier[pos] )
keyword[if] keyword[not] identifier[self] . identifier[document] (). identifier[findBlock] ( identifier[endPos] ). identifier[isValid] ():
keyword[raise] identifier[IndexError] ( literal[string] % identifier[endPos] )
identifier[cursor] = identifier[QTextCursor] ( identifier[self] . identifier[document] ())
identifier[cursor] . identifier[setPosition] ( identifier[pos] )
identifier[cursor] . identifier[setPosition] ( identifier[endPos] , identifier[QTextCursor] . identifier[KeepAnchor] )
identifier[cursor] . identifier[insertText] ( identifier[text] ) | def replaceText(self, pos, length, text):
"""Replace length symbols from ``pos`` with new text.
If ``pos`` is an integer, it is interpreted as absolute position, if a tuple - as ``(line, column)``
"""
if isinstance(pos, tuple):
pos = self.mapToAbsPosition(*pos) # depends on [control=['if'], data=[]]
endPos = pos + length
if not self.document().findBlock(pos).isValid():
raise IndexError('Invalid start position %d' % pos) # depends on [control=['if'], data=[]]
if not self.document().findBlock(endPos).isValid():
raise IndexError('Invalid end position %d' % endPos) # depends on [control=['if'], data=[]]
cursor = QTextCursor(self.document())
cursor.setPosition(pos)
cursor.setPosition(endPos, QTextCursor.KeepAnchor)
cursor.insertText(text) |
def decorator(caller, _func=None):
"""decorator(caller) converts a caller function into a decorator"""
if _func is not None: # return a decorated function
# this is obsolete behavior; you should use decorate instead
return decorate(_func, caller)
# else return a decorator function
if inspect.isclass(caller):
name = caller.__name__.lower()
callerfunc = get_init(caller)
doc = 'decorator(%s) converts functions/generators into ' \
'factories of %s objects' % (caller.__name__, caller.__name__)
fun = getfullargspec(callerfunc).args[1] # second arg
elif inspect.isfunction(caller):
if caller.__name__ == '<lambda>':
name = '_lambda_'
else:
name = caller.__name__
callerfunc = caller
doc = caller.__doc__
fun = getfullargspec(callerfunc).args[0] # first arg
else: # assume caller is an object with a __call__ method
name = caller.__class__.__name__.lower()
callerfunc = caller.__call__.__func__
doc = caller.__call__.__doc__
fun = getfullargspec(callerfunc).args[1] # second arg
evaldict = callerfunc.__globals__.copy()
evaldict['_call_'] = caller
evaldict['_decorate_'] = decorate
return FunctionMaker.create(
'%s(%s)' % (name, fun),
'return _decorate_(%s, _call_)' % fun,
evaldict, call=caller, doc=doc, module=caller.__module__,
__wrapped__=caller) | def function[decorator, parameter[caller, _func]]:
constant[decorator(caller) converts a caller function into a decorator]
if compare[name[_func] is_not constant[None]] begin[:]
return[call[name[decorate], parameter[name[_func], name[caller]]]]
if call[name[inspect].isclass, parameter[name[caller]]] begin[:]
variable[name] assign[=] call[name[caller].__name__.lower, parameter[]]
variable[callerfunc] assign[=] call[name[get_init], parameter[name[caller]]]
variable[doc] assign[=] binary_operation[constant[decorator(%s) converts functions/generators into factories of %s objects] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e5870>, <ast.Attribute object at 0x7da20c6e60e0>]]]
variable[fun] assign[=] call[call[name[getfullargspec], parameter[name[callerfunc]]].args][constant[1]]
variable[evaldict] assign[=] call[name[callerfunc].__globals__.copy, parameter[]]
call[name[evaldict]][constant[_call_]] assign[=] name[caller]
call[name[evaldict]][constant[_decorate_]] assign[=] name[decorate]
return[call[name[FunctionMaker].create, parameter[binary_operation[constant[%s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc9a980>, <ast.Name object at 0x7da18dc99990>]]], binary_operation[constant[return _decorate_(%s, _call_)] <ast.Mod object at 0x7da2590d6920> name[fun]], name[evaldict]]]] | keyword[def] identifier[decorator] ( identifier[caller] , identifier[_func] = keyword[None] ):
literal[string]
keyword[if] identifier[_func] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[decorate] ( identifier[_func] , identifier[caller] )
keyword[if] identifier[inspect] . identifier[isclass] ( identifier[caller] ):
identifier[name] = identifier[caller] . identifier[__name__] . identifier[lower] ()
identifier[callerfunc] = identifier[get_init] ( identifier[caller] )
identifier[doc] = literal[string] literal[string] %( identifier[caller] . identifier[__name__] , identifier[caller] . identifier[__name__] )
identifier[fun] = identifier[getfullargspec] ( identifier[callerfunc] ). identifier[args] [ literal[int] ]
keyword[elif] identifier[inspect] . identifier[isfunction] ( identifier[caller] ):
keyword[if] identifier[caller] . identifier[__name__] == literal[string] :
identifier[name] = literal[string]
keyword[else] :
identifier[name] = identifier[caller] . identifier[__name__]
identifier[callerfunc] = identifier[caller]
identifier[doc] = identifier[caller] . identifier[__doc__]
identifier[fun] = identifier[getfullargspec] ( identifier[callerfunc] ). identifier[args] [ literal[int] ]
keyword[else] :
identifier[name] = identifier[caller] . identifier[__class__] . identifier[__name__] . identifier[lower] ()
identifier[callerfunc] = identifier[caller] . identifier[__call__] . identifier[__func__]
identifier[doc] = identifier[caller] . identifier[__call__] . identifier[__doc__]
identifier[fun] = identifier[getfullargspec] ( identifier[callerfunc] ). identifier[args] [ literal[int] ]
identifier[evaldict] = identifier[callerfunc] . identifier[__globals__] . identifier[copy] ()
identifier[evaldict] [ literal[string] ]= identifier[caller]
identifier[evaldict] [ literal[string] ]= identifier[decorate]
keyword[return] identifier[FunctionMaker] . identifier[create] (
literal[string] %( identifier[name] , identifier[fun] ),
literal[string] % identifier[fun] ,
identifier[evaldict] , identifier[call] = identifier[caller] , identifier[doc] = identifier[doc] , identifier[module] = identifier[caller] . identifier[__module__] ,
identifier[__wrapped__] = identifier[caller] ) | def decorator(caller, _func=None):
"""decorator(caller) converts a caller function into a decorator"""
if _func is not None: # return a decorated function
# this is obsolete behavior; you should use decorate instead
return decorate(_func, caller) # depends on [control=['if'], data=['_func']]
# else return a decorator function
if inspect.isclass(caller):
name = caller.__name__.lower()
callerfunc = get_init(caller)
doc = 'decorator(%s) converts functions/generators into factories of %s objects' % (caller.__name__, caller.__name__)
fun = getfullargspec(callerfunc).args[1] # second arg # depends on [control=['if'], data=[]]
elif inspect.isfunction(caller):
if caller.__name__ == '<lambda>':
name = '_lambda_' # depends on [control=['if'], data=[]]
else:
name = caller.__name__
callerfunc = caller
doc = caller.__doc__
fun = getfullargspec(callerfunc).args[0] # first arg # depends on [control=['if'], data=[]]
else: # assume caller is an object with a __call__ method
name = caller.__class__.__name__.lower()
callerfunc = caller.__call__.__func__
doc = caller.__call__.__doc__
fun = getfullargspec(callerfunc).args[1] # second arg
evaldict = callerfunc.__globals__.copy()
evaldict['_call_'] = caller
evaldict['_decorate_'] = decorate
return FunctionMaker.create('%s(%s)' % (name, fun), 'return _decorate_(%s, _call_)' % fun, evaldict, call=caller, doc=doc, module=caller.__module__, __wrapped__=caller) |
def to_file(self, outputfile=DEFAULT_OUTPUTFILE):
"""Write the report to a file.
By default a name is generated.
Parameters:
----------
outputfile : str
The name or the path of the file to generale including the extension (.html).
"""
if outputfile != NO_OUTPUTFILE:
if outputfile == DEFAULT_OUTPUTFILE:
outputfile = 'profile_' + str(hash(self)) + ".html"
# TODO: should be done in the template
with codecs.open(outputfile, 'w+b', encoding='utf8') as self.file:
self.file.write(templates.template('wrapper').render(content=self.html)) | def function[to_file, parameter[self, outputfile]]:
constant[Write the report to a file.
By default a name is generated.
Parameters:
----------
outputfile : str
The name or the path of the file to generale including the extension (.html).
]
if compare[name[outputfile] not_equal[!=] name[NO_OUTPUTFILE]] begin[:]
if compare[name[outputfile] equal[==] name[DEFAULT_OUTPUTFILE]] begin[:]
variable[outputfile] assign[=] binary_operation[binary_operation[constant[profile_] + call[name[str], parameter[call[name[hash], parameter[name[self]]]]]] + constant[.html]]
with call[name[codecs].open, parameter[name[outputfile], constant[w+b]]] begin[:]
call[name[self].file.write, parameter[call[call[name[templates].template, parameter[constant[wrapper]]].render, parameter[]]]] | keyword[def] identifier[to_file] ( identifier[self] , identifier[outputfile] = identifier[DEFAULT_OUTPUTFILE] ):
literal[string]
keyword[if] identifier[outputfile] != identifier[NO_OUTPUTFILE] :
keyword[if] identifier[outputfile] == identifier[DEFAULT_OUTPUTFILE] :
identifier[outputfile] = literal[string] + identifier[str] ( identifier[hash] ( identifier[self] ))+ literal[string]
keyword[with] identifier[codecs] . identifier[open] ( identifier[outputfile] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[self] . identifier[file] :
identifier[self] . identifier[file] . identifier[write] ( identifier[templates] . identifier[template] ( literal[string] ). identifier[render] ( identifier[content] = identifier[self] . identifier[html] )) | def to_file(self, outputfile=DEFAULT_OUTPUTFILE):
"""Write the report to a file.
By default a name is generated.
Parameters:
----------
outputfile : str
The name or the path of the file to generale including the extension (.html).
"""
if outputfile != NO_OUTPUTFILE:
if outputfile == DEFAULT_OUTPUTFILE:
outputfile = 'profile_' + str(hash(self)) + '.html' # depends on [control=['if'], data=['outputfile']]
# TODO: should be done in the template
with codecs.open(outputfile, 'w+b', encoding='utf8') as self.file:
self.file.write(templates.template('wrapper').render(content=self.html)) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['outputfile']] |
def find_conda():
""" Try to find conda on the system """
USER_HOME = os.path.expanduser('~')
CONDA_HOME = os.environ.get('CONDA_HOME', '')
PROGRAMDATA = os.environ.get('PROGRAMDATA', '')
# Search common install paths and sys path
search_paths = [
# Windows
join(PROGRAMDATA, 'miniconda2', 'scripts'),
join(PROGRAMDATA, 'miniconda3', 'scripts'),
join(USER_HOME, 'miniconda2', 'scripts'),
join(USER_HOME, 'miniconda3', 'scripts'),
join(CONDA_HOME, 'scripts'),
# Linux
join(USER_HOME, 'miniconda2', 'bin'),
join(USER_HOME, 'miniconda3', 'bin'),
join(CONDA_HOME, 'bin'),
# TODO: OSX
] + os.environ.get("PATH", "").split(";" if 'win' in sys.path else ":")
cmd = 'conda.exe' if IS_WIN else 'conda'
for conda_path in search_paths:
conda = join(conda_path, cmd)
if exists(conda):
return sh.Command(conda)
# Try to let the system find it
return sh.conda | def function[find_conda, parameter[]]:
constant[ Try to find conda on the system ]
variable[USER_HOME] assign[=] call[name[os].path.expanduser, parameter[constant[~]]]
variable[CONDA_HOME] assign[=] call[name[os].environ.get, parameter[constant[CONDA_HOME], constant[]]]
variable[PROGRAMDATA] assign[=] call[name[os].environ.get, parameter[constant[PROGRAMDATA], constant[]]]
variable[search_paths] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b2347a60>, <ast.Call object at 0x7da1b2344850>, <ast.Call object at 0x7da1b2346800>, <ast.Call object at 0x7da20e9b1ff0>, <ast.Call object at 0x7da20e9b0040>, <ast.Call object at 0x7da20e963250>, <ast.Call object at 0x7da20e960b50>, <ast.Call object at 0x7da20e960220>]] + call[call[name[os].environ.get, parameter[constant[PATH], constant[]]].split, parameter[<ast.IfExp object at 0x7da20e9619f0>]]]
variable[cmd] assign[=] <ast.IfExp object at 0x7da20e963b80>
for taget[name[conda_path]] in starred[name[search_paths]] begin[:]
variable[conda] assign[=] call[name[join], parameter[name[conda_path], name[cmd]]]
if call[name[exists], parameter[name[conda]]] begin[:]
return[call[name[sh].Command, parameter[name[conda]]]]
return[name[sh].conda] | keyword[def] identifier[find_conda] ():
literal[string]
identifier[USER_HOME] = identifier[os] . identifier[path] . identifier[expanduser] ( literal[string] )
identifier[CONDA_HOME] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[PROGRAMDATA] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] )
identifier[search_paths] =[
identifier[join] ( identifier[PROGRAMDATA] , literal[string] , literal[string] ),
identifier[join] ( identifier[PROGRAMDATA] , literal[string] , literal[string] ),
identifier[join] ( identifier[USER_HOME] , literal[string] , literal[string] ),
identifier[join] ( identifier[USER_HOME] , literal[string] , literal[string] ),
identifier[join] ( identifier[CONDA_HOME] , literal[string] ),
identifier[join] ( identifier[USER_HOME] , literal[string] , literal[string] ),
identifier[join] ( identifier[USER_HOME] , literal[string] , literal[string] ),
identifier[join] ( identifier[CONDA_HOME] , literal[string] ),
]+ identifier[os] . identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( literal[string] keyword[if] literal[string] keyword[in] identifier[sys] . identifier[path] keyword[else] literal[string] )
identifier[cmd] = literal[string] keyword[if] identifier[IS_WIN] keyword[else] literal[string]
keyword[for] identifier[conda_path] keyword[in] identifier[search_paths] :
identifier[conda] = identifier[join] ( identifier[conda_path] , identifier[cmd] )
keyword[if] identifier[exists] ( identifier[conda] ):
keyword[return] identifier[sh] . identifier[Command] ( identifier[conda] )
keyword[return] identifier[sh] . identifier[conda] | def find_conda():
""" Try to find conda on the system """
USER_HOME = os.path.expanduser('~')
CONDA_HOME = os.environ.get('CONDA_HOME', '')
PROGRAMDATA = os.environ.get('PROGRAMDATA', '')
# Search common install paths and sys path
# Windows
# Linux
# TODO: OSX
search_paths = [join(PROGRAMDATA, 'miniconda2', 'scripts'), join(PROGRAMDATA, 'miniconda3', 'scripts'), join(USER_HOME, 'miniconda2', 'scripts'), join(USER_HOME, 'miniconda3', 'scripts'), join(CONDA_HOME, 'scripts'), join(USER_HOME, 'miniconda2', 'bin'), join(USER_HOME, 'miniconda3', 'bin'), join(CONDA_HOME, 'bin')] + os.environ.get('PATH', '').split(';' if 'win' in sys.path else ':')
cmd = 'conda.exe' if IS_WIN else 'conda'
for conda_path in search_paths:
conda = join(conda_path, cmd)
if exists(conda):
return sh.Command(conda) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['conda_path']]
# Try to let the system find it
return sh.conda |
def render_breadcrumbs(context, *args):
"""
Render breadcrumbs html using bootstrap css classes.
"""
try:
template_path = args[0]
except IndexError:
template_path = getattr(settings, 'BREADCRUMBS_TEMPLATE',
'django_bootstrap_breadcrumbs/bootstrap2.html')
links = []
for (label, viewname, view_args, view_kwargs) in context[
'request'].META.get(CONTEXT_KEY, []):
if isinstance(viewname, Model) and hasattr(
viewname, 'get_absolute_url') and ismethod(
viewname.get_absolute_url):
url = viewname.get_absolute_url(*view_args, **view_kwargs)
else:
try:
try:
# 'resolver_match' introduced in Django 1.5
current_app = context['request'].resolver_match.namespace
except AttributeError:
try:
resolver_match = resolve(context['request'].path)
current_app = resolver_match.namespace
except Resolver404:
current_app = None
url = reverse(viewname=viewname, args=view_args,
kwargs=view_kwargs, current_app=current_app)
except NoReverseMatch:
url = viewname
links.append((url, smart_text(label) if label else label))
if not links:
return ''
if VERSION > (1, 8): # pragma: nocover
# RequestContext is deprecated in recent django
# https://docs.djangoproject.com/en/1.10/ref/templates/upgrading/
context = context.flatten()
context['breadcrumbs'] = links
context['breadcrumbs_total'] = len(links)
return mark_safe(template.loader.render_to_string(template_path, context)) | def function[render_breadcrumbs, parameter[context]]:
constant[
Render breadcrumbs html using bootstrap css classes.
]
<ast.Try object at 0x7da20c990a30>
variable[links] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c993f10>, <ast.Name object at 0x7da20c993460>, <ast.Name object at 0x7da20c990e80>, <ast.Name object at 0x7da20c992b30>]]] in starred[call[call[name[context]][constant[request]].META.get, parameter[name[CONTEXT_KEY], list[[]]]]] begin[:]
if <ast.BoolOp object at 0x7da20c992d70> begin[:]
variable[url] assign[=] call[name[viewname].get_absolute_url, parameter[<ast.Starred object at 0x7da20c991ab0>]]
call[name[links].append, parameter[tuple[[<ast.Name object at 0x7da20c9902b0>, <ast.IfExp object at 0x7da20c990520>]]]]
if <ast.UnaryOp object at 0x7da20c992050> begin[:]
return[constant[]]
if compare[name[VERSION] greater[>] tuple[[<ast.Constant object at 0x7da20c992ec0>, <ast.Constant object at 0x7da20c990970>]]] begin[:]
variable[context] assign[=] call[name[context].flatten, parameter[]]
call[name[context]][constant[breadcrumbs]] assign[=] name[links]
call[name[context]][constant[breadcrumbs_total]] assign[=] call[name[len], parameter[name[links]]]
return[call[name[mark_safe], parameter[call[name[template].loader.render_to_string, parameter[name[template_path], name[context]]]]]] | keyword[def] identifier[render_breadcrumbs] ( identifier[context] ,* identifier[args] ):
literal[string]
keyword[try] :
identifier[template_path] = identifier[args] [ literal[int] ]
keyword[except] identifier[IndexError] :
identifier[template_path] = identifier[getattr] ( identifier[settings] , literal[string] ,
literal[string] )
identifier[links] =[]
keyword[for] ( identifier[label] , identifier[viewname] , identifier[view_args] , identifier[view_kwargs] ) keyword[in] identifier[context] [
literal[string] ]. identifier[META] . identifier[get] ( identifier[CONTEXT_KEY] ,[]):
keyword[if] identifier[isinstance] ( identifier[viewname] , identifier[Model] ) keyword[and] identifier[hasattr] (
identifier[viewname] , literal[string] ) keyword[and] identifier[ismethod] (
identifier[viewname] . identifier[get_absolute_url] ):
identifier[url] = identifier[viewname] . identifier[get_absolute_url] (* identifier[view_args] ,** identifier[view_kwargs] )
keyword[else] :
keyword[try] :
keyword[try] :
identifier[current_app] = identifier[context] [ literal[string] ]. identifier[resolver_match] . identifier[namespace]
keyword[except] identifier[AttributeError] :
keyword[try] :
identifier[resolver_match] = identifier[resolve] ( identifier[context] [ literal[string] ]. identifier[path] )
identifier[current_app] = identifier[resolver_match] . identifier[namespace]
keyword[except] identifier[Resolver404] :
identifier[current_app] = keyword[None]
identifier[url] = identifier[reverse] ( identifier[viewname] = identifier[viewname] , identifier[args] = identifier[view_args] ,
identifier[kwargs] = identifier[view_kwargs] , identifier[current_app] = identifier[current_app] )
keyword[except] identifier[NoReverseMatch] :
identifier[url] = identifier[viewname]
identifier[links] . identifier[append] (( identifier[url] , identifier[smart_text] ( identifier[label] ) keyword[if] identifier[label] keyword[else] identifier[label] ))
keyword[if] keyword[not] identifier[links] :
keyword[return] literal[string]
keyword[if] identifier[VERSION] >( literal[int] , literal[int] ):
identifier[context] = identifier[context] . identifier[flatten] ()
identifier[context] [ literal[string] ]= identifier[links]
identifier[context] [ literal[string] ]= identifier[len] ( identifier[links] )
keyword[return] identifier[mark_safe] ( identifier[template] . identifier[loader] . identifier[render_to_string] ( identifier[template_path] , identifier[context] )) | def render_breadcrumbs(context, *args):
"""
Render breadcrumbs html using bootstrap css classes.
"""
try:
template_path = args[0] # depends on [control=['try'], data=[]]
except IndexError:
template_path = getattr(settings, 'BREADCRUMBS_TEMPLATE', 'django_bootstrap_breadcrumbs/bootstrap2.html') # depends on [control=['except'], data=[]]
links = []
for (label, viewname, view_args, view_kwargs) in context['request'].META.get(CONTEXT_KEY, []):
if isinstance(viewname, Model) and hasattr(viewname, 'get_absolute_url') and ismethod(viewname.get_absolute_url):
url = viewname.get_absolute_url(*view_args, **view_kwargs) # depends on [control=['if'], data=[]]
else:
try:
try:
# 'resolver_match' introduced in Django 1.5
current_app = context['request'].resolver_match.namespace # depends on [control=['try'], data=[]]
except AttributeError:
try:
resolver_match = resolve(context['request'].path)
current_app = resolver_match.namespace # depends on [control=['try'], data=[]]
except Resolver404:
current_app = None # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
url = reverse(viewname=viewname, args=view_args, kwargs=view_kwargs, current_app=current_app) # depends on [control=['try'], data=[]]
except NoReverseMatch:
url = viewname # depends on [control=['except'], data=[]]
links.append((url, smart_text(label) if label else label)) # depends on [control=['for'], data=[]]
if not links:
return '' # depends on [control=['if'], data=[]]
if VERSION > (1, 8): # pragma: nocover
# RequestContext is deprecated in recent django
# https://docs.djangoproject.com/en/1.10/ref/templates/upgrading/
context = context.flatten() # depends on [control=['if'], data=[]]
context['breadcrumbs'] = links
context['breadcrumbs_total'] = len(links)
return mark_safe(template.loader.render_to_string(template_path, context)) |
def decamel_to_snake(string):
"""Convert to lower case, join camel case with underscore.
CamelCase -> camel_case. Camel Case -> camel_case.
"""
strings = [decamel(word) if not word.isupper() else word.lower()
for word in string.split()]
return "_".join([snake(dstring)for dstring in strings]) | def function[decamel_to_snake, parameter[string]]:
constant[Convert to lower case, join camel case with underscore.
CamelCase -> camel_case. Camel Case -> camel_case.
]
variable[strings] assign[=] <ast.ListComp object at 0x7da1b1bb2350>
return[call[constant[_].join, parameter[<ast.ListComp object at 0x7da1b1bb3520>]]] | keyword[def] identifier[decamel_to_snake] ( identifier[string] ):
literal[string]
identifier[strings] =[ identifier[decamel] ( identifier[word] ) keyword[if] keyword[not] identifier[word] . identifier[isupper] () keyword[else] identifier[word] . identifier[lower] ()
keyword[for] identifier[word] keyword[in] identifier[string] . identifier[split] ()]
keyword[return] literal[string] . identifier[join] ([ identifier[snake] ( identifier[dstring] ) keyword[for] identifier[dstring] keyword[in] identifier[strings] ]) | def decamel_to_snake(string):
"""Convert to lower case, join camel case with underscore.
CamelCase -> camel_case. Camel Case -> camel_case.
"""
strings = [decamel(word) if not word.isupper() else word.lower() for word in string.split()]
return '_'.join([snake(dstring) for dstring in strings]) |
def err(msg, level=-1, prefix=True):
"""Prints the specified message as an error; prepends "ERROR" to
the message, so that can be left off.
"""
if will_print(level) or verbosity is None:
printer(("ERROR: " if prefix else "") + msg, "red") | def function[err, parameter[msg, level, prefix]]:
constant[Prints the specified message as an error; prepends "ERROR" to
the message, so that can be left off.
]
if <ast.BoolOp object at 0x7da1b2699660> begin[:]
call[name[printer], parameter[binary_operation[<ast.IfExp object at 0x7da1b2698af0> + name[msg]], constant[red]]] | keyword[def] identifier[err] ( identifier[msg] , identifier[level] =- literal[int] , identifier[prefix] = keyword[True] ):
literal[string]
keyword[if] identifier[will_print] ( identifier[level] ) keyword[or] identifier[verbosity] keyword[is] keyword[None] :
identifier[printer] (( literal[string] keyword[if] identifier[prefix] keyword[else] literal[string] )+ identifier[msg] , literal[string] ) | def err(msg, level=-1, prefix=True):
"""Prints the specified message as an error; prepends "ERROR" to
the message, so that can be left off.
"""
if will_print(level) or verbosity is None:
printer(('ERROR: ' if prefix else '') + msg, 'red') # depends on [control=['if'], data=[]] |
def unique(new_cmp_dict, old_cmp_dict):
"""Return a list dict of
the unique keys in new_cmp_dict
"""
newkeys = set(new_cmp_dict)
oldkeys = set(old_cmp_dict)
unique = newkeys - oldkeys
unique_ldict = []
for key in unique:
unique_ldict.append(new_cmp_dict[key])
return unique_ldict | def function[unique, parameter[new_cmp_dict, old_cmp_dict]]:
constant[Return a list dict of
the unique keys in new_cmp_dict
]
variable[newkeys] assign[=] call[name[set], parameter[name[new_cmp_dict]]]
variable[oldkeys] assign[=] call[name[set], parameter[name[old_cmp_dict]]]
variable[unique] assign[=] binary_operation[name[newkeys] - name[oldkeys]]
variable[unique_ldict] assign[=] list[[]]
for taget[name[key]] in starred[name[unique]] begin[:]
call[name[unique_ldict].append, parameter[call[name[new_cmp_dict]][name[key]]]]
return[name[unique_ldict]] | keyword[def] identifier[unique] ( identifier[new_cmp_dict] , identifier[old_cmp_dict] ):
literal[string]
identifier[newkeys] = identifier[set] ( identifier[new_cmp_dict] )
identifier[oldkeys] = identifier[set] ( identifier[old_cmp_dict] )
identifier[unique] = identifier[newkeys] - identifier[oldkeys]
identifier[unique_ldict] =[]
keyword[for] identifier[key] keyword[in] identifier[unique] :
identifier[unique_ldict] . identifier[append] ( identifier[new_cmp_dict] [ identifier[key] ])
keyword[return] identifier[unique_ldict] | def unique(new_cmp_dict, old_cmp_dict):
"""Return a list dict of
the unique keys in new_cmp_dict
"""
newkeys = set(new_cmp_dict)
oldkeys = set(old_cmp_dict)
unique = newkeys - oldkeys
unique_ldict = []
for key in unique:
unique_ldict.append(new_cmp_dict[key]) # depends on [control=['for'], data=['key']]
return unique_ldict |
def result(self, timeout=None):
"""Return the value returned by the call. If the call hasn't yet
completed then this method will wait up to ''timeout'' seconds. More
information in the :doc:`usage` page. If the call hasn't completed in
timeout seconds then a TimeoutError will be raised. If timeout is not
specified or None then there is no limit to the wait time.
If the future is cancelled before completing then CancelledError will
be raised.
If the call raised an exception then this method will raise the same
exception.
:returns: The value returned by the callable object."""
if not self._ended():
return scoop.futures._join(self)
if self.exceptionValue is not None:
raise self.exceptionValue
return self.resultValue | def function[result, parameter[self, timeout]]:
constant[Return the value returned by the call. If the call hasn't yet
completed then this method will wait up to ''timeout'' seconds. More
information in the :doc:`usage` page. If the call hasn't completed in
timeout seconds then a TimeoutError will be raised. If timeout is not
specified or None then there is no limit to the wait time.
If the future is cancelled before completing then CancelledError will
be raised.
If the call raised an exception then this method will raise the same
exception.
:returns: The value returned by the callable object.]
if <ast.UnaryOp object at 0x7da18ede4ac0> begin[:]
return[call[name[scoop].futures._join, parameter[name[self]]]]
if compare[name[self].exceptionValue is_not constant[None]] begin[:]
<ast.Raise object at 0x7da18ede4250>
return[name[self].resultValue] | keyword[def] identifier[result] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ended] ():
keyword[return] identifier[scoop] . identifier[futures] . identifier[_join] ( identifier[self] )
keyword[if] identifier[self] . identifier[exceptionValue] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[self] . identifier[exceptionValue]
keyword[return] identifier[self] . identifier[resultValue] | def result(self, timeout=None):
"""Return the value returned by the call. If the call hasn't yet
completed then this method will wait up to ''timeout'' seconds. More
information in the :doc:`usage` page. If the call hasn't completed in
timeout seconds then a TimeoutError will be raised. If timeout is not
specified or None then there is no limit to the wait time.
If the future is cancelled before completing then CancelledError will
be raised.
If the call raised an exception then this method will raise the same
exception.
:returns: The value returned by the callable object."""
if not self._ended():
return scoop.futures._join(self) # depends on [control=['if'], data=[]]
if self.exceptionValue is not None:
raise self.exceptionValue # depends on [control=['if'], data=[]]
return self.resultValue |
def _add_base_class(mcs, cls):
""" Adds new class *cls* to base classes
"""
# Do all magic only if subclass had defined required attributes
if getattr(mcs, '_base_classes_hash', None) is not None:
meta = getattr(cls, 'Meta', None)
_hash = getattr(meta, mcs._hashattr, None)
if _hash is None and cls not in mcs._get_base_classes():
mcs._base_classes.insert(0, cls)
mcs._generated_class = {} # Cleanup all caches
elif _hash is not None and cls not in mcs._get_base_classes(_hash):
mcs._base_classes_hash[_hash].insert(0, cls)
mcs._generated_class[_hash] = None | def function[_add_base_class, parameter[mcs, cls]]:
constant[ Adds new class *cls* to base classes
]
if compare[call[name[getattr], parameter[name[mcs], constant[_base_classes_hash], constant[None]]] is_not constant[None]] begin[:]
variable[meta] assign[=] call[name[getattr], parameter[name[cls], constant[Meta], constant[None]]]
variable[_hash] assign[=] call[name[getattr], parameter[name[meta], name[mcs]._hashattr, constant[None]]]
if <ast.BoolOp object at 0x7da1b1052410> begin[:]
call[name[mcs]._base_classes.insert, parameter[constant[0], name[cls]]]
name[mcs]._generated_class assign[=] dictionary[[], []] | keyword[def] identifier[_add_base_class] ( identifier[mcs] , identifier[cls] ):
literal[string]
keyword[if] identifier[getattr] ( identifier[mcs] , literal[string] , keyword[None] ) keyword[is] keyword[not] keyword[None] :
identifier[meta] = identifier[getattr] ( identifier[cls] , literal[string] , keyword[None] )
identifier[_hash] = identifier[getattr] ( identifier[meta] , identifier[mcs] . identifier[_hashattr] , keyword[None] )
keyword[if] identifier[_hash] keyword[is] keyword[None] keyword[and] identifier[cls] keyword[not] keyword[in] identifier[mcs] . identifier[_get_base_classes] ():
identifier[mcs] . identifier[_base_classes] . identifier[insert] ( literal[int] , identifier[cls] )
identifier[mcs] . identifier[_generated_class] ={}
keyword[elif] identifier[_hash] keyword[is] keyword[not] keyword[None] keyword[and] identifier[cls] keyword[not] keyword[in] identifier[mcs] . identifier[_get_base_classes] ( identifier[_hash] ):
identifier[mcs] . identifier[_base_classes_hash] [ identifier[_hash] ]. identifier[insert] ( literal[int] , identifier[cls] )
identifier[mcs] . identifier[_generated_class] [ identifier[_hash] ]= keyword[None] | def _add_base_class(mcs, cls):
""" Adds new class *cls* to base classes
"""
# Do all magic only if subclass had defined required attributes
if getattr(mcs, '_base_classes_hash', None) is not None:
meta = getattr(cls, 'Meta', None)
_hash = getattr(meta, mcs._hashattr, None)
if _hash is None and cls not in mcs._get_base_classes():
mcs._base_classes.insert(0, cls)
mcs._generated_class = {} # Cleanup all caches # depends on [control=['if'], data=[]]
elif _hash is not None and cls not in mcs._get_base_classes(_hash):
mcs._base_classes_hash[_hash].insert(0, cls)
mcs._generated_class[_hash] = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def remove_entry(self, entry = None):
"""This method can remove entries.
The v1Entry-object entry is needed.
"""
if entry is None or type(entry) is not v1Entry:
raise KPError("Need an entry.")
elif entry in self.entries:
entry.group.entries.remove(entry)
self.entries.remove(entry)
self._num_entries -= 1
return True
else:
raise KPError("Given entry doesn't exist.") | def function[remove_entry, parameter[self, entry]]:
constant[This method can remove entries.
The v1Entry-object entry is needed.
]
if <ast.BoolOp object at 0x7da1b26ca230> begin[:]
<ast.Raise object at 0x7da1b26caf80> | keyword[def] identifier[remove_entry] ( identifier[self] , identifier[entry] = keyword[None] ):
literal[string]
keyword[if] identifier[entry] keyword[is] keyword[None] keyword[or] identifier[type] ( identifier[entry] ) keyword[is] keyword[not] identifier[v1Entry] :
keyword[raise] identifier[KPError] ( literal[string] )
keyword[elif] identifier[entry] keyword[in] identifier[self] . identifier[entries] :
identifier[entry] . identifier[group] . identifier[entries] . identifier[remove] ( identifier[entry] )
identifier[self] . identifier[entries] . identifier[remove] ( identifier[entry] )
identifier[self] . identifier[_num_entries] -= literal[int]
keyword[return] keyword[True]
keyword[else] :
keyword[raise] identifier[KPError] ( literal[string] ) | def remove_entry(self, entry=None):
"""This method can remove entries.
The v1Entry-object entry is needed.
"""
if entry is None or type(entry) is not v1Entry:
raise KPError('Need an entry.') # depends on [control=['if'], data=[]]
elif entry in self.entries:
entry.group.entries.remove(entry)
self.entries.remove(entry)
self._num_entries -= 1
return True # depends on [control=['if'], data=['entry']]
else:
raise KPError("Given entry doesn't exist.") |
def service_set_tag(path, service_name, tag):
'''
Change the tag of a docker-compose service
This does not pull or up the service
This wil re-write your yaml file. Comments will be lost. Indentation is set to 2 spaces
path
Path where the docker-compose file is stored on the server
service_name
Name of the service to remove
tag
Name of the tag (often used as version) that the service image should have
CLI Example:
.. code-block:: bash
salt myminion dockercompose.service_create /path/where/docker-compose/stored service_name tag
'''
compose_result, err = __load_docker_compose(path)
if err:
return err
services = compose_result['compose_content']['services']
if service_name not in services:
return __standardize_result(False,
'Service {0} did not exists'.format(service_name),
None, None)
if 'image' not in services[service_name]:
return __standardize_result(False,
'Service {0} did not contain the variable "image"'.format(service_name),
None, None)
image = services[service_name]['image'].split(':')[0]
services[service_name]['image'] = '{0}:{1}'.format(image, tag)
return __dump_compose_file(path, compose_result,
'Service {0} is set to tag "{1}"'.format(service_name, tag),
already_existed=True) | def function[service_set_tag, parameter[path, service_name, tag]]:
constant[
Change the tag of a docker-compose service
This does not pull or up the service
This wil re-write your yaml file. Comments will be lost. Indentation is set to 2 spaces
path
Path where the docker-compose file is stored on the server
service_name
Name of the service to remove
tag
Name of the tag (often used as version) that the service image should have
CLI Example:
.. code-block:: bash
salt myminion dockercompose.service_create /path/where/docker-compose/stored service_name tag
]
<ast.Tuple object at 0x7da1b21ae950> assign[=] call[name[__load_docker_compose], parameter[name[path]]]
if name[err] begin[:]
return[name[err]]
variable[services] assign[=] call[call[name[compose_result]][constant[compose_content]]][constant[services]]
if compare[name[service_name] <ast.NotIn object at 0x7da2590d7190> name[services]] begin[:]
return[call[name[__standardize_result], parameter[constant[False], call[constant[Service {0} did not exists].format, parameter[name[service_name]]], constant[None], constant[None]]]]
if compare[constant[image] <ast.NotIn object at 0x7da2590d7190> call[name[services]][name[service_name]]] begin[:]
return[call[name[__standardize_result], parameter[constant[False], call[constant[Service {0} did not contain the variable "image"].format, parameter[name[service_name]]], constant[None], constant[None]]]]
variable[image] assign[=] call[call[call[call[name[services]][name[service_name]]][constant[image]].split, parameter[constant[:]]]][constant[0]]
call[call[name[services]][name[service_name]]][constant[image]] assign[=] call[constant[{0}:{1}].format, parameter[name[image], name[tag]]]
return[call[name[__dump_compose_file], parameter[name[path], name[compose_result], call[constant[Service {0} is set to tag "{1}"].format, parameter[name[service_name], name[tag]]]]]] | keyword[def] identifier[service_set_tag] ( identifier[path] , identifier[service_name] , identifier[tag] ):
literal[string]
identifier[compose_result] , identifier[err] = identifier[__load_docker_compose] ( identifier[path] )
keyword[if] identifier[err] :
keyword[return] identifier[err]
identifier[services] = identifier[compose_result] [ literal[string] ][ literal[string] ]
keyword[if] identifier[service_name] keyword[not] keyword[in] identifier[services] :
keyword[return] identifier[__standardize_result] ( keyword[False] ,
literal[string] . identifier[format] ( identifier[service_name] ),
keyword[None] , keyword[None] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[services] [ identifier[service_name] ]:
keyword[return] identifier[__standardize_result] ( keyword[False] ,
literal[string] . identifier[format] ( identifier[service_name] ),
keyword[None] , keyword[None] )
identifier[image] = identifier[services] [ identifier[service_name] ][ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]
identifier[services] [ identifier[service_name] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[image] , identifier[tag] )
keyword[return] identifier[__dump_compose_file] ( identifier[path] , identifier[compose_result] ,
literal[string] . identifier[format] ( identifier[service_name] , identifier[tag] ),
identifier[already_existed] = keyword[True] ) | def service_set_tag(path, service_name, tag):
"""
Change the tag of a docker-compose service
This does not pull or up the service
This wil re-write your yaml file. Comments will be lost. Indentation is set to 2 spaces
path
Path where the docker-compose file is stored on the server
service_name
Name of the service to remove
tag
Name of the tag (often used as version) that the service image should have
CLI Example:
.. code-block:: bash
salt myminion dockercompose.service_create /path/where/docker-compose/stored service_name tag
"""
(compose_result, err) = __load_docker_compose(path)
if err:
return err # depends on [control=['if'], data=[]]
services = compose_result['compose_content']['services']
if service_name not in services:
return __standardize_result(False, 'Service {0} did not exists'.format(service_name), None, None) # depends on [control=['if'], data=['service_name']]
if 'image' not in services[service_name]:
return __standardize_result(False, 'Service {0} did not contain the variable "image"'.format(service_name), None, None) # depends on [control=['if'], data=[]]
image = services[service_name]['image'].split(':')[0]
services[service_name]['image'] = '{0}:{1}'.format(image, tag)
return __dump_compose_file(path, compose_result, 'Service {0} is set to tag "{1}"'.format(service_name, tag), already_existed=True) |
def define_spotsignal(self):
"""
Identify the "expected" flux value at the time of each observation based on the
Kepler long-cadence data, to ensure variations observed are not the effects of a single
large starspot. Only works if the target star was targeted for long or short cadence
observations during the primary mission.
"""
client = kplr.API()
star = client.star(self.kic)
lcs = star.get_light_curves(short_cadence=False)
time, flux, ferr, qual = [], [], [], []
for lc in lcs:
with lc.open() as f:
hdu_data = f[1].data
time.append(hdu_data["time"])
flux.append(hdu_data["pdcsap_flux"])
ferr.append(hdu_data["pdcsap_flux_err"])
qual.append(hdu_data["sap_quality"])
tout = np.array([])
fout = np.array([])
eout = np.array([])
for i in range(len(flux)):
t = time[i][qual[i] == 0]
f = flux[i][qual[i] == 0]
e = ferr[i][qual[i] == 0]
t = t[np.isfinite(f)]
e = e[np.isfinite(f)]
f = f[np.isfinite(f)]
e /= np.median(f)
f /= np.median(f)
tout = np.append(tout, t[50:]+54833)
fout = np.append(fout, f[50:])
eout = np.append(eout, e[50:])
self.spot_signal = np.zeros(52)
for i in range(len(self.times)):
if self.times[i] < 55000:
self.spot_signal[i] = 1.0
else:
self.spot_signal[i] = fout[np.abs(self.times[i] - tout) == np.min(np.abs(self.times[i] - tout))] | def function[define_spotsignal, parameter[self]]:
constant[
Identify the "expected" flux value at the time of each observation based on the
Kepler long-cadence data, to ensure variations observed are not the effects of a single
large starspot. Only works if the target star was targeted for long or short cadence
observations during the primary mission.
]
variable[client] assign[=] call[name[kplr].API, parameter[]]
variable[star] assign[=] call[name[client].star, parameter[name[self].kic]]
variable[lcs] assign[=] call[name[star].get_light_curves, parameter[]]
<ast.Tuple object at 0x7da20c6a8b80> assign[=] tuple[[<ast.List object at 0x7da20c6aa350>, <ast.List object at 0x7da20c6a9810>, <ast.List object at 0x7da20c6aab90>, <ast.List object at 0x7da20c6a8c70>]]
for taget[name[lc]] in starred[name[lcs]] begin[:]
with call[name[lc].open, parameter[]] begin[:]
variable[hdu_data] assign[=] call[name[f]][constant[1]].data
call[name[time].append, parameter[call[name[hdu_data]][constant[time]]]]
call[name[flux].append, parameter[call[name[hdu_data]][constant[pdcsap_flux]]]]
call[name[ferr].append, parameter[call[name[hdu_data]][constant[pdcsap_flux_err]]]]
call[name[qual].append, parameter[call[name[hdu_data]][constant[sap_quality]]]]
variable[tout] assign[=] call[name[np].array, parameter[list[[]]]]
variable[fout] assign[=] call[name[np].array, parameter[list[[]]]]
variable[eout] assign[=] call[name[np].array, parameter[list[[]]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[flux]]]]]] begin[:]
variable[t] assign[=] call[call[name[time]][name[i]]][compare[call[name[qual]][name[i]] equal[==] constant[0]]]
variable[f] assign[=] call[call[name[flux]][name[i]]][compare[call[name[qual]][name[i]] equal[==] constant[0]]]
variable[e] assign[=] call[call[name[ferr]][name[i]]][compare[call[name[qual]][name[i]] equal[==] constant[0]]]
variable[t] assign[=] call[name[t]][call[name[np].isfinite, parameter[name[f]]]]
variable[e] assign[=] call[name[e]][call[name[np].isfinite, parameter[name[f]]]]
variable[f] assign[=] call[name[f]][call[name[np].isfinite, parameter[name[f]]]]
<ast.AugAssign object at 0x7da1afec0310>
<ast.AugAssign object at 0x7da1afec30d0>
variable[tout] assign[=] call[name[np].append, parameter[name[tout], binary_operation[call[name[t]][<ast.Slice object at 0x7da1afec2e30>] + constant[54833]]]]
variable[fout] assign[=] call[name[np].append, parameter[name[fout], call[name[f]][<ast.Slice object at 0x7da1afec2560>]]]
variable[eout] assign[=] call[name[np].append, parameter[name[eout], call[name[e]][<ast.Slice object at 0x7da1afec27a0>]]]
name[self].spot_signal assign[=] call[name[np].zeros, parameter[constant[52]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].times]]]]] begin[:]
if compare[call[name[self].times][name[i]] less[<] constant[55000]] begin[:]
call[name[self].spot_signal][name[i]] assign[=] constant[1.0] | keyword[def] identifier[define_spotsignal] ( identifier[self] ):
literal[string]
identifier[client] = identifier[kplr] . identifier[API] ()
identifier[star] = identifier[client] . identifier[star] ( identifier[self] . identifier[kic] )
identifier[lcs] = identifier[star] . identifier[get_light_curves] ( identifier[short_cadence] = keyword[False] )
identifier[time] , identifier[flux] , identifier[ferr] , identifier[qual] =[],[],[],[]
keyword[for] identifier[lc] keyword[in] identifier[lcs] :
keyword[with] identifier[lc] . identifier[open] () keyword[as] identifier[f] :
identifier[hdu_data] = identifier[f] [ literal[int] ]. identifier[data]
identifier[time] . identifier[append] ( identifier[hdu_data] [ literal[string] ])
identifier[flux] . identifier[append] ( identifier[hdu_data] [ literal[string] ])
identifier[ferr] . identifier[append] ( identifier[hdu_data] [ literal[string] ])
identifier[qual] . identifier[append] ( identifier[hdu_data] [ literal[string] ])
identifier[tout] = identifier[np] . identifier[array] ([])
identifier[fout] = identifier[np] . identifier[array] ([])
identifier[eout] = identifier[np] . identifier[array] ([])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[flux] )):
identifier[t] = identifier[time] [ identifier[i] ][ identifier[qual] [ identifier[i] ]== literal[int] ]
identifier[f] = identifier[flux] [ identifier[i] ][ identifier[qual] [ identifier[i] ]== literal[int] ]
identifier[e] = identifier[ferr] [ identifier[i] ][ identifier[qual] [ identifier[i] ]== literal[int] ]
identifier[t] = identifier[t] [ identifier[np] . identifier[isfinite] ( identifier[f] )]
identifier[e] = identifier[e] [ identifier[np] . identifier[isfinite] ( identifier[f] )]
identifier[f] = identifier[f] [ identifier[np] . identifier[isfinite] ( identifier[f] )]
identifier[e] /= identifier[np] . identifier[median] ( identifier[f] )
identifier[f] /= identifier[np] . identifier[median] ( identifier[f] )
identifier[tout] = identifier[np] . identifier[append] ( identifier[tout] , identifier[t] [ literal[int] :]+ literal[int] )
identifier[fout] = identifier[np] . identifier[append] ( identifier[fout] , identifier[f] [ literal[int] :])
identifier[eout] = identifier[np] . identifier[append] ( identifier[eout] , identifier[e] [ literal[int] :])
identifier[self] . identifier[spot_signal] = identifier[np] . identifier[zeros] ( literal[int] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[times] )):
keyword[if] identifier[self] . identifier[times] [ identifier[i] ]< literal[int] :
identifier[self] . identifier[spot_signal] [ identifier[i] ]= literal[int]
keyword[else] :
identifier[self] . identifier[spot_signal] [ identifier[i] ]= identifier[fout] [ identifier[np] . identifier[abs] ( identifier[self] . identifier[times] [ identifier[i] ]- identifier[tout] )== identifier[np] . identifier[min] ( identifier[np] . identifier[abs] ( identifier[self] . identifier[times] [ identifier[i] ]- identifier[tout] ))] | def define_spotsignal(self):
"""
Identify the "expected" flux value at the time of each observation based on the
Kepler long-cadence data, to ensure variations observed are not the effects of a single
large starspot. Only works if the target star was targeted for long or short cadence
observations during the primary mission.
"""
client = kplr.API()
star = client.star(self.kic)
lcs = star.get_light_curves(short_cadence=False)
(time, flux, ferr, qual) = ([], [], [], [])
for lc in lcs:
with lc.open() as f:
hdu_data = f[1].data
time.append(hdu_data['time'])
flux.append(hdu_data['pdcsap_flux'])
ferr.append(hdu_data['pdcsap_flux_err'])
qual.append(hdu_data['sap_quality']) # depends on [control=['with'], data=['f']]
tout = np.array([])
fout = np.array([])
eout = np.array([])
for i in range(len(flux)):
t = time[i][qual[i] == 0]
f = flux[i][qual[i] == 0]
e = ferr[i][qual[i] == 0]
t = t[np.isfinite(f)]
e = e[np.isfinite(f)]
f = f[np.isfinite(f)]
e /= np.median(f)
f /= np.median(f)
tout = np.append(tout, t[50:] + 54833)
fout = np.append(fout, f[50:])
eout = np.append(eout, e[50:]) # depends on [control=['for'], data=['i']]
self.spot_signal = np.zeros(52)
for i in range(len(self.times)):
if self.times[i] < 55000:
self.spot_signal[i] = 1.0 # depends on [control=['if'], data=[]]
else:
self.spot_signal[i] = fout[np.abs(self.times[i] - tout) == np.min(np.abs(self.times[i] - tout))] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['lc']] |
def randomize(self, period=None):
"""Randomize the permutation table used by the noise functions. This
makes them generate a different noise pattern for the same inputs.
"""
if period is not None:
self.period = period
perm = list(range(self.period))
perm_right = self.period - 1
for i in list(perm):
j = self.randint_function(0, perm_right)
perm[i], perm[j] = perm[j], perm[i]
self.permutation = tuple(perm) * 2 | def function[randomize, parameter[self, period]]:
constant[Randomize the permutation table used by the noise functions. This
makes them generate a different noise pattern for the same inputs.
]
if compare[name[period] is_not constant[None]] begin[:]
name[self].period assign[=] name[period]
variable[perm] assign[=] call[name[list], parameter[call[name[range], parameter[name[self].period]]]]
variable[perm_right] assign[=] binary_operation[name[self].period - constant[1]]
for taget[name[i]] in starred[call[name[list], parameter[name[perm]]]] begin[:]
variable[j] assign[=] call[name[self].randint_function, parameter[constant[0], name[perm_right]]]
<ast.Tuple object at 0x7da1b19b78e0> assign[=] tuple[[<ast.Subscript object at 0x7da1b19b5000>, <ast.Subscript object at 0x7da1b19b7460>]]
name[self].permutation assign[=] binary_operation[call[name[tuple], parameter[name[perm]]] * constant[2]] | keyword[def] identifier[randomize] ( identifier[self] , identifier[period] = keyword[None] ):
literal[string]
keyword[if] identifier[period] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[period] = identifier[period]
identifier[perm] = identifier[list] ( identifier[range] ( identifier[self] . identifier[period] ))
identifier[perm_right] = identifier[self] . identifier[period] - literal[int]
keyword[for] identifier[i] keyword[in] identifier[list] ( identifier[perm] ):
identifier[j] = identifier[self] . identifier[randint_function] ( literal[int] , identifier[perm_right] )
identifier[perm] [ identifier[i] ], identifier[perm] [ identifier[j] ]= identifier[perm] [ identifier[j] ], identifier[perm] [ identifier[i] ]
identifier[self] . identifier[permutation] = identifier[tuple] ( identifier[perm] )* literal[int] | def randomize(self, period=None):
"""Randomize the permutation table used by the noise functions. This
makes them generate a different noise pattern for the same inputs.
"""
if period is not None:
self.period = period # depends on [control=['if'], data=['period']]
perm = list(range(self.period))
perm_right = self.period - 1
for i in list(perm):
j = self.randint_function(0, perm_right)
(perm[i], perm[j]) = (perm[j], perm[i]) # depends on [control=['for'], data=['i']]
self.permutation = tuple(perm) * 2 |
def execute_command(self, *args, **options):
"Execute a command and return a parsed response"
pool = self.connection_pool
command_name = args[0]
connection = pool.get_connection(command_name, **options)
try:
connection.send_command(*args)
return self.parse_response(connection, command_name, **options)
except (ConnectionError, TimeoutError) as e:
connection.disconnect()
if not (connection.retry_on_timeout and
isinstance(e, TimeoutError)):
raise
connection.send_command(*args)
return self.parse_response(connection, command_name, **options)
finally:
pool.release(connection) | def function[execute_command, parameter[self]]:
constant[Execute a command and return a parsed response]
variable[pool] assign[=] name[self].connection_pool
variable[command_name] assign[=] call[name[args]][constant[0]]
variable[connection] assign[=] call[name[pool].get_connection, parameter[name[command_name]]]
<ast.Try object at 0x7da18dc9be20> | keyword[def] identifier[execute_command] ( identifier[self] ,* identifier[args] ,** identifier[options] ):
literal[string]
identifier[pool] = identifier[self] . identifier[connection_pool]
identifier[command_name] = identifier[args] [ literal[int] ]
identifier[connection] = identifier[pool] . identifier[get_connection] ( identifier[command_name] ,** identifier[options] )
keyword[try] :
identifier[connection] . identifier[send_command] (* identifier[args] )
keyword[return] identifier[self] . identifier[parse_response] ( identifier[connection] , identifier[command_name] ,** identifier[options] )
keyword[except] ( identifier[ConnectionError] , identifier[TimeoutError] ) keyword[as] identifier[e] :
identifier[connection] . identifier[disconnect] ()
keyword[if] keyword[not] ( identifier[connection] . identifier[retry_on_timeout] keyword[and]
identifier[isinstance] ( identifier[e] , identifier[TimeoutError] )):
keyword[raise]
identifier[connection] . identifier[send_command] (* identifier[args] )
keyword[return] identifier[self] . identifier[parse_response] ( identifier[connection] , identifier[command_name] ,** identifier[options] )
keyword[finally] :
identifier[pool] . identifier[release] ( identifier[connection] ) | def execute_command(self, *args, **options):
"""Execute a command and return a parsed response"""
pool = self.connection_pool
command_name = args[0]
connection = pool.get_connection(command_name, **options)
try:
connection.send_command(*args)
return self.parse_response(connection, command_name, **options) # depends on [control=['try'], data=[]]
except (ConnectionError, TimeoutError) as e:
connection.disconnect()
if not (connection.retry_on_timeout and isinstance(e, TimeoutError)):
raise # depends on [control=['if'], data=[]]
connection.send_command(*args)
return self.parse_response(connection, command_name, **options) # depends on [control=['except'], data=['e']]
finally:
pool.release(connection) |
def set_dirty_and_flush_changes(non_const_func):
"""Return method that checks whether given non constant function may be called.
If so, the instance will be set dirty.
Additionally, we flush the changes right to disk"""
def flush_changes(self, *args, **kwargs):
rval = non_const_func(self, *args, **kwargs)
self._dirty = True
self.write()
return rval
# END wrapper method
flush_changes.__name__ = non_const_func.__name__
return flush_changes | def function[set_dirty_and_flush_changes, parameter[non_const_func]]:
constant[Return method that checks whether given non constant function may be called.
If so, the instance will be set dirty.
Additionally, we flush the changes right to disk]
def function[flush_changes, parameter[self]]:
variable[rval] assign[=] call[name[non_const_func], parameter[name[self], <ast.Starred object at 0x7da1b1d12e00>]]
name[self]._dirty assign[=] constant[True]
call[name[self].write, parameter[]]
return[name[rval]]
name[flush_changes].__name__ assign[=] name[non_const_func].__name__
return[name[flush_changes]] | keyword[def] identifier[set_dirty_and_flush_changes] ( identifier[non_const_func] ):
literal[string]
keyword[def] identifier[flush_changes] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
identifier[rval] = identifier[non_const_func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] )
identifier[self] . identifier[_dirty] = keyword[True]
identifier[self] . identifier[write] ()
keyword[return] identifier[rval]
identifier[flush_changes] . identifier[__name__] = identifier[non_const_func] . identifier[__name__]
keyword[return] identifier[flush_changes] | def set_dirty_and_flush_changes(non_const_func):
"""Return method that checks whether given non constant function may be called.
If so, the instance will be set dirty.
Additionally, we flush the changes right to disk"""
def flush_changes(self, *args, **kwargs):
rval = non_const_func(self, *args, **kwargs)
self._dirty = True
self.write()
return rval
# END wrapper method
flush_changes.__name__ = non_const_func.__name__
return flush_changes |
def adjust_hue(img, hue_factor):
"""Adjust hue of an image.
The image hue is adjusted by converting the image to HSV and
cyclically shifting the intensities in the hue channel (H).
The image is then converted back to original image mode.
`hue_factor` is the amount of shift in H channel and must be in the
interval `[-0.5, 0.5]`.
See `Hue`_ for more details.
.. _Hue: https://en.wikipedia.org/wiki/Hue
Args:
img (PIL Image): PIL Image to be adjusted.
hue_factor (float): How much to shift the hue channel. Should be in
[-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in
HSV space in positive and negative direction respectively.
0 means no shift. Therefore, both -0.5 and 0.5 will give an image
with complementary colors while 0 gives the original image.
Returns:
PIL Image: Hue adjusted image.
"""
if not(-0.5 <= hue_factor <= 0.5):
raise ValueError('hue_factor is not in [-0.5, 0.5].'.format(hue_factor))
if not _is_pil_image(img):
raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
input_mode = img.mode
if input_mode in {'L', '1', 'I', 'F'}:
return img
h, s, v = img.convert('HSV').split()
np_h = np.array(h, dtype=np.uint8)
# uint8 addition take cares of rotation across boundaries
with np.errstate(over='ignore'):
np_h += np.uint8(hue_factor * 255)
h = Image.fromarray(np_h, 'L')
img = Image.merge('HSV', (h, s, v)).convert(input_mode)
return img | def function[adjust_hue, parameter[img, hue_factor]]:
constant[Adjust hue of an image.
The image hue is adjusted by converting the image to HSV and
cyclically shifting the intensities in the hue channel (H).
The image is then converted back to original image mode.
`hue_factor` is the amount of shift in H channel and must be in the
interval `[-0.5, 0.5]`.
See `Hue`_ for more details.
.. _Hue: https://en.wikipedia.org/wiki/Hue
Args:
img (PIL Image): PIL Image to be adjusted.
hue_factor (float): How much to shift the hue channel. Should be in
[-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in
HSV space in positive and negative direction respectively.
0 means no shift. Therefore, both -0.5 and 0.5 will give an image
with complementary colors while 0 gives the original image.
Returns:
PIL Image: Hue adjusted image.
]
if <ast.UnaryOp object at 0x7da1b03fbdf0> begin[:]
<ast.Raise object at 0x7da1b03f92d0>
if <ast.UnaryOp object at 0x7da1b033eb00> begin[:]
<ast.Raise object at 0x7da1b033faf0>
variable[input_mode] assign[=] name[img].mode
if compare[name[input_mode] in <ast.Set object at 0x7da1b033c6d0>] begin[:]
return[name[img]]
<ast.Tuple object at 0x7da1b033c550> assign[=] call[call[name[img].convert, parameter[constant[HSV]]].split, parameter[]]
variable[np_h] assign[=] call[name[np].array, parameter[name[h]]]
with call[name[np].errstate, parameter[]] begin[:]
<ast.AugAssign object at 0x7da1b033f7c0>
variable[h] assign[=] call[name[Image].fromarray, parameter[name[np_h], constant[L]]]
variable[img] assign[=] call[call[name[Image].merge, parameter[constant[HSV], tuple[[<ast.Name object at 0x7da1b0323c10>, <ast.Name object at 0x7da1b03230a0>, <ast.Name object at 0x7da1b0320dc0>]]]].convert, parameter[name[input_mode]]]
return[name[img]] | keyword[def] identifier[adjust_hue] ( identifier[img] , identifier[hue_factor] ):
literal[string]
keyword[if] keyword[not] (- literal[int] <= identifier[hue_factor] <= literal[int] ):
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[hue_factor] ))
keyword[if] keyword[not] identifier[_is_pil_image] ( identifier[img] ):
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[img] )))
identifier[input_mode] = identifier[img] . identifier[mode]
keyword[if] identifier[input_mode] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }:
keyword[return] identifier[img]
identifier[h] , identifier[s] , identifier[v] = identifier[img] . identifier[convert] ( literal[string] ). identifier[split] ()
identifier[np_h] = identifier[np] . identifier[array] ( identifier[h] , identifier[dtype] = identifier[np] . identifier[uint8] )
keyword[with] identifier[np] . identifier[errstate] ( identifier[over] = literal[string] ):
identifier[np_h] += identifier[np] . identifier[uint8] ( identifier[hue_factor] * literal[int] )
identifier[h] = identifier[Image] . identifier[fromarray] ( identifier[np_h] , literal[string] )
identifier[img] = identifier[Image] . identifier[merge] ( literal[string] ,( identifier[h] , identifier[s] , identifier[v] )). identifier[convert] ( identifier[input_mode] )
keyword[return] identifier[img] | def adjust_hue(img, hue_factor):
"""Adjust hue of an image.
The image hue is adjusted by converting the image to HSV and
cyclically shifting the intensities in the hue channel (H).
The image is then converted back to original image mode.
`hue_factor` is the amount of shift in H channel and must be in the
interval `[-0.5, 0.5]`.
See `Hue`_ for more details.
.. _Hue: https://en.wikipedia.org/wiki/Hue
Args:
img (PIL Image): PIL Image to be adjusted.
hue_factor (float): How much to shift the hue channel. Should be in
[-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in
HSV space in positive and negative direction respectively.
0 means no shift. Therefore, both -0.5 and 0.5 will give an image
with complementary colors while 0 gives the original image.
Returns:
PIL Image: Hue adjusted image.
"""
if not -0.5 <= hue_factor <= 0.5:
raise ValueError('hue_factor is not in [-0.5, 0.5].'.format(hue_factor)) # depends on [control=['if'], data=[]]
if not _is_pil_image(img):
raise TypeError('img should be PIL Image. Got {}'.format(type(img))) # depends on [control=['if'], data=[]]
input_mode = img.mode
if input_mode in {'L', '1', 'I', 'F'}:
return img # depends on [control=['if'], data=[]]
(h, s, v) = img.convert('HSV').split()
np_h = np.array(h, dtype=np.uint8)
# uint8 addition take cares of rotation across boundaries
with np.errstate(over='ignore'):
np_h += np.uint8(hue_factor * 255) # depends on [control=['with'], data=[]]
h = Image.fromarray(np_h, 'L')
img = Image.merge('HSV', (h, s, v)).convert(input_mode)
return img |
def cleanup(graph, subgraphs):
"""Clean up the metadata in the subgraphs.
:type graph: pybel.BELGraph
:type subgraphs: dict[Any,pybel.BELGraph]
"""
for subgraph in subgraphs.values():
update_node_helper(graph, subgraph)
update_metadata(graph, subgraph) | def function[cleanup, parameter[graph, subgraphs]]:
constant[Clean up the metadata in the subgraphs.
:type graph: pybel.BELGraph
:type subgraphs: dict[Any,pybel.BELGraph]
]
for taget[name[subgraph]] in starred[call[name[subgraphs].values, parameter[]]] begin[:]
call[name[update_node_helper], parameter[name[graph], name[subgraph]]]
call[name[update_metadata], parameter[name[graph], name[subgraph]]] | keyword[def] identifier[cleanup] ( identifier[graph] , identifier[subgraphs] ):
literal[string]
keyword[for] identifier[subgraph] keyword[in] identifier[subgraphs] . identifier[values] ():
identifier[update_node_helper] ( identifier[graph] , identifier[subgraph] )
identifier[update_metadata] ( identifier[graph] , identifier[subgraph] ) | def cleanup(graph, subgraphs):
"""Clean up the metadata in the subgraphs.
:type graph: pybel.BELGraph
:type subgraphs: dict[Any,pybel.BELGraph]
"""
for subgraph in subgraphs.values():
update_node_helper(graph, subgraph)
update_metadata(graph, subgraph) # depends on [control=['for'], data=['subgraph']] |
def get_family_hierarchy_session(self, proxy):
"""Gets the ``OsidSession`` associated with the family hierarchy service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.relationship.FamilyHierarchySession) - a
``FamilyHierarchySession`` for families
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_family_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_family_hierarchy()`` is ``true``.*
"""
if not self.supports_family_hierarchy():
raise errors.Unimplemented()
# pylint: disable=no-member
return sessions.FamilyHierarchySession(proxy=proxy, runtime=self._runtime) | def function[get_family_hierarchy_session, parameter[self, proxy]]:
constant[Gets the ``OsidSession`` associated with the family hierarchy service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.relationship.FamilyHierarchySession) - a
``FamilyHierarchySession`` for families
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_family_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_family_hierarchy()`` is ``true``.*
]
if <ast.UnaryOp object at 0x7da204566800> begin[:]
<ast.Raise object at 0x7da204564430>
return[call[name[sessions].FamilyHierarchySession, parameter[]]] | keyword[def] identifier[get_family_hierarchy_session] ( identifier[self] , identifier[proxy] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_family_hierarchy] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[FamilyHierarchySession] ( identifier[proxy] = identifier[proxy] , identifier[runtime] = identifier[self] . identifier[_runtime] ) | def get_family_hierarchy_session(self, proxy):
"""Gets the ``OsidSession`` associated with the family hierarchy service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.relationship.FamilyHierarchySession) - a
``FamilyHierarchySession`` for families
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_family_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_family_hierarchy()`` is ``true``.*
"""
if not self.supports_family_hierarchy():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
# pylint: disable=no-member
return sessions.FamilyHierarchySession(proxy=proxy, runtime=self._runtime) |
def clear_expired_cookies(self):
"""Discard all expired cookies.
You probably don't need to call this method: expired cookies are never
sent back to the server (provided you're using DefaultCookiePolicy),
this method is called by CookieJar itself every so often, and the
.save() method won't save expired cookies anyway (unless you ask
otherwise by passing a true ignore_expires argument).
"""
self._cookies_lock.acquire()
try:
now = time.time()
for cookie in self:
if cookie.is_expired(now):
self.clear(cookie.domain, cookie.path, cookie.name)
finally:
self._cookies_lock.release() | def function[clear_expired_cookies, parameter[self]]:
constant[Discard all expired cookies.
You probably don't need to call this method: expired cookies are never
sent back to the server (provided you're using DefaultCookiePolicy),
this method is called by CookieJar itself every so often, and the
.save() method won't save expired cookies anyway (unless you ask
otherwise by passing a true ignore_expires argument).
]
call[name[self]._cookies_lock.acquire, parameter[]]
<ast.Try object at 0x7da18ede71f0> | keyword[def] identifier[clear_expired_cookies] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_cookies_lock] . identifier[acquire] ()
keyword[try] :
identifier[now] = identifier[time] . identifier[time] ()
keyword[for] identifier[cookie] keyword[in] identifier[self] :
keyword[if] identifier[cookie] . identifier[is_expired] ( identifier[now] ):
identifier[self] . identifier[clear] ( identifier[cookie] . identifier[domain] , identifier[cookie] . identifier[path] , identifier[cookie] . identifier[name] )
keyword[finally] :
identifier[self] . identifier[_cookies_lock] . identifier[release] () | def clear_expired_cookies(self):
"""Discard all expired cookies.
You probably don't need to call this method: expired cookies are never
sent back to the server (provided you're using DefaultCookiePolicy),
this method is called by CookieJar itself every so often, and the
.save() method won't save expired cookies anyway (unless you ask
otherwise by passing a true ignore_expires argument).
"""
self._cookies_lock.acquire()
try:
now = time.time()
for cookie in self:
if cookie.is_expired(now):
self.clear(cookie.domain, cookie.path, cookie.name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cookie']] # depends on [control=['try'], data=[]]
finally:
self._cookies_lock.release() |
def ticker(self, currency="", **kwargs):
"""
This endpoint displays cryptocurrency ticker data in order of rank. The maximum
number of results per call is 100. Pagination is possible by using the
start and limit parameters.
GET /ticker/
Optional parameters:
(int) start - return results from rank [start] and above (default is 1)
(int) limit - return a maximum of [limit] results (default is 100; max is 100)
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
GET /ticker/{id}
Optional parameters:
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
"""
params = {}
params.update(kwargs)
# see https://github.com/barnumbirr/coinmarketcap/pull/28
if currency:
currency = str(currency) + '/'
response = self.__request('ticker/' + currency, params)
return response | def function[ticker, parameter[self, currency]]:
constant[
This endpoint displays cryptocurrency ticker data in order of rank. The maximum
number of results per call is 100. Pagination is possible by using the
start and limit parameters.
GET /ticker/
Optional parameters:
(int) start - return results from rank [start] and above (default is 1)
(int) limit - return a maximum of [limit] results (default is 100; max is 100)
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
GET /ticker/{id}
Optional parameters:
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
]
variable[params] assign[=] dictionary[[], []]
call[name[params].update, parameter[name[kwargs]]]
if name[currency] begin[:]
variable[currency] assign[=] binary_operation[call[name[str], parameter[name[currency]]] + constant[/]]
variable[response] assign[=] call[name[self].__request, parameter[binary_operation[constant[ticker/] + name[currency]], name[params]]]
return[name[response]] | keyword[def] identifier[ticker] ( identifier[self] , identifier[currency] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[params] ={}
identifier[params] . identifier[update] ( identifier[kwargs] )
keyword[if] identifier[currency] :
identifier[currency] = identifier[str] ( identifier[currency] )+ literal[string]
identifier[response] = identifier[self] . identifier[__request] ( literal[string] + identifier[currency] , identifier[params] )
keyword[return] identifier[response] | def ticker(self, currency='', **kwargs):
"""
This endpoint displays cryptocurrency ticker data in order of rank. The maximum
number of results per call is 100. Pagination is possible by using the
start and limit parameters.
GET /ticker/
Optional parameters:
(int) start - return results from rank [start] and above (default is 1)
(int) limit - return a maximum of [limit] results (default is 100; max is 100)
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
GET /ticker/{id}
Optional parameters:
(string) convert - return pricing info in terms of another currency.
Valid fiat currency values are: "AUD", "BRL", "CAD", "CHF", "CLP", "CNY", "CZK",
"DKK", "EUR", "GBP", "HKD", "HUF", "IDR", "ILS", "INR", "JPY", "KRW", "MXN",
"MYR", "NOK", "NZD", "PHP", "PKR", "PLN", "RUB", "SEK", "SGD", "THB", "TRY",
"TWD", "ZAR"
Valid cryptocurrency values are: "BTC", "ETH" "XRP", "LTC", and "BCH"
"""
params = {}
params.update(kwargs) # see https://github.com/barnumbirr/coinmarketcap/pull/28
if currency:
currency = str(currency) + '/' # depends on [control=['if'], data=[]]
response = self.__request('ticker/' + currency, params)
return response |
def calcsize(values, sizerange=(2,70), inds=None, plaw=3):
""" Use set of values to calculate symbol size.
values is a list of floats for candidate significance.
inds is an optional list of indexes to use to calculate symbol size.
Scaling of symbol size min max set by sizerange tuple (min, max).
plaw is powerlaw scaling of symbol size from values
"""
if inds:
smax = max([abs(values[i]) for i in inds])
smin = min([abs(values[i]) for i in inds])
else:
smax = max([abs(val) for val in values])
smin = min([abs(val) for val in values])
return [sizerange[0] + sizerange[1] * ((abs(val) - smin)/(smax - smin))**plaw for val in values] | def function[calcsize, parameter[values, sizerange, inds, plaw]]:
constant[ Use set of values to calculate symbol size.
values is a list of floats for candidate significance.
inds is an optional list of indexes to use to calculate symbol size.
Scaling of symbol size min max set by sizerange tuple (min, max).
plaw is powerlaw scaling of symbol size from values
]
if name[inds] begin[:]
variable[smax] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da1b253dc90>]]
variable[smin] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da1b253c550>]]
return[<ast.ListComp object at 0x7da1b25d8700>] | keyword[def] identifier[calcsize] ( identifier[values] , identifier[sizerange] =( literal[int] , literal[int] ), identifier[inds] = keyword[None] , identifier[plaw] = literal[int] ):
literal[string]
keyword[if] identifier[inds] :
identifier[smax] = identifier[max] ([ identifier[abs] ( identifier[values] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[inds] ])
identifier[smin] = identifier[min] ([ identifier[abs] ( identifier[values] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[inds] ])
keyword[else] :
identifier[smax] = identifier[max] ([ identifier[abs] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[values] ])
identifier[smin] = identifier[min] ([ identifier[abs] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[values] ])
keyword[return] [ identifier[sizerange] [ literal[int] ]+ identifier[sizerange] [ literal[int] ]*(( identifier[abs] ( identifier[val] )- identifier[smin] )/( identifier[smax] - identifier[smin] ))** identifier[plaw] keyword[for] identifier[val] keyword[in] identifier[values] ] | def calcsize(values, sizerange=(2, 70), inds=None, plaw=3):
""" Use set of values to calculate symbol size.
values is a list of floats for candidate significance.
inds is an optional list of indexes to use to calculate symbol size.
Scaling of symbol size min max set by sizerange tuple (min, max).
plaw is powerlaw scaling of symbol size from values
"""
if inds:
smax = max([abs(values[i]) for i in inds])
smin = min([abs(values[i]) for i in inds]) # depends on [control=['if'], data=[]]
else:
smax = max([abs(val) for val in values])
smin = min([abs(val) for val in values])
return [sizerange[0] + sizerange[1] * ((abs(val) - smin) / (smax - smin)) ** plaw for val in values] |
def requote_uri(uri):
"""Requote uri if it contains non-ascii chars, spaces etc.
"""
# To reduce tabulator import time
import requests.utils
if six.PY2:
def url_encode_non_ascii(bytes):
pattern = '[\x80-\xFF]'
replace = lambda c: ('%%%02x' % ord(c.group(0))).upper()
return re.sub(pattern, replace, bytes)
parts = urlparse(uri)
uri = urlunparse(
part.encode('idna') if index == 1
else url_encode_non_ascii(part.encode('utf-8'))
for index, part in enumerate(parts))
return requests.utils.requote_uri(uri) | def function[requote_uri, parameter[uri]]:
constant[Requote uri if it contains non-ascii chars, spaces etc.
]
import module[requests.utils]
if name[six].PY2 begin[:]
def function[url_encode_non_ascii, parameter[bytes]]:
variable[pattern] assign[=] constant[[-ÿ]]
variable[replace] assign[=] <ast.Lambda object at 0x7da1b06fd3c0>
return[call[name[re].sub, parameter[name[pattern], name[replace], name[bytes]]]]
variable[parts] assign[=] call[name[urlparse], parameter[name[uri]]]
variable[uri] assign[=] call[name[urlunparse], parameter[<ast.GeneratorExp object at 0x7da1b06ff790>]]
return[call[name[requests].utils.requote_uri, parameter[name[uri]]]] | keyword[def] identifier[requote_uri] ( identifier[uri] ):
literal[string]
keyword[import] identifier[requests] . identifier[utils]
keyword[if] identifier[six] . identifier[PY2] :
keyword[def] identifier[url_encode_non_ascii] ( identifier[bytes] ):
identifier[pattern] = literal[string]
identifier[replace] = keyword[lambda] identifier[c] :( literal[string] % identifier[ord] ( identifier[c] . identifier[group] ( literal[int] ))). identifier[upper] ()
keyword[return] identifier[re] . identifier[sub] ( identifier[pattern] , identifier[replace] , identifier[bytes] )
identifier[parts] = identifier[urlparse] ( identifier[uri] )
identifier[uri] = identifier[urlunparse] (
identifier[part] . identifier[encode] ( literal[string] ) keyword[if] identifier[index] == literal[int]
keyword[else] identifier[url_encode_non_ascii] ( identifier[part] . identifier[encode] ( literal[string] ))
keyword[for] identifier[index] , identifier[part] keyword[in] identifier[enumerate] ( identifier[parts] ))
keyword[return] identifier[requests] . identifier[utils] . identifier[requote_uri] ( identifier[uri] ) | def requote_uri(uri):
"""Requote uri if it contains non-ascii chars, spaces etc.
"""
# To reduce tabulator import time
import requests.utils
if six.PY2:
def url_encode_non_ascii(bytes):
pattern = '[\x80-ÿ]'
replace = lambda c: ('%%%02x' % ord(c.group(0))).upper()
return re.sub(pattern, replace, bytes)
parts = urlparse(uri)
uri = urlunparse((part.encode('idna') if index == 1 else url_encode_non_ascii(part.encode('utf-8')) for (index, part) in enumerate(parts))) # depends on [control=['if'], data=[]]
return requests.utils.requote_uri(uri) |
def _fit_islands(self, islands):
"""
Execute fitting on a list of islands
This function just wraps around fit_island, so that when we do multiprocesing
a single process will fit multiple islands before returning results.
Parameters
----------
islands : list of :class:`AegeanTools.models.IslandFittingData`
The islands to be fit.
Returns
-------
sources : list
The sources that were fit.
"""
self.log.debug("Fitting group of {0} islands".format(len(islands)))
sources = []
for island in islands:
res = self._fit_island(island)
sources.extend(res)
return sources | def function[_fit_islands, parameter[self, islands]]:
constant[
Execute fitting on a list of islands
This function just wraps around fit_island, so that when we do multiprocesing
a single process will fit multiple islands before returning results.
Parameters
----------
islands : list of :class:`AegeanTools.models.IslandFittingData`
The islands to be fit.
Returns
-------
sources : list
The sources that were fit.
]
call[name[self].log.debug, parameter[call[constant[Fitting group of {0} islands].format, parameter[call[name[len], parameter[name[islands]]]]]]]
variable[sources] assign[=] list[[]]
for taget[name[island]] in starred[name[islands]] begin[:]
variable[res] assign[=] call[name[self]._fit_island, parameter[name[island]]]
call[name[sources].extend, parameter[name[res]]]
return[name[sources]] | keyword[def] identifier[_fit_islands] ( identifier[self] , identifier[islands] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[islands] )))
identifier[sources] =[]
keyword[for] identifier[island] keyword[in] identifier[islands] :
identifier[res] = identifier[self] . identifier[_fit_island] ( identifier[island] )
identifier[sources] . identifier[extend] ( identifier[res] )
keyword[return] identifier[sources] | def _fit_islands(self, islands):
"""
Execute fitting on a list of islands
This function just wraps around fit_island, so that when we do multiprocesing
a single process will fit multiple islands before returning results.
Parameters
----------
islands : list of :class:`AegeanTools.models.IslandFittingData`
The islands to be fit.
Returns
-------
sources : list
The sources that were fit.
"""
self.log.debug('Fitting group of {0} islands'.format(len(islands)))
sources = []
for island in islands:
res = self._fit_island(island)
sources.extend(res) # depends on [control=['for'], data=['island']]
return sources |
def force_on_satellite(position, mass):
"""Returns the total gravitational force acting on the body from the Earth and Moon."""
earth_grav_force = gravitational_force(position, mass, earth_position, earth_mass)
moon_grav_force = gravitational_force(position, mass, moon_position, moon_mass)
F_x = earth_grav_force[0] + moon_grav_force[0]
F_y = earth_grav_force[1] + moon_grav_force[1]
return F_x, F_y | def function[force_on_satellite, parameter[position, mass]]:
constant[Returns the total gravitational force acting on the body from the Earth and Moon.]
variable[earth_grav_force] assign[=] call[name[gravitational_force], parameter[name[position], name[mass], name[earth_position], name[earth_mass]]]
variable[moon_grav_force] assign[=] call[name[gravitational_force], parameter[name[position], name[mass], name[moon_position], name[moon_mass]]]
variable[F_x] assign[=] binary_operation[call[name[earth_grav_force]][constant[0]] + call[name[moon_grav_force]][constant[0]]]
variable[F_y] assign[=] binary_operation[call[name[earth_grav_force]][constant[1]] + call[name[moon_grav_force]][constant[1]]]
return[tuple[[<ast.Name object at 0x7da1b13e05b0>, <ast.Name object at 0x7da1b1598e20>]]] | keyword[def] identifier[force_on_satellite] ( identifier[position] , identifier[mass] ):
literal[string]
identifier[earth_grav_force] = identifier[gravitational_force] ( identifier[position] , identifier[mass] , identifier[earth_position] , identifier[earth_mass] )
identifier[moon_grav_force] = identifier[gravitational_force] ( identifier[position] , identifier[mass] , identifier[moon_position] , identifier[moon_mass] )
identifier[F_x] = identifier[earth_grav_force] [ literal[int] ]+ identifier[moon_grav_force] [ literal[int] ]
identifier[F_y] = identifier[earth_grav_force] [ literal[int] ]+ identifier[moon_grav_force] [ literal[int] ]
keyword[return] identifier[F_x] , identifier[F_y] | def force_on_satellite(position, mass):
"""Returns the total gravitational force acting on the body from the Earth and Moon."""
earth_grav_force = gravitational_force(position, mass, earth_position, earth_mass)
moon_grav_force = gravitational_force(position, mass, moon_position, moon_mass)
F_x = earth_grav_force[0] + moon_grav_force[0]
F_y = earth_grav_force[1] + moon_grav_force[1]
return (F_x, F_y) |
def change_engine_password(self, password):
""" Change Engine password for engines on allowed
list.
:param str password: password for engine level
:raises ModificationFailed: failed setting password on engine
:return: None
"""
self.make_request(
ModificationFailed,
method='update',
resource='change_engine_password',
params={'password': password}) | def function[change_engine_password, parameter[self, password]]:
constant[ Change Engine password for engines on allowed
list.
:param str password: password for engine level
:raises ModificationFailed: failed setting password on engine
:return: None
]
call[name[self].make_request, parameter[name[ModificationFailed]]] | keyword[def] identifier[change_engine_password] ( identifier[self] , identifier[password] ):
literal[string]
identifier[self] . identifier[make_request] (
identifier[ModificationFailed] ,
identifier[method] = literal[string] ,
identifier[resource] = literal[string] ,
identifier[params] ={ literal[string] : identifier[password] }) | def change_engine_password(self, password):
""" Change Engine password for engines on allowed
list.
:param str password: password for engine level
:raises ModificationFailed: failed setting password on engine
:return: None
"""
self.make_request(ModificationFailed, method='update', resource='change_engine_password', params={'password': password}) |
def copy(self):
"""
Make a new instance of this Token.
This method makes a copy of the mutable part of the token before
making the instance.
"""
return self.__class__(self.tag, self.data.copy(), self.context.copy()) | def function[copy, parameter[self]]:
constant[
Make a new instance of this Token.
This method makes a copy of the mutable part of the token before
making the instance.
]
return[call[name[self].__class__, parameter[name[self].tag, call[name[self].data.copy, parameter[]], call[name[self].context.copy, parameter[]]]]] | keyword[def] identifier[copy] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[tag] , identifier[self] . identifier[data] . identifier[copy] (), identifier[self] . identifier[context] . identifier[copy] ()) | def copy(self):
"""
Make a new instance of this Token.
This method makes a copy of the mutable part of the token before
making the instance.
"""
return self.__class__(self.tag, self.data.copy(), self.context.copy()) |
def on_resize(self, width, height):
"""
Handle resized windows.
"""
width, height = self._update_perspective(width, height)
self.scene.camera.resolution = (width, height)
self.view['ball'].resize(self.scene.camera.resolution)
self.scene.camera.transform = self.view['ball'].pose | def function[on_resize, parameter[self, width, height]]:
constant[
Handle resized windows.
]
<ast.Tuple object at 0x7da20c990670> assign[=] call[name[self]._update_perspective, parameter[name[width], name[height]]]
name[self].scene.camera.resolution assign[=] tuple[[<ast.Name object at 0x7da20c993d30>, <ast.Name object at 0x7da20c992ad0>]]
call[call[name[self].view][constant[ball]].resize, parameter[name[self].scene.camera.resolution]]
name[self].scene.camera.transform assign[=] call[name[self].view][constant[ball]].pose | keyword[def] identifier[on_resize] ( identifier[self] , identifier[width] , identifier[height] ):
literal[string]
identifier[width] , identifier[height] = identifier[self] . identifier[_update_perspective] ( identifier[width] , identifier[height] )
identifier[self] . identifier[scene] . identifier[camera] . identifier[resolution] =( identifier[width] , identifier[height] )
identifier[self] . identifier[view] [ literal[string] ]. identifier[resize] ( identifier[self] . identifier[scene] . identifier[camera] . identifier[resolution] )
identifier[self] . identifier[scene] . identifier[camera] . identifier[transform] = identifier[self] . identifier[view] [ literal[string] ]. identifier[pose] | def on_resize(self, width, height):
"""
Handle resized windows.
"""
(width, height) = self._update_perspective(width, height)
self.scene.camera.resolution = (width, height)
self.view['ball'].resize(self.scene.camera.resolution)
self.scene.camera.transform = self.view['ball'].pose |
def delete(self, ids):
"""
Method to delete object group permissions by their ids
:param ids: Identifiers of object group permissions
:return: None
"""
url = build_uri_with_ids('api/v3/object-group-perm/%s/', ids)
return super(ApiObjectGroupPermission, self).delete(url) | def function[delete, parameter[self, ids]]:
constant[
Method to delete object group permissions by their ids
:param ids: Identifiers of object group permissions
:return: None
]
variable[url] assign[=] call[name[build_uri_with_ids], parameter[constant[api/v3/object-group-perm/%s/], name[ids]]]
return[call[call[name[super], parameter[name[ApiObjectGroupPermission], name[self]]].delete, parameter[name[url]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[url] = identifier[build_uri_with_ids] ( literal[string] , identifier[ids] )
keyword[return] identifier[super] ( identifier[ApiObjectGroupPermission] , identifier[self] ). identifier[delete] ( identifier[url] ) | def delete(self, ids):
"""
Method to delete object group permissions by their ids
:param ids: Identifiers of object group permissions
:return: None
"""
url = build_uri_with_ids('api/v3/object-group-perm/%s/', ids)
return super(ApiObjectGroupPermission, self).delete(url) |
def _PrintProcessingTime(self, processing_status):
"""Prints the processing time.
Args:
processing_status (ProcessingStatus): processing status.
"""
if not processing_status:
processing_time = '00:00:00'
else:
processing_time = time.time() - processing_status.start_time
time_struct = time.gmtime(processing_time)
processing_time = time.strftime('%H:%M:%S', time_struct)
self._output_writer.Write(
'Processing time\t\t: {0:s}\n'.format(processing_time)) | def function[_PrintProcessingTime, parameter[self, processing_status]]:
constant[Prints the processing time.
Args:
processing_status (ProcessingStatus): processing status.
]
if <ast.UnaryOp object at 0x7da1b1d3a3e0> begin[:]
variable[processing_time] assign[=] constant[00:00:00]
call[name[self]._output_writer.Write, parameter[call[constant[Processing time : {0:s}
].format, parameter[name[processing_time]]]]] | keyword[def] identifier[_PrintProcessingTime] ( identifier[self] , identifier[processing_status] ):
literal[string]
keyword[if] keyword[not] identifier[processing_status] :
identifier[processing_time] = literal[string]
keyword[else] :
identifier[processing_time] = identifier[time] . identifier[time] ()- identifier[processing_status] . identifier[start_time]
identifier[time_struct] = identifier[time] . identifier[gmtime] ( identifier[processing_time] )
identifier[processing_time] = identifier[time] . identifier[strftime] ( literal[string] , identifier[time_struct] )
identifier[self] . identifier[_output_writer] . identifier[Write] (
literal[string] . identifier[format] ( identifier[processing_time] )) | def _PrintProcessingTime(self, processing_status):
"""Prints the processing time.
Args:
processing_status (ProcessingStatus): processing status.
"""
if not processing_status:
processing_time = '00:00:00' # depends on [control=['if'], data=[]]
else:
processing_time = time.time() - processing_status.start_time
time_struct = time.gmtime(processing_time)
processing_time = time.strftime('%H:%M:%S', time_struct)
self._output_writer.Write('Processing time\t\t: {0:s}\n'.format(processing_time)) |
def ensure_table(self, cls):
"""Ensure table's existence - as per the gludb spec."""
id_len = len(uuid())
index_names = cls.index_names() or []
cols = [
'id char(%d) primary key' % (id_len,),
'value jsonb'
] + [
name + ' text' for name in index_names
]
table_name = cls.get_table_name()
with self._conn() as conn:
with conn.cursor() as cur:
cur.execute('create table if not exists %s (%s);' % (
table_name,
','.join(cols)
))
for name in index_names:
cur.execute('create index if not exists %s on %s(%s);' % (
table_name + '_' + name + '_idx',
table_name,
name
)) | def function[ensure_table, parameter[self, cls]]:
constant[Ensure table's existence - as per the gludb spec.]
variable[id_len] assign[=] call[name[len], parameter[call[name[uuid], parameter[]]]]
variable[index_names] assign[=] <ast.BoolOp object at 0x7da18f8120b0>
variable[cols] assign[=] binary_operation[list[[<ast.BinOp object at 0x7da18f810400>, <ast.Constant object at 0x7da18f810700>]] + <ast.ListComp object at 0x7da18f810610>]
variable[table_name] assign[=] call[name[cls].get_table_name, parameter[]]
with call[name[self]._conn, parameter[]] begin[:]
with call[name[conn].cursor, parameter[]] begin[:]
call[name[cur].execute, parameter[binary_operation[constant[create table if not exists %s (%s);] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f813580>, <ast.Call object at 0x7da18f8119c0>]]]]]
for taget[name[name]] in starred[name[index_names]] begin[:]
call[name[cur].execute, parameter[binary_operation[constant[create index if not exists %s on %s(%s);] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da18f8134c0>, <ast.Name object at 0x7da18f813e50>, <ast.Name object at 0x7da18f813610>]]]]] | keyword[def] identifier[ensure_table] ( identifier[self] , identifier[cls] ):
literal[string]
identifier[id_len] = identifier[len] ( identifier[uuid] ())
identifier[index_names] = identifier[cls] . identifier[index_names] () keyword[or] []
identifier[cols] =[
literal[string] %( identifier[id_len] ,),
literal[string]
]+[
identifier[name] + literal[string] keyword[for] identifier[name] keyword[in] identifier[index_names]
]
identifier[table_name] = identifier[cls] . identifier[get_table_name] ()
keyword[with] identifier[self] . identifier[_conn] () keyword[as] identifier[conn] :
keyword[with] identifier[conn] . identifier[cursor] () keyword[as] identifier[cur] :
identifier[cur] . identifier[execute] ( literal[string] %(
identifier[table_name] ,
literal[string] . identifier[join] ( identifier[cols] )
))
keyword[for] identifier[name] keyword[in] identifier[index_names] :
identifier[cur] . identifier[execute] ( literal[string] %(
identifier[table_name] + literal[string] + identifier[name] + literal[string] ,
identifier[table_name] ,
identifier[name]
)) | def ensure_table(self, cls):
"""Ensure table's existence - as per the gludb spec."""
id_len = len(uuid())
index_names = cls.index_names() or []
cols = ['id char(%d) primary key' % (id_len,), 'value jsonb'] + [name + ' text' for name in index_names]
table_name = cls.get_table_name()
with self._conn() as conn:
with conn.cursor() as cur:
cur.execute('create table if not exists %s (%s);' % (table_name, ','.join(cols)))
for name in index_names:
cur.execute('create index if not exists %s on %s(%s);' % (table_name + '_' + name + '_idx', table_name, name)) # depends on [control=['for'], data=['name']] # depends on [control=['with'], data=['cur']] # depends on [control=['with'], data=['conn']] |
def ToTimedelta(self):
"""Converts Duration to timedelta."""
return timedelta(
seconds=self.seconds, microseconds=_RoundTowardZero(
self.nanos, _NANOS_PER_MICROSECOND)) | def function[ToTimedelta, parameter[self]]:
constant[Converts Duration to timedelta.]
return[call[name[timedelta], parameter[]]] | keyword[def] identifier[ToTimedelta] ( identifier[self] ):
literal[string]
keyword[return] identifier[timedelta] (
identifier[seconds] = identifier[self] . identifier[seconds] , identifier[microseconds] = identifier[_RoundTowardZero] (
identifier[self] . identifier[nanos] , identifier[_NANOS_PER_MICROSECOND] )) | def ToTimedelta(self):
"""Converts Duration to timedelta."""
return timedelta(seconds=self.seconds, microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) |
def properties(self, value):
"""The properties property.
Args:
value (hash). the property value.
"""
if value == self._defaults['properties'] and 'properties' in self._values:
del self._values['properties']
else:
self._values['properties'] = value | def function[properties, parameter[self, value]]:
constant[The properties property.
Args:
value (hash). the property value.
]
if <ast.BoolOp object at 0x7da1b102bc40> begin[:]
<ast.Delete object at 0x7da1b102aec0> | keyword[def] identifier[properties] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[value] == identifier[self] . identifier[_defaults] [ literal[string] ] keyword[and] literal[string] keyword[in] identifier[self] . identifier[_values] :
keyword[del] identifier[self] . identifier[_values] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[_values] [ literal[string] ]= identifier[value] | def properties(self, value):
"""The properties property.
Args:
value (hash). the property value.
"""
if value == self._defaults['properties'] and 'properties' in self._values:
del self._values['properties'] # depends on [control=['if'], data=[]]
else:
self._values['properties'] = value |
def take_action(self, production_rule: str) -> 'LambdaGrammarStatelet':
"""
Takes an action in the current grammar state, returning a new grammar state with whatever
updates are necessary. The production rule is assumed to be formatted as "LHS -> RHS".
This will update the non-terminal stack and the context-dependent actions. Updating the
non-terminal stack involves popping the non-terminal that was expanded off of the stack,
then pushing on any non-terminals in the production rule back on the stack. We push the
non-terminals on in `reverse` order, so that the first non-terminal in the production rule
gets popped off the stack first.
For example, if our current ``nonterminal_stack`` is ``["r", "<e,r>", "d"]``, and
``action`` is ``d -> [<e,d>, e]``, the resulting stack will be ``["r", "<e,r>", "e",
"<e,d>"]``.
"""
left_side, right_side = production_rule.split(' -> ')
assert self._nonterminal_stack[-1] == left_side, (f"Tried to expand {self._nonterminal_stack[-1]}"
f"but got rule {left_side} -> {right_side}")
assert all(self._lambda_stacks[key][-1] == left_side for key in self._lambda_stacks)
new_stack = self._nonterminal_stack[:-1]
new_lambda_stacks = {key: self._lambda_stacks[key][:-1] for key in self._lambda_stacks}
productions = self._get_productions_from_string(right_side)
# Looking for lambda productions, but not for cells or columns with the word "lambda" in
# them.
if 'lambda' in productions[0] and 'fb:' not in productions[0]:
production = productions[0]
if production[0] == "'" and production[-1] == "'":
# The production rule with a lambda is typically "<t,d> -> ['lambda x', d]". We
# need to strip the quotes.
production = production[1:-1]
lambda_variable = production.split(' ')[1]
# The left side must be formatted as "<t,d>", where "t" is the type of the lambda
# variable, and "d" is the return type of the lambda function. We need to pull out the
# "t" here. TODO(mattg): this is pretty limiting, but I'm not sure how general we
# should make this.
if len(left_side) != 5:
raise NotImplementedError("Can't handle this type yet:", left_side)
lambda_type = left_side[1]
new_lambda_stacks[(lambda_type, lambda_variable)] = []
for production in reversed(productions):
if self._is_nonterminal(production):
new_stack.append(production)
for lambda_stack in new_lambda_stacks.values():
lambda_stack.append(production)
# If any of the lambda stacks have now become empty, we remove them from our dictionary.
new_lambda_stacks = {key: new_lambda_stacks[key]
for key in new_lambda_stacks if new_lambda_stacks[key]}
return LambdaGrammarStatelet(nonterminal_stack=new_stack,
lambda_stacks=new_lambda_stacks,
valid_actions=self._valid_actions,
context_actions=self._context_actions,
is_nonterminal=self._is_nonterminal) | def function[take_action, parameter[self, production_rule]]:
constant[
Takes an action in the current grammar state, returning a new grammar state with whatever
updates are necessary. The production rule is assumed to be formatted as "LHS -> RHS".
This will update the non-terminal stack and the context-dependent actions. Updating the
non-terminal stack involves popping the non-terminal that was expanded off of the stack,
then pushing on any non-terminals in the production rule back on the stack. We push the
non-terminals on in `reverse` order, so that the first non-terminal in the production rule
gets popped off the stack first.
For example, if our current ``nonterminal_stack`` is ``["r", "<e,r>", "d"]``, and
``action`` is ``d -> [<e,d>, e]``, the resulting stack will be ``["r", "<e,r>", "e",
"<e,d>"]``.
]
<ast.Tuple object at 0x7da1b201e7a0> assign[=] call[name[production_rule].split, parameter[constant[ -> ]]]
assert[compare[call[name[self]._nonterminal_stack][<ast.UnaryOp object at 0x7da1b201dc00>] equal[==] name[left_side]]]
assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b201e380>]]]
variable[new_stack] assign[=] call[name[self]._nonterminal_stack][<ast.Slice object at 0x7da1b201f580>]
variable[new_lambda_stacks] assign[=] <ast.DictComp object at 0x7da1b201c280>
variable[productions] assign[=] call[name[self]._get_productions_from_string, parameter[name[right_side]]]
if <ast.BoolOp object at 0x7da1b1f95720> begin[:]
variable[production] assign[=] call[name[productions]][constant[0]]
if <ast.BoolOp object at 0x7da1b1f96c50> begin[:]
variable[production] assign[=] call[name[production]][<ast.Slice object at 0x7da1b1f96800>]
variable[lambda_variable] assign[=] call[call[name[production].split, parameter[constant[ ]]]][constant[1]]
if compare[call[name[len], parameter[name[left_side]]] not_equal[!=] constant[5]] begin[:]
<ast.Raise object at 0x7da1b1f973a0>
variable[lambda_type] assign[=] call[name[left_side]][constant[1]]
call[name[new_lambda_stacks]][tuple[[<ast.Name object at 0x7da1b1f97e80>, <ast.Name object at 0x7da1b1f95d50>]]] assign[=] list[[]]
for taget[name[production]] in starred[call[name[reversed], parameter[name[productions]]]] begin[:]
if call[name[self]._is_nonterminal, parameter[name[production]]] begin[:]
call[name[new_stack].append, parameter[name[production]]]
for taget[name[lambda_stack]] in starred[call[name[new_lambda_stacks].values, parameter[]]] begin[:]
call[name[lambda_stack].append, parameter[name[production]]]
variable[new_lambda_stacks] assign[=] <ast.DictComp object at 0x7da1b1f97520>
return[call[name[LambdaGrammarStatelet], parameter[]]] | keyword[def] identifier[take_action] ( identifier[self] , identifier[production_rule] : identifier[str] )-> literal[string] :
literal[string]
identifier[left_side] , identifier[right_side] = identifier[production_rule] . identifier[split] ( literal[string] )
keyword[assert] identifier[self] . identifier[_nonterminal_stack] [- literal[int] ]== identifier[left_side] ,( literal[string]
literal[string] )
keyword[assert] identifier[all] ( identifier[self] . identifier[_lambda_stacks] [ identifier[key] ][- literal[int] ]== identifier[left_side] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_lambda_stacks] )
identifier[new_stack] = identifier[self] . identifier[_nonterminal_stack] [:- literal[int] ]
identifier[new_lambda_stacks] ={ identifier[key] : identifier[self] . identifier[_lambda_stacks] [ identifier[key] ][:- literal[int] ] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_lambda_stacks] }
identifier[productions] = identifier[self] . identifier[_get_productions_from_string] ( identifier[right_side] )
keyword[if] literal[string] keyword[in] identifier[productions] [ literal[int] ] keyword[and] literal[string] keyword[not] keyword[in] identifier[productions] [ literal[int] ]:
identifier[production] = identifier[productions] [ literal[int] ]
keyword[if] identifier[production] [ literal[int] ]== literal[string] keyword[and] identifier[production] [- literal[int] ]== literal[string] :
identifier[production] = identifier[production] [ literal[int] :- literal[int] ]
identifier[lambda_variable] = identifier[production] . identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] identifier[len] ( identifier[left_side] )!= literal[int] :
keyword[raise] identifier[NotImplementedError] ( literal[string] , identifier[left_side] )
identifier[lambda_type] = identifier[left_side] [ literal[int] ]
identifier[new_lambda_stacks] [( identifier[lambda_type] , identifier[lambda_variable] )]=[]
keyword[for] identifier[production] keyword[in] identifier[reversed] ( identifier[productions] ):
keyword[if] identifier[self] . identifier[_is_nonterminal] ( identifier[production] ):
identifier[new_stack] . identifier[append] ( identifier[production] )
keyword[for] identifier[lambda_stack] keyword[in] identifier[new_lambda_stacks] . identifier[values] ():
identifier[lambda_stack] . identifier[append] ( identifier[production] )
identifier[new_lambda_stacks] ={ identifier[key] : identifier[new_lambda_stacks] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[new_lambda_stacks] keyword[if] identifier[new_lambda_stacks] [ identifier[key] ]}
keyword[return] identifier[LambdaGrammarStatelet] ( identifier[nonterminal_stack] = identifier[new_stack] ,
identifier[lambda_stacks] = identifier[new_lambda_stacks] ,
identifier[valid_actions] = identifier[self] . identifier[_valid_actions] ,
identifier[context_actions] = identifier[self] . identifier[_context_actions] ,
identifier[is_nonterminal] = identifier[self] . identifier[_is_nonterminal] ) | def take_action(self, production_rule: str) -> 'LambdaGrammarStatelet':
"""
Takes an action in the current grammar state, returning a new grammar state with whatever
updates are necessary. The production rule is assumed to be formatted as "LHS -> RHS".
This will update the non-terminal stack and the context-dependent actions. Updating the
non-terminal stack involves popping the non-terminal that was expanded off of the stack,
then pushing on any non-terminals in the production rule back on the stack. We push the
non-terminals on in `reverse` order, so that the first non-terminal in the production rule
gets popped off the stack first.
For example, if our current ``nonterminal_stack`` is ``["r", "<e,r>", "d"]``, and
``action`` is ``d -> [<e,d>, e]``, the resulting stack will be ``["r", "<e,r>", "e",
"<e,d>"]``.
"""
(left_side, right_side) = production_rule.split(' -> ')
assert self._nonterminal_stack[-1] == left_side, f'Tried to expand {self._nonterminal_stack[-1]}but got rule {left_side} -> {right_side}'
assert all((self._lambda_stacks[key][-1] == left_side for key in self._lambda_stacks))
new_stack = self._nonterminal_stack[:-1]
new_lambda_stacks = {key: self._lambda_stacks[key][:-1] for key in self._lambda_stacks}
productions = self._get_productions_from_string(right_side)
# Looking for lambda productions, but not for cells or columns with the word "lambda" in
# them.
if 'lambda' in productions[0] and 'fb:' not in productions[0]:
production = productions[0]
if production[0] == "'" and production[-1] == "'":
# The production rule with a lambda is typically "<t,d> -> ['lambda x', d]". We
# need to strip the quotes.
production = production[1:-1] # depends on [control=['if'], data=[]]
lambda_variable = production.split(' ')[1]
# The left side must be formatted as "<t,d>", where "t" is the type of the lambda
# variable, and "d" is the return type of the lambda function. We need to pull out the
# "t" here. TODO(mattg): this is pretty limiting, but I'm not sure how general we
# should make this.
if len(left_side) != 5:
raise NotImplementedError("Can't handle this type yet:", left_side) # depends on [control=['if'], data=[]]
lambda_type = left_side[1]
new_lambda_stacks[lambda_type, lambda_variable] = [] # depends on [control=['if'], data=[]]
for production in reversed(productions):
if self._is_nonterminal(production):
new_stack.append(production)
for lambda_stack in new_lambda_stacks.values():
lambda_stack.append(production) # depends on [control=['for'], data=['lambda_stack']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['production']]
# If any of the lambda stacks have now become empty, we remove them from our dictionary.
new_lambda_stacks = {key: new_lambda_stacks[key] for key in new_lambda_stacks if new_lambda_stacks[key]}
return LambdaGrammarStatelet(nonterminal_stack=new_stack, lambda_stacks=new_lambda_stacks, valid_actions=self._valid_actions, context_actions=self._context_actions, is_nonterminal=self._is_nonterminal) |
def read_gpx(xml, gpxns=None):
"""Parse a GPX file into a GpxModel.
Args:
xml: A file-like-object opened in binary mode - that is containing
bytes rather than characters. The root element of the XML should
be a <gpx> element containing a version attribute. GPX versions
1.1 is supported.
gpxns: The XML namespace for GPX in Clarke notation (i.e. delimited
by curly braces). If None, (the default) the namespace used in
the document will be determined automatically.
"""
tree = etree.parse(xml)
gpx_element = tree.getroot()
return parse_gpx(gpx_element, gpxns=gpxns) | def function[read_gpx, parameter[xml, gpxns]]:
constant[Parse a GPX file into a GpxModel.
Args:
xml: A file-like-object opened in binary mode - that is containing
bytes rather than characters. The root element of the XML should
be a <gpx> element containing a version attribute. GPX versions
1.1 is supported.
gpxns: The XML namespace for GPX in Clarke notation (i.e. delimited
by curly braces). If None, (the default) the namespace used in
the document will be determined automatically.
]
variable[tree] assign[=] call[name[etree].parse, parameter[name[xml]]]
variable[gpx_element] assign[=] call[name[tree].getroot, parameter[]]
return[call[name[parse_gpx], parameter[name[gpx_element]]]] | keyword[def] identifier[read_gpx] ( identifier[xml] , identifier[gpxns] = keyword[None] ):
literal[string]
identifier[tree] = identifier[etree] . identifier[parse] ( identifier[xml] )
identifier[gpx_element] = identifier[tree] . identifier[getroot] ()
keyword[return] identifier[parse_gpx] ( identifier[gpx_element] , identifier[gpxns] = identifier[gpxns] ) | def read_gpx(xml, gpxns=None):
"""Parse a GPX file into a GpxModel.
Args:
xml: A file-like-object opened in binary mode - that is containing
bytes rather than characters. The root element of the XML should
be a <gpx> element containing a version attribute. GPX versions
1.1 is supported.
gpxns: The XML namespace for GPX in Clarke notation (i.e. delimited
by curly braces). If None, (the default) the namespace used in
the document will be determined automatically.
"""
tree = etree.parse(xml)
gpx_element = tree.getroot()
return parse_gpx(gpx_element, gpxns=gpxns) |
def name(self, value):
"""
Generate the Site's slug (for file paths, URL's, etc.)
"""
self._name = value
self.slug = re.sub('[^0-9a-zA-Z_-]+', '_', str(value).lower())
self.root = os.path.abspath(os.path.join(_cfg.get('Paths', 'HttpRoot'), self.domain.name, self.slug)) | def function[name, parameter[self, value]]:
constant[
Generate the Site's slug (for file paths, URL's, etc.)
]
name[self]._name assign[=] name[value]
name[self].slug assign[=] call[name[re].sub, parameter[constant[[^0-9a-zA-Z_-]+], constant[_], call[call[name[str], parameter[name[value]]].lower, parameter[]]]]
name[self].root assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[call[name[_cfg].get, parameter[constant[Paths], constant[HttpRoot]]], name[self].domain.name, name[self].slug]]]] | keyword[def] identifier[name] ( identifier[self] , identifier[value] ):
literal[string]
identifier[self] . identifier[_name] = identifier[value]
identifier[self] . identifier[slug] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[str] ( identifier[value] ). identifier[lower] ())
identifier[self] . identifier[root] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[_cfg] . identifier[get] ( literal[string] , literal[string] ), identifier[self] . identifier[domain] . identifier[name] , identifier[self] . identifier[slug] )) | def name(self, value):
"""
Generate the Site's slug (for file paths, URL's, etc.)
"""
self._name = value
self.slug = re.sub('[^0-9a-zA-Z_-]+', '_', str(value).lower())
self.root = os.path.abspath(os.path.join(_cfg.get('Paths', 'HttpRoot'), self.domain.name, self.slug)) |
def _compare_records(src, sink, strict=False):
# type: (MutableMapping[Text, Any], MutableMapping[Text, Any], bool) -> bool
"""Compare two records, ensuring they have compatible fields.
This handles normalizing record names, which will be relative to workflow
step, so that they can be compared.
"""
def _rec_fields(rec): # type: (MutableMapping[Text, Any]) -> MutableMapping[Text, Any]
out = {}
for field in rec["fields"]:
name = shortname(field["name"])
out[name] = field["type"]
return out
srcfields = _rec_fields(src)
sinkfields = _rec_fields(sink)
for key in six.iterkeys(sinkfields):
if (not can_assign_src_to_sink(
srcfields.get(key, "null"), sinkfields.get(key, "null"), strict)
and sinkfields.get(key) is not None):
_logger.info("Record comparison failure for %s and %s\n"
"Did not match fields for %s: %s and %s",
src["name"], sink["name"], key, srcfields.get(key),
sinkfields.get(key))
return False
return True | def function[_compare_records, parameter[src, sink, strict]]:
constant[Compare two records, ensuring they have compatible fields.
This handles normalizing record names, which will be relative to workflow
step, so that they can be compared.
]
def function[_rec_fields, parameter[rec]]:
variable[out] assign[=] dictionary[[], []]
for taget[name[field]] in starred[call[name[rec]][constant[fields]]] begin[:]
variable[name] assign[=] call[name[shortname], parameter[call[name[field]][constant[name]]]]
call[name[out]][name[name]] assign[=] call[name[field]][constant[type]]
return[name[out]]
variable[srcfields] assign[=] call[name[_rec_fields], parameter[name[src]]]
variable[sinkfields] assign[=] call[name[_rec_fields], parameter[name[sink]]]
for taget[name[key]] in starred[call[name[six].iterkeys, parameter[name[sinkfields]]]] begin[:]
if <ast.BoolOp object at 0x7da18eb55ea0> begin[:]
call[name[_logger].info, parameter[constant[Record comparison failure for %s and %s
Did not match fields for %s: %s and %s], call[name[src]][constant[name]], call[name[sink]][constant[name]], name[key], call[name[srcfields].get, parameter[name[key]]], call[name[sinkfields].get, parameter[name[key]]]]]
return[constant[False]]
return[constant[True]] | keyword[def] identifier[_compare_records] ( identifier[src] , identifier[sink] , identifier[strict] = keyword[False] ):
literal[string]
keyword[def] identifier[_rec_fields] ( identifier[rec] ):
identifier[out] ={}
keyword[for] identifier[field] keyword[in] identifier[rec] [ literal[string] ]:
identifier[name] = identifier[shortname] ( identifier[field] [ literal[string] ])
identifier[out] [ identifier[name] ]= identifier[field] [ literal[string] ]
keyword[return] identifier[out]
identifier[srcfields] = identifier[_rec_fields] ( identifier[src] )
identifier[sinkfields] = identifier[_rec_fields] ( identifier[sink] )
keyword[for] identifier[key] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[sinkfields] ):
keyword[if] ( keyword[not] identifier[can_assign_src_to_sink] (
identifier[srcfields] . identifier[get] ( identifier[key] , literal[string] ), identifier[sinkfields] . identifier[get] ( identifier[key] , literal[string] ), identifier[strict] )
keyword[and] identifier[sinkfields] . identifier[get] ( identifier[key] ) keyword[is] keyword[not] keyword[None] ):
identifier[_logger] . identifier[info] ( literal[string]
literal[string] ,
identifier[src] [ literal[string] ], identifier[sink] [ literal[string] ], identifier[key] , identifier[srcfields] . identifier[get] ( identifier[key] ),
identifier[sinkfields] . identifier[get] ( identifier[key] ))
keyword[return] keyword[False]
keyword[return] keyword[True] | def _compare_records(src, sink, strict=False):
# type: (MutableMapping[Text, Any], MutableMapping[Text, Any], bool) -> bool
'Compare two records, ensuring they have compatible fields.\n\n This handles normalizing record names, which will be relative to workflow\n step, so that they can be compared.\n '
def _rec_fields(rec): # type: (MutableMapping[Text, Any]) -> MutableMapping[Text, Any]
out = {}
for field in rec['fields']:
name = shortname(field['name'])
out[name] = field['type'] # depends on [control=['for'], data=['field']]
return out
srcfields = _rec_fields(src)
sinkfields = _rec_fields(sink)
for key in six.iterkeys(sinkfields):
if not can_assign_src_to_sink(srcfields.get(key, 'null'), sinkfields.get(key, 'null'), strict) and sinkfields.get(key) is not None:
_logger.info('Record comparison failure for %s and %s\nDid not match fields for %s: %s and %s', src['name'], sink['name'], key, srcfields.get(key), sinkfields.get(key))
return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return True |
def format_list(data):
""" Remove useless characters to output a clean list."""
if isinstance(data, (list, tuple)):
to_clean = ['[', ']', '(', ')', "'"]
for item in to_clean:
data = str(data).replace("u\"", "\"").replace("u\'", "\'")
data = str(data).replace(item, '')
return data | def function[format_list, parameter[data]]:
constant[ Remove useless characters to output a clean list.]
if call[name[isinstance], parameter[name[data], tuple[[<ast.Name object at 0x7da20e9b19c0>, <ast.Name object at 0x7da20e9b23b0>]]]] begin[:]
variable[to_clean] assign[=] list[[<ast.Constant object at 0x7da20e9b18d0>, <ast.Constant object at 0x7da20e9b1b70>, <ast.Constant object at 0x7da20e9b2320>, <ast.Constant object at 0x7da20e9b1d20>, <ast.Constant object at 0x7da20e9b3b80>]]
for taget[name[item]] in starred[name[to_clean]] begin[:]
variable[data] assign[=] call[call[call[name[str], parameter[name[data]]].replace, parameter[constant[u"], constant["]]].replace, parameter[constant[u'], constant[']]]
variable[data] assign[=] call[call[name[str], parameter[name[data]]].replace, parameter[name[item], constant[]]]
return[name[data]] | keyword[def] identifier[format_list] ( identifier[data] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )):
identifier[to_clean] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[item] keyword[in] identifier[to_clean] :
identifier[data] = identifier[str] ( identifier[data] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
identifier[data] = identifier[str] ( identifier[data] ). identifier[replace] ( identifier[item] , literal[string] )
keyword[return] identifier[data] | def format_list(data):
""" Remove useless characters to output a clean list."""
if isinstance(data, (list, tuple)):
to_clean = ['[', ']', '(', ')', "'"]
for item in to_clean:
data = str(data).replace('u"', '"').replace("u'", "'")
data = str(data).replace(item, '') # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
return data |
def location_helper(name, search_only=False):
"""
Location finder by name. If location doesn't exist, create it
and return the href
:param str,Element name: location to resolve. If the location is
by name, it will be retrieved or created, if href, returned
and if Location, href returned. If None, settings an elements
location to None will set it to the Default location.
:param bool search_only: only search for the location, if not found
do not create
:return str href: href of location if search_only is not False
:rtype: str or None
"""
try:
return name.href
except AttributeError:
if name and name.startswith('http'):
return name
except ElementNotFound:
return Location.create(name=name.name).href if not \
search_only else None
# Get all locations; tmp to support earlier 6.x versions.
if name is not None:
locations = [location for location in Search.objects.entry_point(
'location') if location.name == name]
if not locations:
return Location.create(name=name).href if not search_only \
else None
return locations[0].href | def function[location_helper, parameter[name, search_only]]:
constant[
Location finder by name. If location doesn't exist, create it
and return the href
:param str,Element name: location to resolve. If the location is
by name, it will be retrieved or created, if href, returned
and if Location, href returned. If None, settings an elements
location to None will set it to the Default location.
:param bool search_only: only search for the location, if not found
do not create
:return str href: href of location if search_only is not False
:rtype: str or None
]
<ast.Try object at 0x7da1b1ba9330>
if compare[name[name] is_not constant[None]] begin[:]
variable[locations] assign[=] <ast.ListComp object at 0x7da1b1a2a0b0>
if <ast.UnaryOp object at 0x7da1b1a29a50> begin[:]
return[<ast.IfExp object at 0x7da1b1a2a350>]
return[call[name[locations]][constant[0]].href] | keyword[def] identifier[location_helper] ( identifier[name] , identifier[search_only] = keyword[False] ):
literal[string]
keyword[try] :
keyword[return] identifier[name] . identifier[href]
keyword[except] identifier[AttributeError] :
keyword[if] identifier[name] keyword[and] identifier[name] . identifier[startswith] ( literal[string] ):
keyword[return] identifier[name]
keyword[except] identifier[ElementNotFound] :
keyword[return] identifier[Location] . identifier[create] ( identifier[name] = identifier[name] . identifier[name] ). identifier[href] keyword[if] keyword[not] identifier[search_only] keyword[else] keyword[None]
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[locations] =[ identifier[location] keyword[for] identifier[location] keyword[in] identifier[Search] . identifier[objects] . identifier[entry_point] (
literal[string] ) keyword[if] identifier[location] . identifier[name] == identifier[name] ]
keyword[if] keyword[not] identifier[locations] :
keyword[return] identifier[Location] . identifier[create] ( identifier[name] = identifier[name] ). identifier[href] keyword[if] keyword[not] identifier[search_only] keyword[else] keyword[None]
keyword[return] identifier[locations] [ literal[int] ]. identifier[href] | def location_helper(name, search_only=False):
"""
Location finder by name. If location doesn't exist, create it
and return the href
:param str,Element name: location to resolve. If the location is
by name, it will be retrieved or created, if href, returned
and if Location, href returned. If None, settings an elements
location to None will set it to the Default location.
:param bool search_only: only search for the location, if not found
do not create
:return str href: href of location if search_only is not False
:rtype: str or None
"""
try:
return name.href # depends on [control=['try'], data=[]]
except AttributeError:
if name and name.startswith('http'):
return name # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
except ElementNotFound:
return Location.create(name=name.name).href if not search_only else None # depends on [control=['except'], data=[]]
# Get all locations; tmp to support earlier 6.x versions.
if name is not None:
locations = [location for location in Search.objects.entry_point('location') if location.name == name]
if not locations:
return Location.create(name=name).href if not search_only else None # depends on [control=['if'], data=[]]
return locations[0].href # depends on [control=['if'], data=['name']] |
def refine_cand(candsfile, candnum, threshold):
""" Run refinement search for candnum in list_cands with abs(snr) > threshold """
reproduce.refine_cand(candsfile, candnum=candnum, threshold=threshold) | def function[refine_cand, parameter[candsfile, candnum, threshold]]:
constant[ Run refinement search for candnum in list_cands with abs(snr) > threshold ]
call[name[reproduce].refine_cand, parameter[name[candsfile]]] | keyword[def] identifier[refine_cand] ( identifier[candsfile] , identifier[candnum] , identifier[threshold] ):
literal[string]
identifier[reproduce] . identifier[refine_cand] ( identifier[candsfile] , identifier[candnum] = identifier[candnum] , identifier[threshold] = identifier[threshold] ) | def refine_cand(candsfile, candnum, threshold):
""" Run refinement search for candnum in list_cands with abs(snr) > threshold """
reproduce.refine_cand(candsfile, candnum=candnum, threshold=threshold) |
def get_image_platform_digest(self, image, platform):
"""Get digest of specified image and platform
:param ImageName image: image
:param str platform: name of the platform/arch (x86_64, ppc64le, ...)
:raises KeyError: when digest is not found
:rtype: str
:return: digest of the specified image (fedora@sha256:...)
"""
image_digests = self.get_image_digests(image)
digest = image_digests.get(platform)
if digest is None:
raise KeyError(
'Image {} has no digest record for platform {}'.format(image, platform)
)
return digest | def function[get_image_platform_digest, parameter[self, image, platform]]:
constant[Get digest of specified image and platform
:param ImageName image: image
:param str platform: name of the platform/arch (x86_64, ppc64le, ...)
:raises KeyError: when digest is not found
:rtype: str
:return: digest of the specified image (fedora@sha256:...)
]
variable[image_digests] assign[=] call[name[self].get_image_digests, parameter[name[image]]]
variable[digest] assign[=] call[name[image_digests].get, parameter[name[platform]]]
if compare[name[digest] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0e9f5e0>
return[name[digest]] | keyword[def] identifier[get_image_platform_digest] ( identifier[self] , identifier[image] , identifier[platform] ):
literal[string]
identifier[image_digests] = identifier[self] . identifier[get_image_digests] ( identifier[image] )
identifier[digest] = identifier[image_digests] . identifier[get] ( identifier[platform] )
keyword[if] identifier[digest] keyword[is] keyword[None] :
keyword[raise] identifier[KeyError] (
literal[string] . identifier[format] ( identifier[image] , identifier[platform] )
)
keyword[return] identifier[digest] | def get_image_platform_digest(self, image, platform):
"""Get digest of specified image and platform
:param ImageName image: image
:param str platform: name of the platform/arch (x86_64, ppc64le, ...)
:raises KeyError: when digest is not found
:rtype: str
:return: digest of the specified image (fedora@sha256:...)
"""
image_digests = self.get_image_digests(image)
digest = image_digests.get(platform)
if digest is None:
raise KeyError('Image {} has no digest record for platform {}'.format(image, platform)) # depends on [control=['if'], data=[]]
return digest |
def get(self, sid):
"""
Constructs a UserContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
return UserContext(self._version, service_sid=self._solution['service_sid'], sid=sid, ) | def function[get, parameter[self, sid]]:
constant[
Constructs a UserContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
]
return[call[name[UserContext], parameter[name[self]._version]]] | keyword[def] identifier[get] ( identifier[self] , identifier[sid] ):
literal[string]
keyword[return] identifier[UserContext] ( identifier[self] . identifier[_version] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[sid] ,) | def get(self, sid):
"""
Constructs a UserContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.chat.v2.service.user.UserContext
:rtype: twilio.rest.chat.v2.service.user.UserContext
"""
return UserContext(self._version, service_sid=self._solution['service_sid'], sid=sid) |
def _set_colors_for_categorical_obs(adata, value_to_plot, palette):
"""
Sets the adata.uns[value_to_plot + '_colors'] according to the given palette
Parameters
----------
adata
annData object
value_to_plot
name of a valid categorical observation
palette
Palette should be either a valid :func:`~matplotlib.pyplot.colormaps` string,
a list of colors (in a format that can be understood by matplotlib,
eg. RGB, RGBS, hex, or a cycler object with key='color'
Returns
-------
None
"""
from matplotlib.colors import to_hex
from cycler import Cycler, cycler
categories = adata.obs[value_to_plot].cat.categories
# check is palette is a valid matplotlib colormap
if isinstance(palette, str) and palette in pl.colormaps():
# this creates a palette from a colormap. E.g. 'Accent, Dark2, tab20'
cmap = pl.get_cmap(palette)
colors_list = [to_hex(x) for x in cmap(np.linspace(0, 1, len(categories)))]
else:
# check if palette is a list and convert it to a cycler, thus
# it doesnt matter if the list is shorter than the categories length:
if isinstance(palette, abc.Sequence):
if len(palette) < len(categories):
logg.warn("Length of palette colors is smaller than the number of "
"categories (palette length: {}, categories length: {}. "
"Some categories will have the same color."
.format(len(palette), len(categories)))
# check that colors are valid
_color_list = []
for color in palette:
if not is_color_like(color):
# check if the color is a valid R color and translate it
# to a valid hex color value
if color in utils.additional_colors:
color = utils.additional_colors[color]
else:
raise ValueError("The following color value of the given palette is not valid: {}".format(color))
_color_list.append(color)
palette = cycler(color=_color_list)
if not isinstance(palette, Cycler):
raise ValueError("Please check that the value of 'palette' is a "
"valid matplotlib colormap string (eg. Set2), a "
"list of color names or a cycler with a 'color' key.")
if 'color' not in palette.keys:
raise ValueError("Please set the palette key 'color'.")
cc = palette()
colors_list = [to_hex(next(cc)['color']) for x in range(len(categories))]
adata.uns[value_to_plot + '_colors'] = colors_list | def function[_set_colors_for_categorical_obs, parameter[adata, value_to_plot, palette]]:
constant[
Sets the adata.uns[value_to_plot + '_colors'] according to the given palette
Parameters
----------
adata
annData object
value_to_plot
name of a valid categorical observation
palette
Palette should be either a valid :func:`~matplotlib.pyplot.colormaps` string,
a list of colors (in a format that can be understood by matplotlib,
eg. RGB, RGBS, hex, or a cycler object with key='color'
Returns
-------
None
]
from relative_module[matplotlib.colors] import module[to_hex]
from relative_module[cycler] import module[Cycler], module[cycler]
variable[categories] assign[=] call[name[adata].obs][name[value_to_plot]].cat.categories
if <ast.BoolOp object at 0x7da18f58e7a0> begin[:]
variable[cmap] assign[=] call[name[pl].get_cmap, parameter[name[palette]]]
variable[colors_list] assign[=] <ast.ListComp object at 0x7da18f58fbb0>
call[name[adata].uns][binary_operation[name[value_to_plot] + constant[_colors]]] assign[=] name[colors_list] | keyword[def] identifier[_set_colors_for_categorical_obs] ( identifier[adata] , identifier[value_to_plot] , identifier[palette] ):
literal[string]
keyword[from] identifier[matplotlib] . identifier[colors] keyword[import] identifier[to_hex]
keyword[from] identifier[cycler] keyword[import] identifier[Cycler] , identifier[cycler]
identifier[categories] = identifier[adata] . identifier[obs] [ identifier[value_to_plot] ]. identifier[cat] . identifier[categories]
keyword[if] identifier[isinstance] ( identifier[palette] , identifier[str] ) keyword[and] identifier[palette] keyword[in] identifier[pl] . identifier[colormaps] ():
identifier[cmap] = identifier[pl] . identifier[get_cmap] ( identifier[palette] )
identifier[colors_list] =[ identifier[to_hex] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[cmap] ( identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[len] ( identifier[categories] )))]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[palette] , identifier[abc] . identifier[Sequence] ):
keyword[if] identifier[len] ( identifier[palette] )< identifier[len] ( identifier[categories] ):
identifier[logg] . identifier[warn] ( literal[string]
literal[string]
literal[string]
. identifier[format] ( identifier[len] ( identifier[palette] ), identifier[len] ( identifier[categories] )))
identifier[_color_list] =[]
keyword[for] identifier[color] keyword[in] identifier[palette] :
keyword[if] keyword[not] identifier[is_color_like] ( identifier[color] ):
keyword[if] identifier[color] keyword[in] identifier[utils] . identifier[additional_colors] :
identifier[color] = identifier[utils] . identifier[additional_colors] [ identifier[color] ]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[color] ))
identifier[_color_list] . identifier[append] ( identifier[color] )
identifier[palette] = identifier[cycler] ( identifier[color] = identifier[_color_list] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[palette] , identifier[Cycler] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[palette] . identifier[keys] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[cc] = identifier[palette] ()
identifier[colors_list] =[ identifier[to_hex] ( identifier[next] ( identifier[cc] )[ literal[string] ]) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len] ( identifier[categories] ))]
identifier[adata] . identifier[uns] [ identifier[value_to_plot] + literal[string] ]= identifier[colors_list] | def _set_colors_for_categorical_obs(adata, value_to_plot, palette):
"""
Sets the adata.uns[value_to_plot + '_colors'] according to the given palette
Parameters
----------
adata
annData object
value_to_plot
name of a valid categorical observation
palette
Palette should be either a valid :func:`~matplotlib.pyplot.colormaps` string,
a list of colors (in a format that can be understood by matplotlib,
eg. RGB, RGBS, hex, or a cycler object with key='color'
Returns
-------
None
"""
from matplotlib.colors import to_hex
from cycler import Cycler, cycler
categories = adata.obs[value_to_plot].cat.categories
# check is palette is a valid matplotlib colormap
if isinstance(palette, str) and palette in pl.colormaps():
# this creates a palette from a colormap. E.g. 'Accent, Dark2, tab20'
cmap = pl.get_cmap(palette)
colors_list = [to_hex(x) for x in cmap(np.linspace(0, 1, len(categories)))] # depends on [control=['if'], data=[]]
else:
# check if palette is a list and convert it to a cycler, thus
# it doesnt matter if the list is shorter than the categories length:
if isinstance(palette, abc.Sequence):
if len(palette) < len(categories):
logg.warn('Length of palette colors is smaller than the number of categories (palette length: {}, categories length: {}. Some categories will have the same color.'.format(len(palette), len(categories))) # depends on [control=['if'], data=[]]
# check that colors are valid
_color_list = []
for color in palette:
if not is_color_like(color):
# check if the color is a valid R color and translate it
# to a valid hex color value
if color in utils.additional_colors:
color = utils.additional_colors[color] # depends on [control=['if'], data=['color']]
else:
raise ValueError('The following color value of the given palette is not valid: {}'.format(color)) # depends on [control=['if'], data=[]]
_color_list.append(color) # depends on [control=['for'], data=['color']]
palette = cycler(color=_color_list) # depends on [control=['if'], data=[]]
if not isinstance(palette, Cycler):
raise ValueError("Please check that the value of 'palette' is a valid matplotlib colormap string (eg. Set2), a list of color names or a cycler with a 'color' key.") # depends on [control=['if'], data=[]]
if 'color' not in palette.keys:
raise ValueError("Please set the palette key 'color'.") # depends on [control=['if'], data=[]]
cc = palette()
colors_list = [to_hex(next(cc)['color']) for x in range(len(categories))]
adata.uns[value_to_plot + '_colors'] = colors_list |
def checkUserAccess(worksheet, request, redirect=True):
""" Checks if the current user has granted access to the worksheet.
If the user is an analyst without LabManager, LabClerk and
RegulatoryInspector roles and the option 'Allow analysts
only to access to the Worksheets on which they are assigned' is
ticked and the above condition is true, it will redirect to
the main Worksheets view.
Returns False if the user has no access, otherwise returns True
"""
# Deny access to foreign analysts
allowed = worksheet.checkUserAccess()
if allowed == False and redirect == True:
msg = _('You do not have sufficient privileges to view '
'the worksheet ${worksheet_title}.',
mapping={"worksheet_title": worksheet.Title()})
worksheet.plone_utils.addPortalMessage(msg, 'warning')
# Redirect to WS list
portal = getToolByName(worksheet, 'portal_url').getPortalObject()
destination_url = portal.absolute_url() + "/worksheets"
request.response.redirect(destination_url)
return allowed | def function[checkUserAccess, parameter[worksheet, request, redirect]]:
constant[ Checks if the current user has granted access to the worksheet.
If the user is an analyst without LabManager, LabClerk and
RegulatoryInspector roles and the option 'Allow analysts
only to access to the Worksheets on which they are assigned' is
ticked and the above condition is true, it will redirect to
the main Worksheets view.
Returns False if the user has no access, otherwise returns True
]
variable[allowed] assign[=] call[name[worksheet].checkUserAccess, parameter[]]
if <ast.BoolOp object at 0x7da1b2313820> begin[:]
variable[msg] assign[=] call[name[_], parameter[constant[You do not have sufficient privileges to view the worksheet ${worksheet_title}.]]]
call[name[worksheet].plone_utils.addPortalMessage, parameter[name[msg], constant[warning]]]
variable[portal] assign[=] call[call[name[getToolByName], parameter[name[worksheet], constant[portal_url]]].getPortalObject, parameter[]]
variable[destination_url] assign[=] binary_operation[call[name[portal].absolute_url, parameter[]] + constant[/worksheets]]
call[name[request].response.redirect, parameter[name[destination_url]]]
return[name[allowed]] | keyword[def] identifier[checkUserAccess] ( identifier[worksheet] , identifier[request] , identifier[redirect] = keyword[True] ):
literal[string]
identifier[allowed] = identifier[worksheet] . identifier[checkUserAccess] ()
keyword[if] identifier[allowed] == keyword[False] keyword[and] identifier[redirect] == keyword[True] :
identifier[msg] = identifier[_] ( literal[string]
literal[string] ,
identifier[mapping] ={ literal[string] : identifier[worksheet] . identifier[Title] ()})
identifier[worksheet] . identifier[plone_utils] . identifier[addPortalMessage] ( identifier[msg] , literal[string] )
identifier[portal] = identifier[getToolByName] ( identifier[worksheet] , literal[string] ). identifier[getPortalObject] ()
identifier[destination_url] = identifier[portal] . identifier[absolute_url] ()+ literal[string]
identifier[request] . identifier[response] . identifier[redirect] ( identifier[destination_url] )
keyword[return] identifier[allowed] | def checkUserAccess(worksheet, request, redirect=True):
""" Checks if the current user has granted access to the worksheet.
If the user is an analyst without LabManager, LabClerk and
RegulatoryInspector roles and the option 'Allow analysts
only to access to the Worksheets on which they are assigned' is
ticked and the above condition is true, it will redirect to
the main Worksheets view.
Returns False if the user has no access, otherwise returns True
"""
# Deny access to foreign analysts
allowed = worksheet.checkUserAccess()
if allowed == False and redirect == True:
msg = _('You do not have sufficient privileges to view the worksheet ${worksheet_title}.', mapping={'worksheet_title': worksheet.Title()})
worksheet.plone_utils.addPortalMessage(msg, 'warning')
# Redirect to WS list
portal = getToolByName(worksheet, 'portal_url').getPortalObject()
destination_url = portal.absolute_url() + '/worksheets'
request.response.redirect(destination_url) # depends on [control=['if'], data=[]]
return allowed |
def start(self):
""" Start the SSH tunnels """
if self.is_alive:
self.logger.warning('Already started!')
return
self._create_tunnels()
if not self.is_active:
self._raise(BaseSSHTunnelForwarderError,
reason='Could not establish session to SSH gateway')
for _srv in self._server_list:
thread = threading.Thread(
target=self._serve_forever_wrapper,
args=(_srv, ),
name='Srv-{0}'.format(address_to_str(_srv.local_port))
)
thread.daemon = self.daemon_forward_servers
thread.start()
self._check_tunnel(_srv)
self.is_alive = any(self.tunnel_is_up.values())
if not self.is_alive:
self._raise(HandlerSSHTunnelForwarderError,
'An error occurred while opening tunnels.') | def function[start, parameter[self]]:
constant[ Start the SSH tunnels ]
if name[self].is_alive begin[:]
call[name[self].logger.warning, parameter[constant[Already started!]]]
return[None]
call[name[self]._create_tunnels, parameter[]]
if <ast.UnaryOp object at 0x7da1b13aa410> begin[:]
call[name[self]._raise, parameter[name[BaseSSHTunnelForwarderError]]]
for taget[name[_srv]] in starred[name[self]._server_list] begin[:]
variable[thread] assign[=] call[name[threading].Thread, parameter[]]
name[thread].daemon assign[=] name[self].daemon_forward_servers
call[name[thread].start, parameter[]]
call[name[self]._check_tunnel, parameter[name[_srv]]]
name[self].is_alive assign[=] call[name[any], parameter[call[name[self].tunnel_is_up.values, parameter[]]]]
if <ast.UnaryOp object at 0x7da1b13e0f70> begin[:]
call[name[self]._raise, parameter[name[HandlerSSHTunnelForwarderError], constant[An error occurred while opening tunnels.]]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[is_alive] :
identifier[self] . identifier[logger] . identifier[warning] ( literal[string] )
keyword[return]
identifier[self] . identifier[_create_tunnels] ()
keyword[if] keyword[not] identifier[self] . identifier[is_active] :
identifier[self] . identifier[_raise] ( identifier[BaseSSHTunnelForwarderError] ,
identifier[reason] = literal[string] )
keyword[for] identifier[_srv] keyword[in] identifier[self] . identifier[_server_list] :
identifier[thread] = identifier[threading] . identifier[Thread] (
identifier[target] = identifier[self] . identifier[_serve_forever_wrapper] ,
identifier[args] =( identifier[_srv] ,),
identifier[name] = literal[string] . identifier[format] ( identifier[address_to_str] ( identifier[_srv] . identifier[local_port] ))
)
identifier[thread] . identifier[daemon] = identifier[self] . identifier[daemon_forward_servers]
identifier[thread] . identifier[start] ()
identifier[self] . identifier[_check_tunnel] ( identifier[_srv] )
identifier[self] . identifier[is_alive] = identifier[any] ( identifier[self] . identifier[tunnel_is_up] . identifier[values] ())
keyword[if] keyword[not] identifier[self] . identifier[is_alive] :
identifier[self] . identifier[_raise] ( identifier[HandlerSSHTunnelForwarderError] ,
literal[string] ) | def start(self):
""" Start the SSH tunnels """
if self.is_alive:
self.logger.warning('Already started!')
return # depends on [control=['if'], data=[]]
self._create_tunnels()
if not self.is_active:
self._raise(BaseSSHTunnelForwarderError, reason='Could not establish session to SSH gateway') # depends on [control=['if'], data=[]]
for _srv in self._server_list:
thread = threading.Thread(target=self._serve_forever_wrapper, args=(_srv,), name='Srv-{0}'.format(address_to_str(_srv.local_port)))
thread.daemon = self.daemon_forward_servers
thread.start()
self._check_tunnel(_srv) # depends on [control=['for'], data=['_srv']]
self.is_alive = any(self.tunnel_is_up.values())
if not self.is_alive:
self._raise(HandlerSSHTunnelForwarderError, 'An error occurred while opening tunnels.') # depends on [control=['if'], data=[]] |
def augment_reading_list(self, primary_query, augment_query=None, reverse_negate=False):
"""Apply injected logic for slicing reading lists with additional content."""
primary_query = self.validate_query(primary_query)
augment_query = self.get_validated_augment_query(augment_query=augment_query)
try:
# We use this for cases like recent where queries are vague.
if reverse_negate:
primary_query = primary_query.filter(NegateQueryFilter(augment_query))
else:
augment_query = augment_query.filter(NegateQueryFilter(primary_query))
augment_query = randomize_es(augment_query)
return FirstSlotSlicer(primary_query, augment_query)
except TransportError:
return primary_query | def function[augment_reading_list, parameter[self, primary_query, augment_query, reverse_negate]]:
constant[Apply injected logic for slicing reading lists with additional content.]
variable[primary_query] assign[=] call[name[self].validate_query, parameter[name[primary_query]]]
variable[augment_query] assign[=] call[name[self].get_validated_augment_query, parameter[]]
<ast.Try object at 0x7da1b0ac9d50> | keyword[def] identifier[augment_reading_list] ( identifier[self] , identifier[primary_query] , identifier[augment_query] = keyword[None] , identifier[reverse_negate] = keyword[False] ):
literal[string]
identifier[primary_query] = identifier[self] . identifier[validate_query] ( identifier[primary_query] )
identifier[augment_query] = identifier[self] . identifier[get_validated_augment_query] ( identifier[augment_query] = identifier[augment_query] )
keyword[try] :
keyword[if] identifier[reverse_negate] :
identifier[primary_query] = identifier[primary_query] . identifier[filter] ( identifier[NegateQueryFilter] ( identifier[augment_query] ))
keyword[else] :
identifier[augment_query] = identifier[augment_query] . identifier[filter] ( identifier[NegateQueryFilter] ( identifier[primary_query] ))
identifier[augment_query] = identifier[randomize_es] ( identifier[augment_query] )
keyword[return] identifier[FirstSlotSlicer] ( identifier[primary_query] , identifier[augment_query] )
keyword[except] identifier[TransportError] :
keyword[return] identifier[primary_query] | def augment_reading_list(self, primary_query, augment_query=None, reverse_negate=False):
"""Apply injected logic for slicing reading lists with additional content."""
primary_query = self.validate_query(primary_query)
augment_query = self.get_validated_augment_query(augment_query=augment_query)
try:
# We use this for cases like recent where queries are vague.
if reverse_negate:
primary_query = primary_query.filter(NegateQueryFilter(augment_query)) # depends on [control=['if'], data=[]]
else:
augment_query = augment_query.filter(NegateQueryFilter(primary_query))
augment_query = randomize_es(augment_query)
return FirstSlotSlicer(primary_query, augment_query) # depends on [control=['try'], data=[]]
except TransportError:
return primary_query # depends on [control=['except'], data=[]] |
def _fitch_state(self, node, pos):
"""
Determine the Fitch profile for a single character of the node's sequence.
The profile is essentially the intersection between the children's
profiles or, if the former is empty, the union of the profiles.
Parameters
----------
node : PhyloTree.Clade:
Internal node which the profiles are to be determined
pos : int
Position in the node's sequence which the profiles should
be determinedf for.
Returns
-------
state : numpy.array
Fitch profile for the character at position pos of the given node.
"""
state = self._fitch_intersect([k.state[pos] for k in node.clades])
if len(state) == 0:
state = np.concatenate([k.state[pos] for k in node.clades])
return state | def function[_fitch_state, parameter[self, node, pos]]:
constant[
Determine the Fitch profile for a single character of the node's sequence.
The profile is essentially the intersection between the children's
profiles or, if the former is empty, the union of the profiles.
Parameters
----------
node : PhyloTree.Clade:
Internal node which the profiles are to be determined
pos : int
Position in the node's sequence which the profiles should
be determinedf for.
Returns
-------
state : numpy.array
Fitch profile for the character at position pos of the given node.
]
variable[state] assign[=] call[name[self]._fitch_intersect, parameter[<ast.ListComp object at 0x7da1b023c9d0>]]
if compare[call[name[len], parameter[name[state]]] equal[==] constant[0]] begin[:]
variable[state] assign[=] call[name[np].concatenate, parameter[<ast.ListComp object at 0x7da1b023d630>]]
return[name[state]] | keyword[def] identifier[_fitch_state] ( identifier[self] , identifier[node] , identifier[pos] ):
literal[string]
identifier[state] = identifier[self] . identifier[_fitch_intersect] ([ identifier[k] . identifier[state] [ identifier[pos] ] keyword[for] identifier[k] keyword[in] identifier[node] . identifier[clades] ])
keyword[if] identifier[len] ( identifier[state] )== literal[int] :
identifier[state] = identifier[np] . identifier[concatenate] ([ identifier[k] . identifier[state] [ identifier[pos] ] keyword[for] identifier[k] keyword[in] identifier[node] . identifier[clades] ])
keyword[return] identifier[state] | def _fitch_state(self, node, pos):
"""
Determine the Fitch profile for a single character of the node's sequence.
The profile is essentially the intersection between the children's
profiles or, if the former is empty, the union of the profiles.
Parameters
----------
node : PhyloTree.Clade:
Internal node which the profiles are to be determined
pos : int
Position in the node's sequence which the profiles should
be determinedf for.
Returns
-------
state : numpy.array
Fitch profile for the character at position pos of the given node.
"""
state = self._fitch_intersect([k.state[pos] for k in node.clades])
if len(state) == 0:
state = np.concatenate([k.state[pos] for k in node.clades]) # depends on [control=['if'], data=[]]
return state |
def _prepare_vts_results_dir(self, vts):
"""
Given a `VergetTargetSet`, prepare its results dir.
"""
vt_set_results_dir = os.path.join(self.versioned_workdir, 'results', vts.cache_key.hash)
safe_mkdir(vt_set_results_dir)
return vt_set_results_dir | def function[_prepare_vts_results_dir, parameter[self, vts]]:
constant[
Given a `VergetTargetSet`, prepare its results dir.
]
variable[vt_set_results_dir] assign[=] call[name[os].path.join, parameter[name[self].versioned_workdir, constant[results], name[vts].cache_key.hash]]
call[name[safe_mkdir], parameter[name[vt_set_results_dir]]]
return[name[vt_set_results_dir]] | keyword[def] identifier[_prepare_vts_results_dir] ( identifier[self] , identifier[vts] ):
literal[string]
identifier[vt_set_results_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[versioned_workdir] , literal[string] , identifier[vts] . identifier[cache_key] . identifier[hash] )
identifier[safe_mkdir] ( identifier[vt_set_results_dir] )
keyword[return] identifier[vt_set_results_dir] | def _prepare_vts_results_dir(self, vts):
"""
Given a `VergetTargetSet`, prepare its results dir.
"""
vt_set_results_dir = os.path.join(self.versioned_workdir, 'results', vts.cache_key.hash)
safe_mkdir(vt_set_results_dir)
return vt_set_results_dir |
def to_json(self):
""" Method to call to get a serializable object for json.dump or jsonify based on the target
:return: dict
"""
if self.subreference is not None:
return {
"source": self.objectId,
"selector": {
"type": "FragmentSelector",
"conformsTo": "http://ontology-dts.org/terms/subreference",
"value": self.subreference
}
}
else:
return {"source": self.objectId} | def function[to_json, parameter[self]]:
constant[ Method to call to get a serializable object for json.dump or jsonify based on the target
:return: dict
]
if compare[name[self].subreference is_not constant[None]] begin[:]
return[dictionary[[<ast.Constant object at 0x7da1affd6500>, <ast.Constant object at 0x7da1affd64a0>], [<ast.Attribute object at 0x7da1affd64d0>, <ast.Dict object at 0x7da1affd5870>]]] | keyword[def] identifier[to_json] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[subreference] keyword[is] keyword[not] keyword[None] :
keyword[return] {
literal[string] : identifier[self] . identifier[objectId] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[self] . identifier[subreference]
}
}
keyword[else] :
keyword[return] { literal[string] : identifier[self] . identifier[objectId] } | def to_json(self):
""" Method to call to get a serializable object for json.dump or jsonify based on the target
:return: dict
"""
if self.subreference is not None:
return {'source': self.objectId, 'selector': {'type': 'FragmentSelector', 'conformsTo': 'http://ontology-dts.org/terms/subreference', 'value': self.subreference}} # depends on [control=['if'], data=[]]
else:
return {'source': self.objectId} |
def redata(self, *args, **kwargs):
"""Update my ``data`` to match what's in my ``store``"""
select_name = kwargs.get('select_name')
if not self.store:
Clock.schedule_once(self.redata)
return
self.data = list(map(self.munge, enumerate(self._iter_keys())))
if select_name:
self._trigger_select_name(select_name) | def function[redata, parameter[self]]:
constant[Update my ``data`` to match what's in my ``store``]
variable[select_name] assign[=] call[name[kwargs].get, parameter[constant[select_name]]]
if <ast.UnaryOp object at 0x7da18bcc9450> begin[:]
call[name[Clock].schedule_once, parameter[name[self].redata]]
return[None]
name[self].data assign[=] call[name[list], parameter[call[name[map], parameter[name[self].munge, call[name[enumerate], parameter[call[name[self]._iter_keys, parameter[]]]]]]]]
if name[select_name] begin[:]
call[name[self]._trigger_select_name, parameter[name[select_name]]] | keyword[def] identifier[redata] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[select_name] = identifier[kwargs] . identifier[get] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[store] :
identifier[Clock] . identifier[schedule_once] ( identifier[self] . identifier[redata] )
keyword[return]
identifier[self] . identifier[data] = identifier[list] ( identifier[map] ( identifier[self] . identifier[munge] , identifier[enumerate] ( identifier[self] . identifier[_iter_keys] ())))
keyword[if] identifier[select_name] :
identifier[self] . identifier[_trigger_select_name] ( identifier[select_name] ) | def redata(self, *args, **kwargs):
"""Update my ``data`` to match what's in my ``store``"""
select_name = kwargs.get('select_name')
if not self.store:
Clock.schedule_once(self.redata)
return # depends on [control=['if'], data=[]]
self.data = list(map(self.munge, enumerate(self._iter_keys())))
if select_name:
self._trigger_select_name(select_name) # depends on [control=['if'], data=[]] |
def stop_request(self, stop_now=False):
"""Send a stop request to all the daemons
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: True if all daemons are reachable
"""
all_ok = True
for daemon_link in self.all_daemons_links:
logger.debug("Stopping: %s (%s)", daemon_link, stop_now)
if daemon_link == self.arbiter_link:
# I exclude myself from the process, I know we are going to stop ;)
continue
if not daemon_link.active:
# I exclude the daemons that are not active
continue
# Send a stop request to the daemon
try:
stop_ok = daemon_link.stop_request(stop_now=stop_now)
except LinkError:
stop_ok = True
logger.warning("Daemon stop request failed, %s probably stopped!", daemon_link)
all_ok = all_ok and stop_ok
daemon_link.stopping = True
self.stop_request_sent = all_ok
return self.stop_request_sent | def function[stop_request, parameter[self, stop_now]]:
constant[Send a stop request to all the daemons
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: True if all daemons are reachable
]
variable[all_ok] assign[=] constant[True]
for taget[name[daemon_link]] in starred[name[self].all_daemons_links] begin[:]
call[name[logger].debug, parameter[constant[Stopping: %s (%s)], name[daemon_link], name[stop_now]]]
if compare[name[daemon_link] equal[==] name[self].arbiter_link] begin[:]
continue
if <ast.UnaryOp object at 0x7da2041dbb20> begin[:]
continue
<ast.Try object at 0x7da2041da380>
variable[all_ok] assign[=] <ast.BoolOp object at 0x7da2041d9ae0>
name[daemon_link].stopping assign[=] constant[True]
name[self].stop_request_sent assign[=] name[all_ok]
return[name[self].stop_request_sent] | keyword[def] identifier[stop_request] ( identifier[self] , identifier[stop_now] = keyword[False] ):
literal[string]
identifier[all_ok] = keyword[True]
keyword[for] identifier[daemon_link] keyword[in] identifier[self] . identifier[all_daemons_links] :
identifier[logger] . identifier[debug] ( literal[string] , identifier[daemon_link] , identifier[stop_now] )
keyword[if] identifier[daemon_link] == identifier[self] . identifier[arbiter_link] :
keyword[continue]
keyword[if] keyword[not] identifier[daemon_link] . identifier[active] :
keyword[continue]
keyword[try] :
identifier[stop_ok] = identifier[daemon_link] . identifier[stop_request] ( identifier[stop_now] = identifier[stop_now] )
keyword[except] identifier[LinkError] :
identifier[stop_ok] = keyword[True]
identifier[logger] . identifier[warning] ( literal[string] , identifier[daemon_link] )
identifier[all_ok] = identifier[all_ok] keyword[and] identifier[stop_ok]
identifier[daemon_link] . identifier[stopping] = keyword[True]
identifier[self] . identifier[stop_request_sent] = identifier[all_ok]
keyword[return] identifier[self] . identifier[stop_request_sent] | def stop_request(self, stop_now=False):
"""Send a stop request to all the daemons
:param stop_now: stop now or go to stop wait mode
:type stop_now: bool
:return: True if all daemons are reachable
"""
all_ok = True
for daemon_link in self.all_daemons_links:
logger.debug('Stopping: %s (%s)', daemon_link, stop_now)
if daemon_link == self.arbiter_link:
# I exclude myself from the process, I know we are going to stop ;)
continue # depends on [control=['if'], data=[]]
if not daemon_link.active:
# I exclude the daemons that are not active
continue # depends on [control=['if'], data=[]]
# Send a stop request to the daemon
try:
stop_ok = daemon_link.stop_request(stop_now=stop_now) # depends on [control=['try'], data=[]]
except LinkError:
stop_ok = True
logger.warning('Daemon stop request failed, %s probably stopped!', daemon_link) # depends on [control=['except'], data=[]]
all_ok = all_ok and stop_ok
daemon_link.stopping = True # depends on [control=['for'], data=['daemon_link']]
self.stop_request_sent = all_ok
return self.stop_request_sent |
def deactivate(self):
"""When deactivated the :class:`PortEventListener` will not run
anything.
"""
self.event_queue.put(self.TERMINATE_SIGNAL)
self.dispatcher.join()
self.detector.terminate()
self.detector.join() | def function[deactivate, parameter[self]]:
constant[When deactivated the :class:`PortEventListener` will not run
anything.
]
call[name[self].event_queue.put, parameter[name[self].TERMINATE_SIGNAL]]
call[name[self].dispatcher.join, parameter[]]
call[name[self].detector.terminate, parameter[]]
call[name[self].detector.join, parameter[]] | keyword[def] identifier[deactivate] ( identifier[self] ):
literal[string]
identifier[self] . identifier[event_queue] . identifier[put] ( identifier[self] . identifier[TERMINATE_SIGNAL] )
identifier[self] . identifier[dispatcher] . identifier[join] ()
identifier[self] . identifier[detector] . identifier[terminate] ()
identifier[self] . identifier[detector] . identifier[join] () | def deactivate(self):
"""When deactivated the :class:`PortEventListener` will not run
anything.
"""
self.event_queue.put(self.TERMINATE_SIGNAL)
self.dispatcher.join()
self.detector.terminate()
self.detector.join() |
def inverse(self):
"""
Return the inverse of the graph.
@rtype: graph
@return: Complement graph for the graph.
"""
inv = self.__class__()
inv.add_nodes(self.nodes())
inv.complete()
for each in self.edges():
if (inv.has_edge(each)):
inv.del_edge(each)
return inv | def function[inverse, parameter[self]]:
constant[
Return the inverse of the graph.
@rtype: graph
@return: Complement graph for the graph.
]
variable[inv] assign[=] call[name[self].__class__, parameter[]]
call[name[inv].add_nodes, parameter[call[name[self].nodes, parameter[]]]]
call[name[inv].complete, parameter[]]
for taget[name[each]] in starred[call[name[self].edges, parameter[]]] begin[:]
if call[name[inv].has_edge, parameter[name[each]]] begin[:]
call[name[inv].del_edge, parameter[name[each]]]
return[name[inv]] | keyword[def] identifier[inverse] ( identifier[self] ):
literal[string]
identifier[inv] = identifier[self] . identifier[__class__] ()
identifier[inv] . identifier[add_nodes] ( identifier[self] . identifier[nodes] ())
identifier[inv] . identifier[complete] ()
keyword[for] identifier[each] keyword[in] identifier[self] . identifier[edges] ():
keyword[if] ( identifier[inv] . identifier[has_edge] ( identifier[each] )):
identifier[inv] . identifier[del_edge] ( identifier[each] )
keyword[return] identifier[inv] | def inverse(self):
"""
Return the inverse of the graph.
@rtype: graph
@return: Complement graph for the graph.
"""
inv = self.__class__()
inv.add_nodes(self.nodes())
inv.complete()
for each in self.edges():
if inv.has_edge(each):
inv.del_edge(each) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['each']]
return inv |
def push_identity(self, identifiers):
"""
:type identifiers: subject_abcs.IdentifierCollection
"""
if (not identifiers):
msg = ("Specified Subject identifiers cannot be None or empty "
"for 'run as' functionality.")
raise ValueError(msg)
stack = self.get_run_as_identifiers_stack()
if (not stack):
stack = []
stack.append(identifiers)
session = self.get_session()
session.set_internal_attribute(self.run_as_identifiers_session_key, stack) | def function[push_identity, parameter[self, identifiers]]:
constant[
:type identifiers: subject_abcs.IdentifierCollection
]
if <ast.UnaryOp object at 0x7da20e963430> begin[:]
variable[msg] assign[=] constant[Specified Subject identifiers cannot be None or empty for 'run as' functionality.]
<ast.Raise object at 0x7da20e961e40>
variable[stack] assign[=] call[name[self].get_run_as_identifiers_stack, parameter[]]
if <ast.UnaryOp object at 0x7da237d346a0> begin[:]
variable[stack] assign[=] list[[]]
call[name[stack].append, parameter[name[identifiers]]]
variable[session] assign[=] call[name[self].get_session, parameter[]]
call[name[session].set_internal_attribute, parameter[name[self].run_as_identifiers_session_key, name[stack]]] | keyword[def] identifier[push_identity] ( identifier[self] , identifier[identifiers] ):
literal[string]
keyword[if] ( keyword[not] identifier[identifiers] ):
identifier[msg] =( literal[string]
literal[string] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[stack] = identifier[self] . identifier[get_run_as_identifiers_stack] ()
keyword[if] ( keyword[not] identifier[stack] ):
identifier[stack] =[]
identifier[stack] . identifier[append] ( identifier[identifiers] )
identifier[session] = identifier[self] . identifier[get_session] ()
identifier[session] . identifier[set_internal_attribute] ( identifier[self] . identifier[run_as_identifiers_session_key] , identifier[stack] ) | def push_identity(self, identifiers):
"""
:type identifiers: subject_abcs.IdentifierCollection
"""
if not identifiers:
msg = "Specified Subject identifiers cannot be None or empty for 'run as' functionality."
raise ValueError(msg) # depends on [control=['if'], data=[]]
stack = self.get_run_as_identifiers_stack()
if not stack:
stack = [] # depends on [control=['if'], data=[]]
stack.append(identifiers)
session = self.get_session()
session.set_internal_attribute(self.run_as_identifiers_session_key, stack) |
def str_to_date(self):
"""
Returns the date attribute as a date object.
:returns: Date of the status if it exists.
:rtype: date or NoneType
"""
if hasattr(self, 'date'):
return date(*list(map(int, self.date.split('-'))))
else:
return None | def function[str_to_date, parameter[self]]:
constant[
Returns the date attribute as a date object.
:returns: Date of the status if it exists.
:rtype: date or NoneType
]
if call[name[hasattr], parameter[name[self], constant[date]]] begin[:]
return[call[name[date], parameter[<ast.Starred object at 0x7da1b0aa6650>]]] | keyword[def] identifier[str_to_date] ( identifier[self] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return] identifier[date] (* identifier[list] ( identifier[map] ( identifier[int] , identifier[self] . identifier[date] . identifier[split] ( literal[string] ))))
keyword[else] :
keyword[return] keyword[None] | def str_to_date(self):
"""
Returns the date attribute as a date object.
:returns: Date of the status if it exists.
:rtype: date or NoneType
"""
if hasattr(self, 'date'):
return date(*list(map(int, self.date.split('-')))) # depends on [control=['if'], data=[]]
else:
return None |
def update(self, data):
"""Hash data.
"""
view = memoryview(data)
bs = self.block_size
if self._buf:
need = bs - len(self._buf)
if len(view) < need:
self._buf += view.tobytes()
return
self._add_block(self._buf + view[:need].tobytes())
view = view[need:]
while len(view) >= bs:
self._add_block(view[:bs])
view = view[bs:]
self._buf = view.tobytes() | def function[update, parameter[self, data]]:
constant[Hash data.
]
variable[view] assign[=] call[name[memoryview], parameter[name[data]]]
variable[bs] assign[=] name[self].block_size
if name[self]._buf begin[:]
variable[need] assign[=] binary_operation[name[bs] - call[name[len], parameter[name[self]._buf]]]
if compare[call[name[len], parameter[name[view]]] less[<] name[need]] begin[:]
<ast.AugAssign object at 0x7da2046224a0>
return[None]
call[name[self]._add_block, parameter[binary_operation[name[self]._buf + call[call[name[view]][<ast.Slice object at 0x7da18fe90910>].tobytes, parameter[]]]]]
variable[view] assign[=] call[name[view]][<ast.Slice object at 0x7da18fe91db0>]
while compare[call[name[len], parameter[name[view]]] greater_or_equal[>=] name[bs]] begin[:]
call[name[self]._add_block, parameter[call[name[view]][<ast.Slice object at 0x7da18fe93100>]]]
variable[view] assign[=] call[name[view]][<ast.Slice object at 0x7da18fe929e0>]
name[self]._buf assign[=] call[name[view].tobytes, parameter[]] | keyword[def] identifier[update] ( identifier[self] , identifier[data] ):
literal[string]
identifier[view] = identifier[memoryview] ( identifier[data] )
identifier[bs] = identifier[self] . identifier[block_size]
keyword[if] identifier[self] . identifier[_buf] :
identifier[need] = identifier[bs] - identifier[len] ( identifier[self] . identifier[_buf] )
keyword[if] identifier[len] ( identifier[view] )< identifier[need] :
identifier[self] . identifier[_buf] += identifier[view] . identifier[tobytes] ()
keyword[return]
identifier[self] . identifier[_add_block] ( identifier[self] . identifier[_buf] + identifier[view] [: identifier[need] ]. identifier[tobytes] ())
identifier[view] = identifier[view] [ identifier[need] :]
keyword[while] identifier[len] ( identifier[view] )>= identifier[bs] :
identifier[self] . identifier[_add_block] ( identifier[view] [: identifier[bs] ])
identifier[view] = identifier[view] [ identifier[bs] :]
identifier[self] . identifier[_buf] = identifier[view] . identifier[tobytes] () | def update(self, data):
"""Hash data.
"""
view = memoryview(data)
bs = self.block_size
if self._buf:
need = bs - len(self._buf)
if len(view) < need:
self._buf += view.tobytes()
return # depends on [control=['if'], data=[]]
self._add_block(self._buf + view[:need].tobytes())
view = view[need:] # depends on [control=['if'], data=[]]
while len(view) >= bs:
self._add_block(view[:bs])
view = view[bs:] # depends on [control=['while'], data=['bs']]
self._buf = view.tobytes() |
def uri_exists_wait(uri, timeout=300, interval=5, storage_args={}):
"""
Block / waits until URI exists.
:param str uri: URI to check existence
:param float timeout: Number of seconds before timing out
:param float interval: Calls :func:`uri_exists` every ``interval`` seconds
:param dict storage_args: Keyword arguments to pass to the underlying storage object
:returns: ``True`` if URI exists
:rtype: bool
"""
uri_obj = get_uri_obj(uri, storage_args)
start_time = time.time()
while time.time() - start_time < timeout:
if uri_obj.exists(): return True
time.sleep(interval)
#end while
if uri_exists(uri): return True
return False | def function[uri_exists_wait, parameter[uri, timeout, interval, storage_args]]:
constant[
Block / waits until URI exists.
:param str uri: URI to check existence
:param float timeout: Number of seconds before timing out
:param float interval: Calls :func:`uri_exists` every ``interval`` seconds
:param dict storage_args: Keyword arguments to pass to the underlying storage object
:returns: ``True`` if URI exists
:rtype: bool
]
variable[uri_obj] assign[=] call[name[get_uri_obj], parameter[name[uri], name[storage_args]]]
variable[start_time] assign[=] call[name[time].time, parameter[]]
while compare[binary_operation[call[name[time].time, parameter[]] - name[start_time]] less[<] name[timeout]] begin[:]
if call[name[uri_obj].exists, parameter[]] begin[:]
return[constant[True]]
call[name[time].sleep, parameter[name[interval]]]
if call[name[uri_exists], parameter[name[uri]]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[uri_exists_wait] ( identifier[uri] , identifier[timeout] = literal[int] , identifier[interval] = literal[int] , identifier[storage_args] ={}):
literal[string]
identifier[uri_obj] = identifier[get_uri_obj] ( identifier[uri] , identifier[storage_args] )
identifier[start_time] = identifier[time] . identifier[time] ()
keyword[while] identifier[time] . identifier[time] ()- identifier[start_time] < identifier[timeout] :
keyword[if] identifier[uri_obj] . identifier[exists] (): keyword[return] keyword[True]
identifier[time] . identifier[sleep] ( identifier[interval] )
keyword[if] identifier[uri_exists] ( identifier[uri] ): keyword[return] keyword[True]
keyword[return] keyword[False] | def uri_exists_wait(uri, timeout=300, interval=5, storage_args={}):
"""
Block / waits until URI exists.
:param str uri: URI to check existence
:param float timeout: Number of seconds before timing out
:param float interval: Calls :func:`uri_exists` every ``interval`` seconds
:param dict storage_args: Keyword arguments to pass to the underlying storage object
:returns: ``True`` if URI exists
:rtype: bool
"""
uri_obj = get_uri_obj(uri, storage_args)
start_time = time.time()
while time.time() - start_time < timeout:
if uri_obj.exists():
return True # depends on [control=['if'], data=[]]
time.sleep(interval) # depends on [control=['while'], data=[]]
#end while
if uri_exists(uri):
return True # depends on [control=['if'], data=[]]
return False |
def render_to_json(self, context, **response_kwargs):
"""
Returns a JSON response, transforming 'context' to make the payload.
"""
response_kwargs['content_type'] = 'application/json'
return self.response_class(
self.convert_context_to_json(context),
**response_kwargs
) | def function[render_to_json, parameter[self, context]]:
constant[
Returns a JSON response, transforming 'context' to make the payload.
]
call[name[response_kwargs]][constant[content_type]] assign[=] constant[application/json]
return[call[name[self].response_class, parameter[call[name[self].convert_context_to_json, parameter[name[context]]]]]] | keyword[def] identifier[render_to_json] ( identifier[self] , identifier[context] ,** identifier[response_kwargs] ):
literal[string]
identifier[response_kwargs] [ literal[string] ]= literal[string]
keyword[return] identifier[self] . identifier[response_class] (
identifier[self] . identifier[convert_context_to_json] ( identifier[context] ),
** identifier[response_kwargs]
) | def render_to_json(self, context, **response_kwargs):
"""
Returns a JSON response, transforming 'context' to make the payload.
"""
response_kwargs['content_type'] = 'application/json'
return self.response_class(self.convert_context_to_json(context), **response_kwargs) |
def crypto_validator(func):
"""
This a decorator to be used for any method relying on the cryptography library. # noqa: E501
Its behaviour depends on the 'crypto_valid' attribute of the global 'conf'.
"""
def func_in(*args, **kwargs):
if not conf.crypto_valid:
raise ImportError("Cannot execute crypto-related method! "
"Please install python-cryptography v1.7 or later.") # noqa: E501
return func(*args, **kwargs)
return func_in | def function[crypto_validator, parameter[func]]:
constant[
This a decorator to be used for any method relying on the cryptography library. # noqa: E501
Its behaviour depends on the 'crypto_valid' attribute of the global 'conf'.
]
def function[func_in, parameter[]]:
if <ast.UnaryOp object at 0x7da1b21d45e0> begin[:]
<ast.Raise object at 0x7da1b21d6fb0>
return[call[name[func], parameter[<ast.Starred object at 0x7da1b1f942b0>]]]
return[name[func_in]] | keyword[def] identifier[crypto_validator] ( identifier[func] ):
literal[string]
keyword[def] identifier[func_in] (* identifier[args] ,** identifier[kwargs] ):
keyword[if] keyword[not] identifier[conf] . identifier[crypto_valid] :
keyword[raise] identifier[ImportError] ( literal[string]
literal[string] )
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[func_in] | def crypto_validator(func):
"""
This a decorator to be used for any method relying on the cryptography library. # noqa: E501
Its behaviour depends on the 'crypto_valid' attribute of the global 'conf'.
"""
def func_in(*args, **kwargs):
if not conf.crypto_valid:
raise ImportError('Cannot execute crypto-related method! Please install python-cryptography v1.7 or later.') # noqa: E501 # depends on [control=['if'], data=[]]
return func(*args, **kwargs)
return func_in |
def enable(states):
'''
Enable state function or sls run
CLI Example:
.. code-block:: bash
salt '*' state.enable highstate
salt '*' state.enable test.succeed_without_changes
.. note::
To enable a state file from running provide the same name that would
be passed in a state.sls call.
salt '*' state.disable bind.config
'''
ret = {
'res': True,
'msg': ''
}
states = salt.utils.args.split_input(states)
log.debug('states %s', states)
msg = []
_disabled = __salt__['grains.get']('state_runs_disabled')
if not isinstance(_disabled, list):
_disabled = []
_changed = False
for _state in states:
log.debug('_state %s', _state)
if _state not in _disabled:
msg.append('Info: {0} state already enabled.'.format(_state))
else:
msg.append('Info: {0} state enabled.'.format(_state))
_disabled.remove(_state)
_changed = True
if _changed:
__salt__['grains.setval']('state_runs_disabled', _disabled)
ret['msg'] = '\n'.join(msg)
# refresh the grains
__salt__['saltutil.refresh_modules']()
return ret | def function[enable, parameter[states]]:
constant[
Enable state function or sls run
CLI Example:
.. code-block:: bash
salt '*' state.enable highstate
salt '*' state.enable test.succeed_without_changes
.. note::
To enable a state file from running provide the same name that would
be passed in a state.sls call.
salt '*' state.disable bind.config
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c223e0>, <ast.Constant object at 0x7da1b1c204f0>], [<ast.Constant object at 0x7da1b210b580>, <ast.Constant object at 0x7da1b210b040>]]
variable[states] assign[=] call[name[salt].utils.args.split_input, parameter[name[states]]]
call[name[log].debug, parameter[constant[states %s], name[states]]]
variable[msg] assign[=] list[[]]
variable[_disabled] assign[=] call[call[name[__salt__]][constant[grains.get]], parameter[constant[state_runs_disabled]]]
if <ast.UnaryOp object at 0x7da1b2000610> begin[:]
variable[_disabled] assign[=] list[[]]
variable[_changed] assign[=] constant[False]
for taget[name[_state]] in starred[name[states]] begin[:]
call[name[log].debug, parameter[constant[_state %s], name[_state]]]
if compare[name[_state] <ast.NotIn object at 0x7da2590d7190> name[_disabled]] begin[:]
call[name[msg].append, parameter[call[constant[Info: {0} state already enabled.].format, parameter[name[_state]]]]]
if name[_changed] begin[:]
call[call[name[__salt__]][constant[grains.setval]], parameter[constant[state_runs_disabled], name[_disabled]]]
call[name[ret]][constant[msg]] assign[=] call[constant[
].join, parameter[name[msg]]]
call[call[name[__salt__]][constant[saltutil.refresh_modules]], parameter[]]
return[name[ret]] | keyword[def] identifier[enable] ( identifier[states] ):
literal[string]
identifier[ret] ={
literal[string] : keyword[True] ,
literal[string] : literal[string]
}
identifier[states] = identifier[salt] . identifier[utils] . identifier[args] . identifier[split_input] ( identifier[states] )
identifier[log] . identifier[debug] ( literal[string] , identifier[states] )
identifier[msg] =[]
identifier[_disabled] = identifier[__salt__] [ literal[string] ]( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[_disabled] , identifier[list] ):
identifier[_disabled] =[]
identifier[_changed] = keyword[False]
keyword[for] identifier[_state] keyword[in] identifier[states] :
identifier[log] . identifier[debug] ( literal[string] , identifier[_state] )
keyword[if] identifier[_state] keyword[not] keyword[in] identifier[_disabled] :
identifier[msg] . identifier[append] ( literal[string] . identifier[format] ( identifier[_state] ))
keyword[else] :
identifier[msg] . identifier[append] ( literal[string] . identifier[format] ( identifier[_state] ))
identifier[_disabled] . identifier[remove] ( identifier[_state] )
identifier[_changed] = keyword[True]
keyword[if] identifier[_changed] :
identifier[__salt__] [ literal[string] ]( literal[string] , identifier[_disabled] )
identifier[ret] [ literal[string] ]= literal[string] . identifier[join] ( identifier[msg] )
identifier[__salt__] [ literal[string] ]()
keyword[return] identifier[ret] | def enable(states):
"""
Enable state function or sls run
CLI Example:
.. code-block:: bash
salt '*' state.enable highstate
salt '*' state.enable test.succeed_without_changes
.. note::
To enable a state file from running provide the same name that would
be passed in a state.sls call.
salt '*' state.disable bind.config
"""
ret = {'res': True, 'msg': ''}
states = salt.utils.args.split_input(states)
log.debug('states %s', states)
msg = []
_disabled = __salt__['grains.get']('state_runs_disabled')
if not isinstance(_disabled, list):
_disabled = [] # depends on [control=['if'], data=[]]
_changed = False
for _state in states:
log.debug('_state %s', _state)
if _state not in _disabled:
msg.append('Info: {0} state already enabled.'.format(_state)) # depends on [control=['if'], data=['_state']]
else:
msg.append('Info: {0} state enabled.'.format(_state))
_disabled.remove(_state)
_changed = True # depends on [control=['for'], data=['_state']]
if _changed:
__salt__['grains.setval']('state_runs_disabled', _disabled) # depends on [control=['if'], data=[]]
ret['msg'] = '\n'.join(msg)
# refresh the grains
__salt__['saltutil.refresh_modules']()
return ret |
def atoms(lines):
"""Parse atom block into atom objects
Returns:
dict: networkx nodes
"""
# Convert sdf style charge to actual charge
conv_charge_table = {0: 0, 1: 3, 2: 2, 3: 1, 4: 0, 5: -1, 6: -2, 7: -3}
results = {}
for i, line in enumerate(lines):
symbol = line[31:34].rstrip()
try:
atom = Atom(symbol)
except KeyError:
raise ValueError(symbol)
xpos = float(line[0:10])
ypos = float(line[10:20])
zpos = float(line[20:30])
atom.coords = (xpos, ypos, zpos)
atom.mass_diff = int(line[34:37])
old_sdf_charge = int(line[37:40])
atom.charge = conv_charge_table[old_sdf_charge]
if old_sdf_charge == 4:
atom.radical = 1
# atom.stereo_flag = int(line[40:43]) # Not used
# valence = int(line[46:49])
# if valence:
# atom.valence = valence
results[i + 1] = {"atom": atom}
return results | def function[atoms, parameter[lines]]:
constant[Parse atom block into atom objects
Returns:
dict: networkx nodes
]
variable[conv_charge_table] assign[=] dictionary[[<ast.Constant object at 0x7da1b236c310>, <ast.Constant object at 0x7da1b236c1c0>, <ast.Constant object at 0x7da1b236c2e0>, <ast.Constant object at 0x7da1b236c070>, <ast.Constant object at 0x7da1b236c280>, <ast.Constant object at 0x7da1b236c0d0>, <ast.Constant object at 0x7da1b236c040>, <ast.Constant object at 0x7da1b236c130>], [<ast.Constant object at 0x7da1b24ef280>, <ast.Constant object at 0x7da1b24ec850>, <ast.Constant object at 0x7da1b24efac0>, <ast.Constant object at 0x7da1b24eeaa0>, <ast.Constant object at 0x7da1b24ec970>, <ast.UnaryOp object at 0x7da1b24eca00>, <ast.UnaryOp object at 0x7da1b24ef8b0>, <ast.UnaryOp object at 0x7da1b24ee920>]]
variable[results] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b24ef460>, <ast.Name object at 0x7da1b24eec80>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:]
variable[symbol] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b24ee8c0>].rstrip, parameter[]]
<ast.Try object at 0x7da1b24ef100>
variable[xpos] assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b24c2560>]]]
variable[ypos] assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b2390ee0>]]]
variable[zpos] assign[=] call[name[float], parameter[call[name[line]][<ast.Slice object at 0x7da1b2390760>]]]
name[atom].coords assign[=] tuple[[<ast.Name object at 0x7da1b23904f0>, <ast.Name object at 0x7da1b2392170>, <ast.Name object at 0x7da1b2393bb0>]]
name[atom].mass_diff assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b23932b0>]]]
variable[old_sdf_charge] assign[=] call[name[int], parameter[call[name[line]][<ast.Slice object at 0x7da1b2393f10>]]]
name[atom].charge assign[=] call[name[conv_charge_table]][name[old_sdf_charge]]
if compare[name[old_sdf_charge] equal[==] constant[4]] begin[:]
name[atom].radical assign[=] constant[1]
call[name[results]][binary_operation[name[i] + constant[1]]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2391180>], [<ast.Name object at 0x7da1b2393f40>]]
return[name[results]] | keyword[def] identifier[atoms] ( identifier[lines] ):
literal[string]
identifier[conv_charge_table] ={ literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] :- literal[int] , literal[int] :- literal[int] , literal[int] :- literal[int] }
identifier[results] ={}
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[lines] ):
identifier[symbol] = identifier[line] [ literal[int] : literal[int] ]. identifier[rstrip] ()
keyword[try] :
identifier[atom] = identifier[Atom] ( identifier[symbol] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ValueError] ( identifier[symbol] )
identifier[xpos] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ])
identifier[ypos] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ])
identifier[zpos] = identifier[float] ( identifier[line] [ literal[int] : literal[int] ])
identifier[atom] . identifier[coords] =( identifier[xpos] , identifier[ypos] , identifier[zpos] )
identifier[atom] . identifier[mass_diff] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ])
identifier[old_sdf_charge] = identifier[int] ( identifier[line] [ literal[int] : literal[int] ])
identifier[atom] . identifier[charge] = identifier[conv_charge_table] [ identifier[old_sdf_charge] ]
keyword[if] identifier[old_sdf_charge] == literal[int] :
identifier[atom] . identifier[radical] = literal[int]
identifier[results] [ identifier[i] + literal[int] ]={ literal[string] : identifier[atom] }
keyword[return] identifier[results] | def atoms(lines):
"""Parse atom block into atom objects
Returns:
dict: networkx nodes
"""
# Convert sdf style charge to actual charge
conv_charge_table = {0: 0, 1: 3, 2: 2, 3: 1, 4: 0, 5: -1, 6: -2, 7: -3}
results = {}
for (i, line) in enumerate(lines):
symbol = line[31:34].rstrip()
try:
atom = Atom(symbol) # depends on [control=['try'], data=[]]
except KeyError:
raise ValueError(symbol) # depends on [control=['except'], data=[]]
xpos = float(line[0:10])
ypos = float(line[10:20])
zpos = float(line[20:30])
atom.coords = (xpos, ypos, zpos)
atom.mass_diff = int(line[34:37])
old_sdf_charge = int(line[37:40])
atom.charge = conv_charge_table[old_sdf_charge]
if old_sdf_charge == 4:
atom.radical = 1 # depends on [control=['if'], data=[]]
# atom.stereo_flag = int(line[40:43]) # Not used
# valence = int(line[46:49])
# if valence:
# atom.valence = valence
results[i + 1] = {'atom': atom} # depends on [control=['for'], data=[]]
return results |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.