signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def facilityMsToNet(SsVersionIndicator_presence=0):
    """Build a FACILITY message (GSM 04.08 Section 9.3.9.2), MS to network.

    :param SsVersionIndicator_presence: set to 1 to append the optional
        SS Version Indicator IE to the packet.
    :return: the assembled packet (layers joined with ``/``).
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x3a)  # 00111010
    c = Facility()
    packet = a / b / c
    # Use equality, not `is`: identity comparison with an int literal only
    # works by accident of CPython's small-int caching.
    if SsVersionIndicator_presence == 1:
        d = SsVersionIndicatorHdr(ieiSVI=0x7F, eightBitSVI=0x0)
        packet = packet / d
    return packet
def changeSize(self, newsize):
    """Changes the size of the layer. Should only be called through
    Network.changeLayerSize().

    Resizes the weight vector, keeping as many of the old values as fit,
    and re-allocates every per-node buffer at the new size
    (overwrites current data).
    """
    if newsize <= 0:
        raise LayerError('Layer size changed to zero.', newsize)
    # Start from a fresh random vector, then copy the old weights into
    # the first minSize slots; slots beyond the old size keep their
    # random initial values.
    minSize = min(self.size, newsize)
    bias = randomArray(newsize, self._maxRandom)
    Numeric.put(bias, Numeric.arange(minSize), self.weight)
    self.weight = bias
    self.size = newsize
    self.displayWidth = newsize
    # Any previously set targets/activations are stale at the new size.
    self.targetSet = 0
    self.activationSet = 0
    # Re-allocate all per-node state as zeroed float vectors.
    self.target = Numeric.zeros(self.size, 'f')
    self.error = Numeric.zeros(self.size, 'f')
    self.activation = Numeric.zeros(self.size, 'f')
    self.dweight = Numeric.zeros(self.size, 'f')
    self.delta = Numeric.zeros(self.size, 'f')
    self.netinput = Numeric.zeros(self.size, 'f')
    self.wed = Numeric.zeros(self.size, 'f')
    self.wedLast = Numeric.zeros(self.size, 'f')
def use_categories_as_metadata_and_replace_terms(self):
    '''Returns a TermDocMatrix which is identical to self except the metadata values are now identical to the
    categories present and the term-doc-matrix is now the metadata matrix.

    :return: TermDocMatrix
    '''
    # Build a one-hot document-by-category sparse matrix: row i (document i)
    # gets a 1 in the column of its category.
    new_metadata_factory = CSRMatrixFactory()
    for i, category_idx in enumerate(self.get_category_ids()):
        new_metadata_factory[i, category_idx] = 1
    new_metadata = new_metadata_factory.get_csr_matrix()
    # The old metadata matrix (self._mX) takes the place of the term matrix,
    # and the category index store doubles as the new metadata index store.
    # NOTE(review): `self._y == self._y` presumably produces an all-True
    # mask (keep every document) — confirm against _make_new_term_doc_matrix.
    new_tdm = self._make_new_term_doc_matrix(self._mX, new_metadata, self._y,
                                             self._metadata_idx_store,
                                             self._category_idx_store,
                                             copy(self._category_idx_store),
                                             self._y == self._y)
    return new_tdm
def _RemoveForwardedIps ( self , forwarded_ips , interface ) :
"""Remove the forwarded IP addresses from the network interface .
Args :
forwarded _ ips : list , the forwarded IP address strings to delete .
interface : string , the output device to use .""" | for address in forwarded_ips :
self . ip_forwarding_utils . RemoveForwardedIp ( address , interface ) |
def run_with_router(func, *args, **kwargs):
    """Arrange for `func(router, *args, **kwargs)` to run with a temporary
    :class:`mitogen.master.Router`, ensuring the Router and Broker are
    correctly shut down during normal or exceptional return.

    :returns:
        `func`'s return value.
    """
    broker = mitogen.master.Broker()
    router = mitogen.master.Router(broker)
    try:
        return func(router, *args, **kwargs)
    finally:
        # Always stop the broker thread and wait for it to exit, even when
        # func raised.
        broker.shutdown()
        broker.join()
def _from_dict(cls, _dict):
    """Initialize a ValueCollection object from a json dictionary."""
    args = {}
    try:
        raw_values = _dict['values']
    except KeyError:
        raise ValueError(
            'Required property \'values\' not present in ValueCollection JSON')
    args['values'] = [Value._from_dict(item) for item in raw_values]
    try:
        raw_pagination = _dict['pagination']
    except KeyError:
        raise ValueError(
            'Required property \'pagination\' not present in ValueCollection JSON')
    args['pagination'] = Pagination._from_dict(raw_pagination)
    return cls(**args)
def fix_whitespace(line, offset, replacement):
    """Replace whitespace at offset and return fixed line."""
    # Strip whitespace (including escaped newlines) on both sides of the
    # split point, then glue the halves together with the replacement.
    strip_chars = '\n\r \t\\'
    head = line[:offset].rstrip(strip_chars)
    tail = line[offset:].lstrip(strip_chars)
    # Never change the spacing in front of a comment.
    if tail.startswith('#'):
        return line
    return head + replacement + tail
def _state_run(self):
    '''Execute a state run based on information set in the minion config file'''
    if self.opts['startup_states']:
        # A remote file_client with the master disabled has no fileserver
        # to pull states from, so a startup run cannot work.
        if self.opts.get('master_type', 'str') == 'disable' and self.opts.get('file_client', 'remote') == 'remote':
            log.warning('Cannot run startup_states when \'master_type\' is set ' 'to \'disable\' and \'file_client\' is set to ' '\'remote\'. Skipping.')
        else:
            # 'jid': 'req' marks this as a locally requested job.
            data = {'jid': 'req', 'ret': self.opts.get('ext_job_cache', '')}
            if self.opts['startup_states'] == 'sls':
                data['fun'] = 'state.sls'
                data['arg'] = [self.opts['sls_list']]
            elif self.opts['startup_states'] == 'top':
                data['fun'] = 'state.top'
                data['arg'] = [self.opts['top_file']]
            else:
                # Any other truthy value falls back to a full highstate.
                data['fun'] = 'state.highstate'
                data['arg'] = []
            self._handle_decoded_payload(data)
def asmono(samples: np.ndarray, channel: Union[int, str] = 0) -> np.ndarray:
    """Convert samples to mono if they are not mono already.

    The returned array will always have the shape (numframes,).

    Args:
        samples: sample array; mono input may be flat ([1, 2, 3, ...]) or
            column-shaped ([[1], [2], [3], ...]).
        channel: the channel number to use, or 'mix' to mix-down
            all channels.

    Raises:
        TypeError: if a mono array holds non-numeric elements.
        ValueError: if channel is neither an int nor 'mix'.
    """
    if numchannels(samples) == 1:
        # Mono input may be flat or shaped (numframes, 1); normalize to flat.
        # Include np.floating: numpy scalars like np.float32 are NOT
        # subclasses of the builtin float.
        if isinstance(samples[0], (float, np.floating)):
            return samples
        elif isinstance(samples[0], np.ndarray):  # fixed: was np.dnarray (typo)
            return np.reshape(samples, (len(samples),))
        else:
            raise TypeError("Samples should be numeric, found: %s" % str(type(samples[0])))
    if isinstance(channel, int):
        return samples[:, channel]
    elif channel == 'mix':
        return _mix(samples, scale_by_numchannels=True)
    else:
        raise ValueError("channel has to be an integer indicating a channel,"
                         " or 'mix' to mix down all channels")
def Rz_matrix(theta):
    """Rotation matrix around the Z axis"""
    c, s = np.cos(theta), np.sin(theta)
    return np.array([
        [c, -s, 0],
        [s, c, 0],
        [0, 0, 1],
    ])
def _maybe_pack_examples ( self , generator ) :
"""Wraps generator with packer if self . packed _ length .""" | if not self . packed_length :
return generator
return generator_utils . pack_examples ( generator , self . has_inputs , self . packed_length , spacing = self . packed_spacing , chop_long_sequences = not self . has_inputs ) |
def render_page(path=None, user_content=False, context=None, username=None,
                password=None, render_offline=False, render_wide=False,
                render_inline=False, api_url=None, title=None, text=None,
                quiet=None, grip_class=None):
    """Renders the specified markup text to an HTML page and returns it."""
    app = create_app(path, user_content, context, username, password,
                     render_offline, render_wide, render_inline, api_url,
                     title, text, False, quiet, grip_class)
    return app.render()
def click(self, x, y):
    '''Simulate click operation

    Args:
        - x (int): position of x
        - y (int): position of y

    Returns:
        self
    '''
    # Coordinates are scaled down to the device's coordinate space.
    script = 'target.tap({x: %d, y: %d})' % (x / self._scale, y / self._scale)
    self._run_nowait(script)
    return self
def get_reminder(self, reminder_key):
    '''Gets one reminder

    Args:
        reminder_key: key for the reminder to get

    return (status code, reminder dict)
    '''
    # Required sanity check. Fixed: the original tested `if reminder_key`,
    # which rejected every valid key and never issued the request.
    if not reminder_key:
        return requests.codes.bad_request, None
    uri = '/'.join([self.api_uri, self.reminders_suffix, reminder_key])
    return self._req('get', uri)
def get_choices(module_name):
    """Retrieve members from ``module_name``'s ``__all__`` list.

    Falls back to the module's class names (excluding "device") when the
    module defines no ``__all__``; returns an empty list when the module
    cannot be imported.

    :rtype: list
    """
    try:
        module = importlib.import_module(module_name)
    except ImportError:
        return []
    try:
        return module.__all__
    except AttributeError:
        return [name for name, _ in inspect.getmembers(module, inspect.isclass)
                if name != "device"]
async def reconnect(self):
    """Reconnect after the connection has dropped."""
    self.clean()
    try:
        self.writer.close()
    except Exception:
        # Best-effort close; the writer may already be gone. Narrowed from a
        # bare `except:` so SystemExit/KeyboardInterrupt still propagate.
        pass
    self.closed = True
    await self.connect()
    if self.debug:
        print("reconnect to {}".format((self.hostname, self.port)))
def auth(self):
    """Authenticate with the miner and obtain a JSON web token (JWT)."""
    url = parse.urljoin(self.base_url, '/api/auth')
    credentials = {'username': self.username, 'password': self.password}
    response = requests.post(url, timeout=self.timeout, data=credentials)
    response.raise_for_status()
    payload = response.json()
    if 'jwt' not in payload:
        raise ValueError("Not authorized: didn't receive token, check username or password.")
    # Cache the token for subsequent authenticated requests.
    self.jwt = payload['jwt']
    return payload
def get_upstream_fork_point(self):
    """Get the most recent ancestor of HEAD that occurs on an upstream
    branch.

    First looks at the current branch's tracking branch, if applicable. If
    that doesn't work, looks at every other branch to find the most recent
    ancestor of HEAD that occurs on a tracking branch.

    Returns:
        git.Commit object or None
    """
    possible_relatives = []
    try:
        if not self.repo:
            return None
        try:
            active_branch = self.repo.active_branch
        except (TypeError, ValueError):
            # detached head
            logger.debug("git is in a detached head state")
            return None
        else:
            tracking_branch = active_branch.tracking_branch()
            if tracking_branch:
                possible_relatives.append(tracking_branch.commit)
        # Fall back to every branch's tracking branch when the active
        # branch has none.
        if not possible_relatives:
            for branch in self.repo.branches:
                tracking_branch = branch.tracking_branch()
                if tracking_branch is not None:
                    possible_relatives.append(tracking_branch.commit)
        head = self.repo.head
        most_recent_ancestor = None
        # merge_base yields the common ancestor(s) of HEAD and each
        # candidate; keep the newest one seen so far.
        for possible_relative in possible_relatives:
            # at most one:
            for ancestor in self.repo.merge_base(head, possible_relative):
                if most_recent_ancestor is None:
                    most_recent_ancestor = ancestor
                elif self.repo.is_ancestor(most_recent_ancestor, ancestor):
                    most_recent_ancestor = ancestor
        return most_recent_ancestor
    except exc.GitCommandError as e:
        logger.debug("git remote upstream fork point could not be found")
        # NOTE(review): `e.message` is a Python 2 idiom; on Python 3
        # GitCommandError exposes no `.message` — confirm target version.
        logger.debug(e.message)
        return None
def _parse_spacy_kwargs ( ** kwargs ) :
"""Supported args include :
Args :
n _ threads / num _ threads : Number of threads to use . Uses num _ cpus - 1 by default .
batch _ size : The number of texts to accumulate into a common working set before processing .
( Default value : 1000)""" | n_threads = kwargs . get ( 'n_threads' ) or kwargs . get ( 'num_threads' )
batch_size = kwargs . get ( 'batch_size' )
if n_threads is None or n_threads is - 1 :
n_threads = cpu_count ( ) - 1
if batch_size is None or batch_size is - 1 :
batch_size = 1000
return n_threads , batch_size |
def _pigpio_aio_command_ext(self, cmd, p1, p2, p3, extents, rl=True):
    """Runs an extended pigpio socket command.

    sl:= command socket and lock.
    cmd:= the command to be executed.
    p1:= command parameter 1 (if applicable).
    p2:= command parameter 2 (if applicable).
    p3:= total size in bytes of following extents
    extents:= additional data blocks
    """
    # Serialize access to the shared command socket.
    with (yield from self._lock):
        # Fixed 16-byte header: cmd, p1, p2, p3 packed as four uint32s,
        # followed by the extent payloads.
        ext = bytearray(struct.pack('IIII', cmd, p1, p2, p3))
        for x in extents:
            if isinstance(x, str):
                # Text extents are encoded to bytes first.
                ext.extend(_b(x))
            else:
                ext.extend(x)
        # NOTE(review): sock_sendall returns a coroutine which is not
        # awaited here — confirm the send is actually completed before
        # the recv below.
        self._loop.sock_sendall(self.s, ext)
        # Responses are 16 bytes; the final uint32 is the result code.
        response = yield from self._loop.sock_recv(self.s, 16)
        _, res = struct.unpack('12sI', response)
        return res
def fit(self, y, exogenous=None):
    """Fit the transformer.

    Learns the value of ``lmbda``, if not specified in the constructor.
    If defined in the constructor, is not re-learned.

    Parameters
    ----------
    y : array-like or None, shape=(n_samples,)
        The endogenous (time-series) array.

    exogenous : array-like or None, shape=(n_samples, n_features), optional
        The exogenous array of additional covariates. Not used for
        endogenous transformers. Default is None, and non-None values will
        serve as pass-through arrays.
    """
    lmbda2 = self.lmbda2
    if lmbda2 < 0:
        raise ValueError("lmbda2 must be a non-negative scalar value")
    lmbda1 = self.lmbda
    if lmbda1 is None:
        # Estimate lambda by maximum likelihood when it was not fixed
        # in the constructor.
        y, _ = self._check_y_exog(y, exogenous)
        _, lmbda1 = stats.boxcox(y, lmbda=None, alpha=None)
    self.lam1_ = lmbda1
    self.lam2_ = lmbda2
    return self
def projects(self, term, field=None, **kwargs):
    """Search for projects. Defaults to project_title. Other fields are:

        project_reference
        project_abstract

    Args:
        term (str): Term to search for.
        kwargs (dict): additional keywords passed into
            requests.session.get params keyword.
    """
    params = dict(kwargs, q=term)
    # 'pro.t' (project_title) is the default search field.
    params['f'] = self._FIELD_MAP[field] if field else 'pro.t'
    res = self.session.get(self._BASE_URI + 'projects', params=params)
    self.handle_http_error(res)
    return res
def addClassPath(path1):
    """Add a path to the java class path"""
    global _CLASSPATHS
    # Normalize to an absolute path so relative inputs resolve consistently.
    path1 = _os.path.abspath(path1)
    # Cygwin POSIX paths must be translated to Windows form for the JVM.
    if _sys.platform == 'cygwin':
        path1 = _posix2win(path1)
    _CLASSPATHS.add(str(path1))
async def _connect_one(self, remote_address):
    '''Connect to the proxy and perform a handshake requesting a connection.

    Return the open socket on success, or the exception on failure.
    '''
    loop = asyncio.get_event_loop()
    # Try each resolved address of the proxy in turn.
    for info in await loop.getaddrinfo(str(self.address.host), self.address.port, type=socket.SOCK_STREAM):
        # This object has state so is only good for one connection
        client = self.protocol(remote_address, self.auth)
        sock = socket.socket(family=info[0])
        try:
            # A non-blocking socket is required by loop socket methods
            sock.setblocking(False)
            await loop.sock_connect(sock, info[4])
            await self._handshake(client, sock, loop)
            self.peername = sock.getpeername()
            return sock
        except (OSError, SOCKSProtocolError) as e:
            exception = e
            # Don't close the socket because of an asyncio bug
            # see https://github.com/kyuupichan/aiorpcX/issues/8
    # NOTE(review): if getaddrinfo returns no entries, `exception` is never
    # bound and this line raises NameError — confirm callers guarantee a
    # resolvable host.
    return exception
def map_query(self, init_iter=1000, later_iter=20, dual_threshold=0.0002,
              integrality_gap_threshold=0.0002, tighten_triplet=True,
              max_triplets=5, max_iterations=100, prolong=False):
    """MAP query method using Max Product LP method.

    This returns the best assignment of the nodes in the form of a dictionary.

    Parameters
    ----------
    init_iter : integer
        Number of maximum iterations that we want MPLP to run for the first time.
    later_iter : integer
        Number of maximum iterations that we want MPLP to run for later iterations
    dual_threshold : double
        This sets the minimum width between the dual objective decrements. If the decrement is lesser
        than the threshold, then that means we have stuck on a local minima.
    integrality_gap_threshold : double
        This sets the threshold for the integrality gap below which we say that the solution
        is satisfactory.
    tighten_triplet : bool
        set whether to use triplets as clusters or not.
    max_triplets : integer
        Set the maximum number of triplets that can be added at once.
    max_iterations : integer
        Maximum number of times we tighten the relaxation. Used only when tighten_triplet is set True.
    prolong : bool
        If set False: The moment we exhaust of all the triplets the tightening stops.
        If set True: The tightening will be performed max_iterations number of times irrespective of the
        triplets.

    Reference:
    Section 3.3: The Dual Algorithm; Tightening LP Relaxation for MAP using Message Passing (2008)
    By Sontag Et al.

    Examples
    --------
    >>> from pgmpy.models import MarkovModel
    >>> from pgmpy.factors.discrete import DiscreteFactor
    >>> from pgmpy.inference import Mplp
    >>> import numpy as np
    >>> student = MarkovModel()
    >>> student.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'D'), ('E', 'F')])
    >>> factor_a = DiscreteFactor(['A'], cardinality=[2], values=np.array([0.54577, 1.8323]))
    >>> factor_b = DiscreteFactor(['B'], cardinality=[2], values=np.array([0.93894, 1.065]))
    >>> factor_c = DiscreteFactor(['C'], cardinality=[2], values=np.array([0.89205, 1.121]))
    >>> factor_d = DiscreteFactor(['D'], cardinality=[2], values=np.array([0.56292, 1.7765]))
    >>> factor_e = DiscreteFactor(['E'], cardinality=[2], values=np.array([0.47117, 2.1224]))
    >>> factor_f = DiscreteFactor(['F'], cardinality=[2], values=np.array([1.5093, 0.66257]))
    >>> factor_a_b = DiscreteFactor(['A', 'B'], cardinality=[2, 2],
    ...                             values=np.array([1.3207, 0.75717, 0.75717, 1.3207]))
    >>> factor_b_c = DiscreteFactor(['B', 'C'], cardinality=[2, 2],
    ...                             values=np.array([0.00024189, 4134.2, 4134.2, 0.0002418]))
    >>> factor_c_d = DiscreteFactor(['C', 'D'], cardinality=[2, 2],
    ...                             values=np.array([0.0043227, 231.34, 231.34, 0.0043227]))
    >>> factor_d_e = DiscreteFactor(['E', 'F'], cardinality=[2, 2],
    ...                             values=np.array([31.228, 0.032023, 0.032023, 31.228]))
    >>> student.add_factors(factor_a, factor_b, factor_c, factor_d, factor_e, factor_f,
    ...                     factor_a_b, factor_b_c, factor_c_d, factor_d_e)
    >>> mplp = Mplp(student)
    >>> result = mplp.map_query()
    >>> result
    {'B': 0.93894, 'C': 1.121, 'A': 1.8323, 'F': 1.5093, 'D': 1.7765, 'E': 2.12239}
    """
    self.dual_threshold = dual_threshold
    self.integrality_gap_threshold = integrality_gap_threshold
    # Run MPLP initially for a maximum of init_iter times.
    self._run_mplp(init_iter)
    # If triplets are to be used for the tightening, we proceed as follows.
    if tighten_triplet:
        self._tighten_triplet(max_iterations, later_iter, max_triplets, prolong)
    # Decode the result: for every single-variable factor, read off its
    # value at the best assignment found.
    self.best_decoded_result = {factor.scope()[0]: factor.values[self.best_assignment[frozenset(factor.scope())]] for factor in self.model.factors if len(factor.scope()) == 1}
    return self.best_decoded_result
def indexOf(self, editor):
    """Returns the index of the inputed editor, or -1 if not found.

    :param editor | <QtGui.QWidget>

    :return <int>
    """
    layout = self.layout()
    matches = (i for i in range(layout.count())
               if layout.itemAt(i).widget() == editor)
    return next(matches, -1)
def AddProperty(self, interface, name, value):
    '''Add property to this object

    interface: D-Bus interface to add this to. For convenience you can
               specify '' here to add the property to the object's main
               interface (as specified on construction).
    name: Property name.
    value: Property value.
    '''
    if not interface:
        interface = self.interface
    try:
        # Probe for an existing property: a successful lookup means a
        # duplicate, which is an error; the expected path raises KeyError.
        self.props[interface][name]
        raise dbus.exceptions.DBusException('property %s already exists' % name, name=self.interface + '.PropertyExists')
    except KeyError:
        # this is what we expect
        pass
    # copy.copy removes one level of variant-ness, which means that the
    # types get exported in introspection data correctly, but we can't do
    # this for container types.
    if not (isinstance(value, dbus.Dictionary) or isinstance(value, dbus.Array)):
        value = copy.copy(value)
    self.props.setdefault(interface, {})[name] = value
def _copy_with_changed_callback(self, new_callback):
    '''Dev API used to wrap the callback with decorators.

    Returns a new PeriodicCallback identical to this one except for the
    callback, preserving the document, period, and id.
    '''
    return PeriodicCallback(self._document, new_callback, self._period, self._id)
def multi_index(idx, dim):
    """Single to multi-index using graded reverse lexicographical notation.

    Parameters
    ----------
    idx : int
        Index in integer notation
    dim : int
        The number of dimensions in the multi-index notation

    Returns
    -------
    out : tuple
        Multi-index of `idx` with `len(out) == dim`

    Examples
    --------
    >>> for idx in range(5):
    ...     print(chaospy.bertran.multi_index(idx, 3))
    (0, 0, 0)
    (1, 0, 0)
    (0, 1, 0)
    (0, 0, 1)
    (2, 0, 0)

    See Also
    --------
    single_index
    """
    def _rec(idx, dim):
        # Determine the leading entry of the multi-index, then recurse on
        # the remaining dimensions with the residual index.
        idxn = idxm = 0
        if not dim:
            return ()
        if idx == 0:
            return (0,) * dim
        # Advance idxn to the total-degree bracket containing idx
        # (terms() counts indices of lower degree).
        while terms(idxn, dim) <= idx:
            idxn += 1
        idx -= terms(idxn - 1, dim)
        if idx == 0:
            return (idxn,) + (0,) * (dim - 1)
        while terms(idxm, dim - 1) <= idx:
            idxm += 1
        return (int(idxn - idxm),) + _rec(idx, dim - 1)
    return _rec(idx, dim)
def _set_config_defaults ( self , request , form , obj = None ) :
"""Cycle through app _ config _ values and sets the form value according to the
options in the current apphook config .
self . app _ config _ values is a dictionary containing config options as keys , form fields as
values : :
app _ config _ values = {
' apphook _ config ' : ' form _ field ' ,
: param request : request object
: param form : model form for the current model
: param obj : current object
: return : form with defaults set""" | for config_option , field in self . app_config_values . items ( ) :
if field in form . base_fields :
form . base_fields [ field ] . initial = self . get_config_data ( request , obj , config_option )
return form |
def expm1(x):
    """Calculate exp(x) - 1"""
    if not isinstance(x, UncertainFunction):
        return np.expm1(x)
    # Propagate through the Monte Carlo sample points.
    return UncertainFunction(np.expm1(x._mcpts))
def to_table_data(self):
    """Yield the buffered key/value records as a single TableData.

    :raises ValueError:
    :raises pytablereader.error.ValidationError:
    """
    self._validate_source_data()
    self._loader.inc_table_count()
    # Emit one two-column ("key", "value") table built from the buffer.
    yield TableData(self._make_table_name(), ["key", "value"], [record for record in self._buffer.items()], dp_extractor=self._loader.dp_extractor, type_hints=self._extract_type_hints(), )
def _get_formatted_val(self, obj, name, column):
    """Format the value of the attribute 'name' from the given object"""
    # Walk the dotted attribute path; stop at the first missing link,
    # leaving the value as None.
    current = obj
    for part in name.split('.'):
        current = getattr(current, part, None)
        if current is None:
            break
    return format_value(column, current, self.config_key)
def InsertIntArg(self, string='', **unused_kwargs):
    """Inserts an Integer argument."""
    try:
        value = int(string)
    except (TypeError, ValueError):
        raise errors.ParseError('{0:s} is not a valid integer.'.format(string))
    else:
        return self.InsertArg(value)
def add_layer(self, layer):
    """Add a layer (TiledTileLayer, TiledImageLayer, or TiledObjectGroup)

    :param layer: TiledTileLayer, TiledImageLayer, TiledObjectGroup object
    """
    accepted = (TiledTileLayer, TiledImageLayer, TiledObjectGroup)
    assert (isinstance(layer, accepted))
    self.layers.append(layer)
    # Index the layer by name for fast lookup.
    self.layernames[layer.name] = layer
def setup_config(config_directories=None, config_file=None, default_filename="opentc.yml"):
    """Setup configuration.

    Looks for a readable YAML config file: when ``config_file`` is given,
    only that path is checked; otherwise each directory in
    ``config_directories`` is searched for ``default_filename``.

    Args:
        config_directories: directories to search (entries may be None).
        config_file: explicit path to a config file; takes precedence.
        default_filename: file name looked up inside each directory.

    Returns:
        dict: the parsed YAML configuration.

    Exits the process with status 1 when no readable file is found.
    """
    config_found = False
    config_file_path = None
    if config_file:
        config_file_path = config_file
        if os.path.isfile(config_file_path) and os.access(config_file_path, os.R_OK):
            config_found = True
    else:
        # Guard against config_directories=None (the default): the original
        # code raised TypeError when called with no arguments.
        for directory in config_directories or []:
            if directory is None:
                continue
            config_file_path = os.path.join(directory, default_filename)
            if os.path.isfile(config_file_path) and os.access(config_file_path, os.R_OK):
                config_found = True
                break
    if config_found:
        with open(config_file_path, 'rt') as ymlfile:
            config = yaml.safe_load(ymlfile.read())
        return config
    else:
        print("The configuration file is not found.")
        exit(1)
def copy(self):
    '''Returns a copy of self'''
    # Local renamed from `copy` to avoid shadowing the stdlib copy module.
    duplicate = Response(self.app)
    duplicate.status = self.status
    duplicate.headers = self.headers.copy()
    duplicate.content_type = self.content_type
    return duplicate
def isoparse(iso_str):
    """Parses the limited subset of `ISO8601-formatted time`_ strings as
    returned by :meth:`datetime.datetime.isoformat`.

    >>> epoch_dt = datetime.utcfromtimestamp(0)
    >>> iso_str = epoch_dt.isoformat()
    >>> print(iso_str)
    1970-01-01T00:00:00
    >>> isoparse(iso_str)
    datetime.datetime(1970, 1, 1, 0, 0)

    >>> utcnow = datetime.utcnow()
    >>> utcnow == isoparse(utcnow.isoformat())
    True

    For further datetime parsing, see the `iso8601`_ package for strict
    ISO parsing and `dateutil`_ package for loose parsing and more.

    .. _ISO8601-formatted time: https://en.wikipedia.org/wiki/ISO_8601
    .. _iso8601: https://pypi.python.org/pypi/iso8601
    .. _dateutil: https://pypi.python.org/pypi/python-dateutil
    """
    # Split on every non-digit run, leaving the numeric components
    # (year, month, day, hour, minute, second, microsecond).
    pieces = _NONDIGIT_RE.split(iso_str)
    return datetime(*(int(piece) for piece in pieces))
def rollout(policy, env, timestep_limit=None, add_noise=False, offset=0):
    """Do a rollout.

    If add_noise is True, the rollout will take noisy actions with
    noise drawn from that stream. Otherwise, no action noise will be added.

    Parameters
    ----------
    policy: tf object
        policy from which to draw actions
    env: GymEnv
        environment from which to draw rewards, done, and next state
    timestep_limit: int, optional
        steps after which to end the rollout
    add_noise: bool, optional
        indicates whether exploratory action noise should be added
    offset: int, optional
        value to subtract from the reward. For example, survival bonus
        from humanoid

    Returns:
        (rews, t): float32 array of per-step rewards and the step count.
    """
    # Cap the rollout at the environment's own episode limit.
    env_timestep_limit = env.spec.max_episode_steps
    timestep_limit = (env_timestep_limit if timestep_limit is None else min(timestep_limit, env_timestep_limit))
    rews = []
    t = 0
    observation = env.reset()
    # `timestep_limit or 999999` guards against a None/0 limit.
    for _ in range(timestep_limit or 999999):
        ac = policy.compute(observation, add_noise=add_noise, update=True)[0]
        observation, rew, done, _ = env.step(ac)
        # Subtract the (absolute) offset, e.g. a survival bonus.
        rew -= np.abs(offset)
        rews.append(rew)
        t += 1
        if done:
            break
    rews = np.array(rews, dtype=np.float32)
    return rews, t
def _key_changed(self, client, cnxn_id, entry, data=None):
    """Callback when a gconf key changes"""
    # Strip the GCONF_DIR prefix to obtain the key name relative to our
    # configuration root.
    key = self._fix_key(entry.key)[len(self.GCONF_DIR):]
    # Fall back to the registered default when the entry has no usable value.
    value = self._get_value(entry.value, self.DEFAULTS[key])
    self.emit('conf-changed', key, value)
def GET_account_at(self, path_info, account_addr, block_height):
    """Get the state(s) of an account at a particular point in history.

    Returns [{...}]
    """
    if not check_account_address(account_addr):
        return self._reply_json({'error': 'Invalid address'}, status_code=400)
    try:
        block_height = int(block_height)
        assert check_block(block_height)
    except:
        # Deliberate catch-all: any parse or validation failure is a 400.
        return self._reply_json({'error': 'Invalid block height'}, status_code=400)
    blockstackd_url = get_blockstackd_url()
    res = blockstackd_client.get_account_at(account_addr, block_height, hostport=blockstackd_url)
    if json_is_error(res):
        log.error("Failed to list account history for {} at {}: {}".format(account_addr, block_height, res['error']))
        return self._reply_json({'error': 'Failed to get account state for {} at {}'.format(account_addr, block_height)}, status_code=res.get('http_status', 500))
    self._reply_json(res)
    return
def statusLine(self):
    'String of row and column stats.'
    # "row <cur>/<total> (<n> selected) col <cur>/<total> (<n> visible)"
    return 'row %d/%d (%d selected) col %d/%d (%d visible)' % (
        self.cursorRowIndex, self.nRows, len(self._selectedRows),
        self.cursorColIndex, self.nCols, len(self.visibleCols))
def scheduler(broker=None):
    """Creates a task from a schedule at the scheduled time and schedules next run"""
    if not broker:
        broker = get_broker()
    db.close_old_connections()
    try:
        # Only schedules that still have repeats left and are past due.
        for s in Schedule.objects.exclude(repeats=0).filter(next_run__lt=timezone.now()):
            args = ()
            kwargs = {}
            # get args, kwargs and hook
            if s.kwargs:
                try:
                    # eval should be safe here because dict()
                    kwargs = eval('dict({})'.format(s.kwargs))
                except SyntaxError:
                    kwargs = {}
            if s.args:
                args = ast.literal_eval(s.args)
                # single value won't eval to tuple, so:
                if type(args) != tuple:
                    args = (args,)
            q_options = kwargs.get('q_options', {})
            if s.hook:
                q_options['hook'] = s.hook
            # set up the next run time
            if not s.schedule_type == s.ONCE:
                next_run = arrow.get(s.next_run)
                # Advance next_run by one interval at a time until it lies in
                # the future; with CATCH_UP set, a single step is enough so
                # missed runs are replayed.
                while True:
                    if s.schedule_type == s.MINUTES:
                        next_run = next_run.replace(minutes=+(s.minutes or 1))
                    elif s.schedule_type == s.HOURLY:
                        next_run = next_run.replace(hours=+1)
                    elif s.schedule_type == s.DAILY:
                        next_run = next_run.replace(days=+1)
                    elif s.schedule_type == s.WEEKLY:
                        next_run = next_run.replace(weeks=+1)
                    elif s.schedule_type == s.MONTHLY:
                        next_run = next_run.replace(months=+1)
                    elif s.schedule_type == s.QUARTERLY:
                        next_run = next_run.replace(months=+3)
                    elif s.schedule_type == s.YEARLY:
                        next_run = next_run.replace(years=+1)
                    if Conf.CATCH_UP or next_run > arrow.utcnow():
                        break
                s.next_run = next_run.datetime
                s.repeats += -1
            # send it to the cluster
            q_options['broker'] = broker
            q_options['group'] = q_options.get('group', s.name or s.id)
            kwargs['q_options'] = q_options
            s.task = django_q.tasks.async_task(s.func, *args, **kwargs)
            # log it
            if not s.task:
                logger.error(_('{} failed to create a task from schedule [{}]').format(current_process().name, s.name or s.id))
            else:
                logger.info(_('{} created a task from schedule [{}]').format(current_process().name, s.name or s.id))
            # default behavior is to delete a ONCE schedule
            if s.schedule_type == s.ONCE:
                if s.repeats < 0:
                    s.delete()
                    continue
                # but not if it has a positive repeats
                s.repeats = 0
            # save the schedule
            s.save()
    except Exception as e:
        logger.error(e)
def copy(self, target=None, name=None):
    """Asynchronously creates a copy of this DriveItem and all it's
    child elements.

    :param target: target location to move to.
        If it's a drive the item will be moved to the root folder.
    :type target: drive.Folder or Drive
    :param name: a new name for the copy.
    :rtype: CopyOperation
    """
    if target is None and name is None:
        raise ValueError('Must provide a target or a name (or both)')
    # Resolve the destination folder and drive ids from the target.
    if isinstance(target, Folder):
        target_id = target.object_id
        drive_id = target.drive_id
    elif isinstance(target, Drive):
        # we need the root folder
        root_folder = target.get_root_folder()
        if not root_folder:
            return None
        target_id = root_folder.object_id
        drive_id = root_folder.drive_id
    elif target is None:
        target_id = None
        drive_id = None
    else:
        raise ValueError('Target, if provided, must be a Folder or Drive')
    if not self.object_id:
        return None
    if target_id == 'root':
        raise ValueError("When copying, target id can't be 'root'")
    url = self.build_url(self._endpoints.get('copy').format(id=self.object_id))
    if target_id and drive_id:
        data = {'parentReference': {'id': target_id, 'driveId': drive_id}}
    else:
        data = {}
    if name:
        # incorporate the extension if the name provided has none.
        if not Path(name).suffix and self.name:
            name = name + Path(self.name).suffix
        data['name'] = name
    response = self.con.post(url, data=data)
    if not response:
        return None
    # Find out if the server has run a Sync or Async operation
    location = response.headers.get('Location', None)
    # NOTE(review): if the Location header is absent, `'monitor' in None`
    # raises TypeError — confirm the API always returns it on success.
    if 'monitor' in location:
        # Async operation
        return CopyOperation(parent=self.drive, monitor_url=location)
    else:
        # Sync operation. Item is ready to be retrieved
        path = urlparse(location).path
        item_id = path.split('/')[-1]
        return CopyOperation(parent=self.drive, item_id=item_id)
def samefile(self, other_path):
    """Return whether other_path refers to the same file as this one
    (as returned by os.path.samefile())."""
    if hasattr(os.path, "samestat"):
        # POSIX-style check: compare the stat results of both paths.
        my_stat = self.stat()
        try:
            their_stat = other_path.stat()
        except AttributeError:
            # Plain string / path-like argument: stat it ourselves.
            their_stat = os.stat(other_path)
        return os.path.samestat(my_stat, their_stat)
    # Windows fallback without samestat: compare unique path ids.
    name_a = six.text_type(self)
    name_b = six.text_type(other_path)
    id_a = _win32_get_unique_path_id(name_a)
    id_b = _win32_get_unique_path_id(name_b)
    return id_a == id_b
def rot(inputArray, theta=0, pc=(0, 0)):
    """Rotate the points in *inputArray* by *theta* degrees about *pc*.

    :param inputArray: input array or list,
        e.g. np.array([[0, 0], [0, 1], [0, 2]]) or [[0, 0], [0, 1], [0, 2]]
    :param theta: rotation angle in degrees
    :param pc: central point coords (x, y) regarding to rotation
    :return: rotated numpy array
    """
    pts = np.asarray(inputArray)
    centre = np.asarray(pc)
    # Degrees to radians before building the 2x2 rotation matrix.
    rad = np.deg2rad(theta)
    cos_t, sin_t = np.cos(rad), np.sin(rad)
    rotation = np.array([[cos_t, -sin_t], [sin_t, cos_t]])
    # Shift to the rotation centre, rotate, then shift back.
    return (pts - centre) @ rotation.T + centre
def congruent(self, other):
    '''A congruent B

    True iff all angles of 'A' equal angles in 'B' and
    all side lengths of 'A' equal all side lengths of 'B', boolean.'''
    my_angles, their_angles = set(self.angles), set(other.angles)
    if len(my_angles) != len(their_angles):
        return False
    if my_angles.difference(their_angles):
        return False
    # Angles match; now compare the side-length sets the same way.
    my_sides, their_sides = set(self.sides), set(other.sides)
    if len(my_sides) != len(their_sides):
        return False
    return not my_sides.difference(their_sides)
def parse_network(network_fp):
    """Parse an edge-list network file into a graph plus a gene set.

    Attribute:
        network_fp (str): File path to a network file; the first line is
            a header, each following line holds two whitespace-separated
            gene names forming an edge.
    """
    graph = nx.Graph()
    gene_set = set()
    with open(network_fp) as handle:
        handle.readline()  # Skip header.
        for line in handle:
            gene1, gene2 = line.strip().split()
            graph.add_edge(gene1, gene2)
            # Track every gene seen on either end of an edge.
            gene_set.update((gene1, gene2))
    return graph, gene_set
def hex(self):
    """Emit the address in bare hex format (aabbcc)."""
    if self.addr is None:
        # No address set: fall back to the all-zero representation.
        return '000000'
    return binascii.hexlify(self.addr).decode()
def has_preview(self):
    """stub"""
    # NOTE: file record types don't seem to be implemented correctly for
    # raw edx Question objects, so probe the map defensively.
    my_map = self.my_osid_object._my_map
    if 'fileIds' not in my_map:
        return False
    if 'preview' not in my_map['fileIds']:
        return False
    preview = my_map['fileIds']['preview']
    # An explicit None counts as "no preview"; otherwise use truthiness.
    if preview is None:
        return False
    return bool(preview)
def render_content(text, user_content=False, context=None, username=None, password=None, render_offline=False, api_url=None):
    """Renders the specified markup and returns the result."""
    # Pick the offline renderer only when explicitly requested.
    if render_offline:
        renderer = OfflineRenderer(user_content, context)
    else:
        renderer = GitHubRenderer(user_content, context, api_url)
    # Only authenticate when at least one credential was supplied.
    auth = (username, password) if username or password else None
    return renderer.render(text, auth)
def upcaseTokens(s, l, t):
    """Helper parse action to convert tokens to upper case.

    s (source string) and l (location) are required by the parse-action
    signature but unused here.
    """
    return [_ustr(tok).upper() for tok in t]
def yaml_force_unicode():
    """Force pyyaml to return unicode values."""
    # Modified from http://stackoverflow.com/a/2967461
    if sys.version_info[0] != 2:
        # Python 3 strings are already unicode; nothing to do.
        return

    def construct_func(self, node):
        return self.construct_scalar(node)

    for loader in (yaml.Loader, yaml.SafeLoader):
        loader.add_constructor(U('tag:yaml.org,2002:str'), construct_func)
def n_to_g(self, n_interval):
    """Convert a transcript (n.) interval to a genomic (g.) interval.

    :param n_interval: hgvs interval with 1-based transcript positions;
        start/end may carry intronic offsets
    :return: hgvs.location.Interval of genomic SimplePositions; flagged
        uncertain when either end maps into an alignment gap (D/I cigar)
    """
    # Convert 1-based n. coordinates to 0-based positions.
    frs, fre = n_interval.start.base - 1, n_interval.end.base - 1
    start_offset, end_offset = n_interval.start.offset, n_interval.end.offset
    if self.strand == -1:
        # Minus strand: flip positions across the transcript length and
        # swap/negate the intronic offsets accordingly.
        fre, frs = self.tgt_len - frs - 1, self.tgt_len - fre - 1
        start_offset, end_offset = -end_offset, -start_offset
    # returns the genomic range start (grs) and end (gre)
    grs, _, grs_cigar = self._map(from_pos=self.tgt_pos, to_pos=self.ref_pos, pos=frs, base="start")
    gre, _, gre_cigar = self._map(from_pos=self.tgt_pos, to_pos=self.ref_pos, pos=fre, base="end")
    # Re-apply the genomic offset and return to 1-based coordinates.
    grs, gre = grs + self.gc_offset + 1, gre + self.gc_offset + 1
    # Apply the intronic offsets to get the final genomic start/end.
    gs, ge = grs + start_offset, gre + end_offset
    # The returned interval would be uncertain when locating at alignment gaps
    return hgvs.location.Interval(start=hgvs.location.SimplePosition(gs, uncertain=n_interval.start.uncertain), end=hgvs.location.SimplePosition(ge, uncertain=n_interval.end.uncertain), uncertain=grs_cigar in 'DI' or gre_cigar in 'DI')
def add_source(self, evidence_line, source, label=None, src_type=None):
    """Applies the triples:
        <evidence> <dc:source> <source>
        <source> <rdf:type> <type>
        <source> <rdfs:label> "label"

    TODO this should belong in a higher level class

    :param evidence_line: str curie
    :param source: str source as curie
    :param label: optional, str label for the source
    :param src_type: optional, str type as curie
    :return: None
    """
    # Link the evidence line to its source, then describe the source node.
    self.graph.addTriple(
        evidence_line, self.globaltt['source'], source)
    self.model.addIndividualToGraph(source, label, src_type)
def get_daemon_stats(self, details=False):
    """Send a HTTP request to the satellite (GET /get_daemon_stats)

    :param details: whether to request detailed statistics
    :return: Daemon statistics
    :rtype: dict
    """
    logger.debug("Get daemon statistics for %s, %s %s",
                 self.name, self.alive, self.reachable)
    # Append the details flag as a query parameter only when requested.
    endpoint = 'stats'
    if details:
        endpoint += '?details=1'
    return self.con.get(endpoint)
def end_policy_update(self):
    """Inform Metrics class that the policy update has finished."""
    # Elapsed time for the policy update itself (0 if it never started).
    if self.time_policy_update_start:
        self.delta_policy_update = time() - self.time_policy_update_start
    else:
        self.delta_policy_update = 0
    delta_train_start = time() - self.time_training_start
    LOGGER.debug(
        " Policy Update Training Metrics for {}: "
        "\n\t\tTime to update Policy: {:0.3f} s \n"
        "\t\tTime elapsed since training: {:0.3f} s \n"
        "\t\tTime for experience collection: {:0.3f} s \n"
        "\t\tBuffer Length: {} \n"
        "\t\tReturns : {:0.3f}\n".format(
            self.brain_name,
            self.delta_policy_update,
            delta_train_start,
            self.delta_last_experience_collection,
            self.last_buffer_length,
            self.last_mean_return))
    self._add_row(delta_train_start)
def setuptools_install_options(local_storage_folder):
    """Return options to make setuptools use installations from the given
    folder.  No other installation source is allowed.
    """
    if local_storage_folder is None:
        return []
    # setuptools' find-links parameter is a space-separated list of link
    # sources (local paths, file: URLs pointing to folders, or URLs of a
    # file containing HTML <a href="..."/> links), so an individual item
    # may never contain a space.  Folders whose path contains spaces are
    # therefore passed as a local file URL instead, since URLs encode
    # spaces as '%20'.  Such a folder URL must end with a trailing '/'
    # for setuptools to recognize it as a folder; setuptools then builds
    # a link file internally (from 'index.html' if present, otherwise
    # from the folder's top-level items).  Explicit link files must use
    # the '.html' extension so urllib2.open() reports a 'text/html'
    # content type.  Tested with Python 2.4.3/2.4.4 & setuptools
    # 1.4/1.4.2 as well as Python 3.4 & setuptools 3.3.
    if " " not in local_storage_folder:
        find_links_param = local_storage_folder
    else:
        find_links_param = utility.path_to_URL(local_storage_folder)
        if not find_links_param.endswith("/"):
            find_links_param += "/"
    return ["-f", find_links_param, "--allow-hosts=None"]
def run(self, repo: str, branch: str, task: Task, *, depth: DepthDefinitionType = 1, reference: ReferenceDefinitionType = None) -> Result:
    """Runs the ``task`` using the configured backend.

    :param repo: Target git repository
    :param branch: Target git branch
    :param task: Task which will be run in the target repository
    :param depth: How many commits back should the repo be cloned in case
        the target repository isn't cloned yet. Defaults to 1, must be
        bigger than 0. No limit will be used if ``None`` is set.
    :param reference: A path to a repository from which the target
        repository is forked, to save bandwidth; ``--dissociate`` is used
        if set.
    :return: A :class:`Result` instance with the output of the task.
    :raise PullError: If the repository can't be cloned or pulled
    :raise BuildError: If the task fails.
    """
    # Validate/normalize all inputs before touching the network or disk.
    self.validate_repo_url(repo)
    depth = self.validate_depth(depth)
    reference = self.validate_reference(reference)
    logger.info("Running Arca task %r for repo '%s' in branch '%s'", task, repo, branch)
    # Clone or update the local checkout of the requested branch.
    git_repo, repo_path = self.get_files(repo, branch, depth=depth, reference=reference)

    def create_value():
        # Only executed on a cache miss: run the task via the backend.
        logger.debug("Value not in cache, creating.")
        return self.backend.run(repo, branch, task, git_repo, repo_path)

    cache_key = self.cache_key(repo, branch, task, git_repo)
    logger.debug("Cache key is %s", cache_key)
    # Serve a cached result when available, otherwise compute and store it.
    return self.region.get_or_create(cache_key, create_value, should_cache_fn=self.should_cache_fn)
def cli(conf):
    """OpenVPN status initdb method.

    Drops and recreates the ``client_status`` table in the status
    database configured by *conf*.  Errors are printed, not raised.
    """
    try:
        config = init_config(conf)
        debug = config.getboolean('DEFAULT', 'debug')
        conn = get_conn(config.get('DEFAULT', 'statusdb'))
        cur = conn.cursor()
        sqlstr = '''create table client_status
        (session_id text PRIMARY KEY, username text, userip text,
        realip text, realport int,ctime int,
        inbytes int, outbytes int,
        acct_interval int, session_timeout int, uptime int)
        '''
        try:
            cur.execute('drop table client_status')
        except Exception:
            # The table may not exist yet; that's fine on a first run.
            # (Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.)
            pass
        cur.execute(sqlstr)
        # print() form works on both Python 2 and 3; the old print
        # statement was a SyntaxError on Python 3.
        print('flush client status database')
        conn.commit()
        conn.close()
    except Exception:
        traceback.print_exc()
def update(self, value=None, label=None):
    """Update the progress bar.

    @type value: int
    @type label: str
    """
    # Replace the label only when a new one was supplied.
    if label:
        self.label = label
    super(ProgressBar, self).update(value)
def uavionix_adsb_out_cfg_send(self, ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect, force_mavlink1=False):
    '''
    Static data to configure the ADS-B transponder (send within 10 sec of
    a POR and every 10 sec thereafter)

    ICAO            : Vehicle address (24 bit) (uint32_t)
    callsign        : Vehicle identifier (8 characters, null terminated, valid characters are A-Z, 0-9, " " only) (char)
    emitterType     : Transmitting vehicle type. See ADSB_EMITTER_TYPE enum (uint8_t)
    aircraftSize    : Aircraft length and width encoding (table 2-35 of DO-282B) (uint8_t)
    gpsOffsetLat    : GPS antenna lateral offset (table 2-36 of DO-282B) (uint8_t)
    gpsOffsetLon    : GPS antenna longitudinal offset from nose [if non-zero, take position (in meters) divide by 2 and add one] (table 2-37 DO-282B) (uint8_t)
    stallSpeed      : Aircraft stall speed in cm/s (uint16_t)
    rfSelect        : ADS-B transponder reciever and transmit enable flags (uint8_t)
    force_mavlink1  : if True, force encoding the message as MAVLink protocol v1
    '''
    # Encode the config message, then hand it to the shared send path.
    return self.send(self.uavionix_adsb_out_cfg_encode(ICAO, callsign, emitterType, aircraftSize, gpsOffsetLat, gpsOffsetLon, stallSpeed, rfSelect), force_mavlink1=force_mavlink1)
def graphDensityContourPlot(G, iters=50, layout=None, layoutScaleFactor=1, overlay=False, nodeSize=10, axisSamples=100, blurringFactor=.1, contours=15, graphType='coloured'):
    """Creates a 3D plot giving the density of nodes on a 2D plane, as a surface in 3D.

    Most of the options are for tweaking the final appearance. _layout_ and _layoutScaleFactor_ allow a pre-layout graph to be provided. If a layout is not provided the [networkx.spring_layout()](https://networkx.github.io/documentation/latest/reference/generated/networkx.drawing.layout.spring_layout.html) is used after _iters_ iterations. Then, once the graph has been laid out a grid of _axisSamples_ cells by _axisSamples_ cells is overlaid and the number of nodes in each cell is determined, a gaussian blur is then applied with a sigma of _blurringFactor_. This then forms a surface in 3 dimensions, which is then plotted.

    If you find the resultant image looks too banded raise the _contours_ number to ~50.

    # Parameters

    _G_ : `networkx Graph`

    > The graph to be plotted

    _iters_ : `optional [int]`

    > Default `50`, the number of iterations for the spring layout if _layout_ is not provided.

    _layout_ : `optional [networkx layout dictionary]`

    > Default `None`, if provided will be used as a layout of the graph, the maximum distance from the origin along any axis must also be given as _layoutScaleFactor_, which is by default `1`.

    _layoutScaleFactor_ : `optional [double]`

    > Default `1`, the maximum distance from the origin allowed along any axis given by _layout_, i.e. the layout must fit in a square centered at the origin with side lengths 2 * _layoutScaleFactor_

    _overlay_ : `optional [bool]`

    > Default `False`, if `True` the 2D graph will be plotted on the X-Y plane at Z = 0.

    _nodeSize_ : `optional [double]`

    > Default `10`, the size of the nodes drawn in the overlay

    _axisSamples_ : `optional [int]`

    > Default 100, the number of cells used along each axis for sampling. A larger number will mean a lower average density.

    _blurringFactor_ : `optional [double]`

    > Default `0.1`, the sigma value used for smoothing the surface density. The higher this number the smoother the surface.

    _contours_ : `optional [int]`

    > Default 15, the number of different heights drawn. If this number is low the resultant image will look very banded. It is recommended this be raised above `50` if you want your images to look good, **Warning** this will make them much slower to generate and interact with.

    _graphType_ : `optional [str]`

    > Default `'coloured'`, if `'coloured'` the image will have a density based colourization applied, the only other option is `'solid'` which removes the colourization.
    """
    # Importing Axes3D registers the '3d' projection used by fig.gca below.
    from mpl_toolkits.mplot3d import Axes3D
    if not isinstance(G, nx.classes.digraph.DiGraph) and not isinstance(G, nx.classes.graph.Graph):
        raise TypeError("{} is not a valid input.".format(type(G)))
    if layout is None:
        # No layout given: compute a spring layout scaled onto the grid.
        layout = nx.spring_layout(G, scale=axisSamples - 1, iterations=iters)
        grid = np.zeros([axisSamples, axisSamples], dtype=np.float32)
        for v in layout.values():
            # Round each position to its containing grid cell and count it.
            x, y = tuple(int(x) for x in v.round(0))
            grid[y][x] += 1
    elif isinstance(layout, dict):
        # Copy so the caller's layout dict is not mutated by the rescaling.
        layout = layout.copy()
        grid = np.zeros([axisSamples, axisSamples], dtype=np.float32)
        # Rescale positions from the caller's coordinate range onto the grid.
        multFactor = (axisSamples - 1) / layoutScaleFactor
        for k in layout.keys():
            tmpPos = layout[k] * multFactor
            layout[k] = tmpPos
            x, y = tuple(int(x) for x in tmpPos.round(0))
            grid[y][x] += 1
    else:
        raise TypeError("{} is not a valid input.".format(type(layout)))
    fig = plt.figure()
    # axis = fig.add_subplot(111)
    axis = fig.gca(projection='3d')
    if overlay:
        # Draw the flat graph on the X-Y plane (nodes only, no edges/labels).
        nx.draw_networkx(G, pos=layout, ax=axis, node_size=nodeSize, with_labels=False, edgelist=[])
    # Smooth the raw per-cell counts into a continuous density surface.
    grid = ndi.gaussian_filter(grid, (blurringFactor * axisSamples, blurringFactor * axisSamples))
    X = Y = np.arange(0, axisSamples, 1)
    X, Y = np.meshgrid(X, Y)
    if graphType == "solid":
        CS = axis.plot_surface(X, Y, grid)
    else:
        # Default 'coloured': filled contours give density-based colouring.
        CS = axis.contourf(X, Y, grid, contours)
    axis.set_xlabel('X')
    axis.set_ylabel('Y')
    axis.set_zlabel('Node Density')
def get_learning_path_session_for_objective_bank(self, objective_bank_id=None):
    """Gets the OsidSession associated with the learning path service
    for the given objective bank.

    arg:    objective_bank_id (osid.id.Id): the Id of the ObjectiveBank
    return: (osid.learning.LearningPathSession) - a LearningPathSession
    raise:  NotFound - no objective bank found by the given Id
    raise:  NullArgument - objective_bank_id is null
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - supports_learning_path() or
            supports_visible_federation() is false
    compliance: optional - This method must be implemented if
                supports_learning_path() and
                supports_visible_federation() are true
    """
    if not objective_bank_id:
        raise NullArgument
    if not self.supports_learning_path():
        raise Unimplemented()
    # The sessions module is imported lazily; failure to import it is an
    # operational failure rather than a programming error.
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    try:
        return sessions.LearningPathSession(objective_bank_id, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
def request(self, path_segment, method="GET", headers=None, body="", owner=None, app=None, sharing=None):
    """Issues an arbitrary HTTP request to the REST path segment.

    This method is named to match ``httplib.request``. It makes a single
    round trip to the server. If *owner*, *app*, and *sharing* are
    omitted, the default :class:`Context` namespace is used.

    :raises AuthenticationError: Raised when the ``Context`` object is
        not logged in.
    :raises HTTPError: Raised when an error occurred in a GET operation
        from *path_segment*.
    :param path_segment: A REST path segment.
    :type path_segment: ``string``
    :param method: The HTTP method to use (optional).
    :type method: ``string``
    :param headers: List of extra HTTP headers to send (optional).
    :type headers: ``list`` of 2-tuples.
    :param body: Content of the HTTP request (optional).
    :type body: ``string``
    :param owner: The owner context of the namespace (optional).
    :type owner: ``string``
    :param app: The app context of the namespace (optional).
    :type app: ``string``
    :param sharing: The sharing mode of the namespace (optional).
    :type sharing: ``string``
    :return: The response from the server.
    :rtype: ``dict`` with keys ``body``, ``headers``, ``reason``,
        and ``status``
    """
    extra_headers = headers if headers is not None else []
    # Absolute URL: authority plus the namespaced path.
    path = self.authority + self._abspath(
        path_segment, owner=owner, app=app, sharing=sharing)
    # Caller headers come first, then connection-wide and auth headers.
    all_headers = extra_headers + self.additional_headers + self._auth_headers
    logging.debug("%s request to %s (headers: %s, body: %s)",
                  method, path, str(all_headers), repr(body))
    message = {'method': method, 'headers': all_headers, 'body': body}
    return self.http.request(path, message)
def delete_user_favorite(self, series_id):
    """Deletes the series with the provided id from the favorites list
    of the current user.

    :param series_id: The TheTVDB id of the series.
    :return: a python dictionary with either the result of the search or
        an error from TheTVDB.
    """
    url = self.API_BASE_URL + '/user/favorites/%d' % series_id
    raw_response = requests_util.run_request(
        'delete', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
def forward_message(self, from_chat_id, message_id):
    """Forward a message from another chat to this chat.

    :param int from_chat_id: ID of the chat to forward the message from
    :param int message_id: ID of the message to forward
    """
    # Delegate to the Bot API "forwardMessage" method.
    payload = {
        "chat_id": self.id,
        "from_chat_id": from_chat_id,
        "message_id": message_id,
    }
    return self.bot.api_call("forwardMessage", **payload)
def append_item(self, item):
    """Append new item to set.

    :raises TypeError: if *item* is not an LR0Item instance.
    """
    if isinstance(item, LR0Item):
        self.itemlist.append(item)
    else:
        raise TypeError
def put_metric_data(self, namespace, name, value=None, timestamp=None, unit=None, dimensions=None, statistics=None):
    """Publishes metric data points to Amazon CloudWatch. Amazon
    CloudWatch associates the data points with the specified metric. If
    the specified metric does not exist, Amazon CloudWatch creates the
    metric. If a list is specified for some, but not all, of the
    arguments, the remaining arguments are repeated a corresponding
    number of times.

    :type namespace: str
    :param namespace: The namespace of the metric.
    :type name: str or list
    :param name: The name of the metric.
    :type value: float or list
    :param value: The value for the metric.
    :type timestamp: datetime or list
    :param timestamp: The time stamp used for the metric. If not
        specified, defaults to the time the metric data was received.
    :type unit: string or list
    :param unit: The unit of the metric. Valid values: Seconds |
        Microseconds | Milliseconds | Bytes | Kilobytes | Megabytes |
        Gigabytes | Terabytes | Bits | Kilobits | Megabits | Gigabits |
        Terabits | Percent | Count | Bytes/Second | Kilobytes/Second |
        Megabytes/Second | Gigabytes/Second | Terabytes/Second |
        Bits/Second | Kilobits/Second | Megabits/Second |
        Gigabits/Second | Terabits/Second | Count/Second | None
    :type dimensions: dict
    :param dimensions: Extra name/value pairs to associate with the
        metric, i.e.: {'name1': value1, 'name2': (value2, value3)}
    :type statistics: dict or list
    :param statistics: Use a statistic set instead of a value, e.g.:
        {'maximum': 30, 'minimum': 1, 'samplecount': 100, 'sum': 10000}
    """
    # Start from the namespace, then let the shared helper fold in all
    # per-metric parameters before issuing the API call.
    params = dict(Namespace=namespace)
    self.build_put_params(params, name, value=value, timestamp=timestamp,
                          unit=unit, dimensions=dimensions,
                          statistics=statistics)
    return self.get_status('PutMetricData', params)
def add_children_gos(self, gos):
    """Return children of input GO ids plus the input GO ids, as a set."""
    obo_dag = self.obo_dag
    result = set()
    for go_id in gos:
        term = obo_dag[go_id]
        # All descendants of the term, plus the term itself.
        result.update(term.get_all_children())
        result.add(term.id)
    return result
def get_accounts(self, owner_id=None, member_id=None, properties=None):
    """GetAccounts.
    Get a list of accounts for a specific owner or a specific member.

    :param str owner_id: ID for the owner of the accounts.
    :param str member_id: ID for a member of the accounts.
    :param str properties:
    :rtype: [Account]
    """
    query_parameters = {}
    # Serialize only the filters that were actually supplied.
    for query_key, arg_name, arg_value in (
            ('ownerId', 'owner_id', owner_id),
            ('memberId', 'member_id', member_id),
            ('properties', 'properties', properties)):
        if arg_value is not None:
            query_parameters[query_key] = self._serialize.query(
                arg_name, arg_value, 'str')
    response = self._send(http_method='GET',
                          location_id='229a6a53-b428-4ffb-a835-e8f36b5b4b1e',
                          version='5.0',
                          query_parameters=query_parameters)
    return self._deserialize('[Account]', self._unwrap_collection(response))
def day(t, now=None, format='%B %d'):
    '''Date delta compared to ``t``. You can override ``now`` to specify
    what date to compare to.

    You can override the date format by supplying a ``format`` parameter.

    :param t: timestamp, :class:`datetime.date` or
        :class:`datetime.datetime` object
    :param now: default ``None``, optionally a
        :class:`datetime.datetime` object
    :param format: default ``'%B %d'``

    >>> import time
    >>> print(day(time.time()))
    today
    >>> print(day(time.time() - 86400))
    yesterday
    >>> print(day(time.time() - 604800))
    last week
    >>> print(day(time.time() + 86400))
    tomorrow
    >>> print(day(time.time() + 604800))
    next week
    '''
    target = _to_date(t)
    anchor = _to_date(now or datetime.datetime.now())
    diff = target - anchor
    # Negative delta means t lies in the past relative to the anchor.
    in_past = _total_seconds(diff) < 0
    day_count = abs(diff.days)
    if day_count == 0:
        return _('today')
    if day_count == 1:
        return _('yesterday') if in_past else _('tomorrow')
    if day_count == 7:
        return _('last week') if in_past else _('next week')
    return target.strftime(format)
def get_db_prep_save(self, value):
    """Convert our JSON object to a string before we save."""
    # Falsy values are stored as an empty string rather than serialized.
    prepared = "" if not value else dumps(value)
    return super(JSONField, self).get_db_prep_save(prepared)
def run_norm(net, df=None, norm_type='zscore', axis='row', keep_orig=False):
    '''A dataframe (more accurately a dictionary of dataframes, e.g. mat,
    mat_up...) can be passed to run_norm and a normalization will be run
    (e.g. zscore) on either the rows or columns.'''
    # df is a dictionary of several dataframes: 'mat', 'mat_orig', etc.
    if df is None:
        df = net.dat_to_df()
    # Dispatch to the requested normalizer; unknown types pass through.
    normalizers = {'zscore': zscore_df, 'qn': qn_df}
    norm_func = normalizers.get(norm_type)
    if norm_func is not None:
        df = norm_func(df, axis, keep_orig)
    net.df_to_dat(df)
def watch_docs(ctx):
    """Rebuild the docs whenever a file changes."""
    try:
        import sphinx_autobuild  # noqa
    except ImportError:
        # Give an actionable hint instead of a bare traceback.
        print('ERROR: watch task requires the sphinx_autobuild package.')
        print('Install it with:')
        print(' pip install sphinx-autobuild')
        sys.exit(1)
    docs(ctx)
    ctx.run('sphinx-autobuild {} {}'.format(docs_dir, build_dir), pty=True)
def get_user_permission_full_codename(perm):
    """Returns 'app_label.<perm>_<usermodulename>'. If standard
    ``auth.User`` is used, for 'change' perm this would return
    ``auth.change_user`` and if ``myapp.CustomUser`` is used it would
    return ``myapp.change_customuser``."""
    meta = get_user_model()._meta
    return '{0}.{1}_{2}'.format(meta.app_label, perm, meta.module_name)
def get_clean_content(self):
    """Implementation of the clean() method.

    Walks the template-tag matches produced by ``self.pattern`` over
    ``self.html_content`` with a small state machine (states: 'HTML',
    'BLANK_TEMPLATE', 'ECHO_TEMPLATE'): blank template regions are
    replaced by spaces, echo regions by '0' characters, and the HTML in
    between is written through unchanged.
    """
    # Replacement fill characters per template state.
    fill_chars = {'BLANK_TEMPLATE': ' ', 'ECHO_TEMPLATE': '0'}
    for match in self.pattern.finditer(self.html_content):
        start, end = match.start(), match.end()
        tag = _get_tag(match)
        if tag == 'ECHO':
            # Flush the HTML before the echo tag, then switch state.
            self._write_content(start)
            self._index = start
            self._state = 'ECHO_TEMPLATE'
        elif tag == 'START':
            if self._index != start:
                self._write_content(start)
                self._index = start
            self._state = 'BLANK_TEMPLATE'
        elif tag == 'END':
            if self._state not in ('BLANK_TEMPLATE', 'ECHO_TEMPLATE'):
                # We got a closing tag but none was open. We decide to carry
                # on as it may be the case that it was because of a closing
                # dictionary in javascript like: var dict = {foo: {}}.
                # See the note on the clean() function for more details.
                continue
            # Replace the template span with the state's fill character.
            fill_char = fill_chars[self._state]
            fill = fill_char * (end - self._index)
            if self._state == 'BLANK_TEMPLATE':
                self._pending.append(fill)
                self._pending_has_blank = True
            else:
                assert not self._pending
                self._output.write(fill)
            self._index = end
            self._state = 'HTML'
        elif tag == 'SPACES':
            # Whitespace runs are buffered; whether they are emitted
            # depends on what follows (template vs. content).
            self._pending.append(match.group('spaces'))
            self._index = end
        elif tag == 'NEWLINE':
            if self._state == 'HTML':
                if self._index != start or not self._pending_has_blank:
                    self._write_content(start)
                self._output.write(match.group('newline'))
            elif self._state == 'BLANK_TEMPLATE':
                # We discard the content of this template and whatever is in
                # self._pending.
                self._output.write(match.group('newline'))
            elif self._state == 'ECHO_TEMPLATE':
                assert False, 'Echo tags should be in just one line.'
            self._index = end
            self._reset_pending()
    assert self._state == 'HTML', 'Tag was not closed'
    # Flush whatever trails the last match.
    if self._index != len(self.html_content) or not self._pending_has_blank:
        self._write_content()
    return self._output.getvalue()
def export(self):
    """Generate a NIDM-Results export.

    Records, in ``self.bundle``, the PROV entities/activities and relations
    for every model fitting, contrast estimation and inference held by this
    exporter, then writes the provenance files to disk.

    :return: the export output directory (``self.out_dir``)
    :raises Exception: re-raises any failure after calling ``self.cleanup()``
    """
    try:
        if not os.path.isdir(self.export_dir):
            os.mkdir(self.export_dir)
        # Initialise main bundle
        self._create_bundle(self.version)
        self.add_object(self.software)
        # Add model fitting steps.  model_fittings may arrive as a dict;
        # normalise to a list of values first.
        if not isinstance(self.model_fittings, list):
            self.model_fittings = list(self.model_fittings.values())
        for model_fitting in self.model_fittings:
            # Design Matrix
            # model_fitting.activity.used(model_fitting.design_matrix)
            self.bundle.used(model_fitting.activity.id, model_fitting.design_matrix.id)
            self.add_object(model_fitting.design_matrix)
            # *** Export visualisation of the design matrix
            self.add_object(model_fitting.design_matrix.image)
            if model_fitting.design_matrix.image.file is not None:
                self.add_object(model_fitting.design_matrix.image.file)
            if model_fitting.design_matrix.hrf_models is not None:
                # drift model
                self.add_object(model_fitting.design_matrix.drift_model)
            # Machine and subject provenance only exist from NIDM 1.3 on.
            if self.version['major'] > 1 or (self.version['major'] == 1 and self.version['minor'] >= 3):
                # Machine
                # model_fitting.data.wasAttributedTo(model_fitting.machine)
                self.bundle.wasAttributedTo(model_fitting.data.id, model_fitting.machine.id)
                self.add_object(model_fitting.machine)
                # Imaged subject or group(s)
                for sub in model_fitting.subjects:
                    self.add_object(sub)
                    # model_fitting.data.wasAttributedTo(sub)
                    self.bundle.wasAttributedTo(model_fitting.data.id, sub.id)
            # Data
            # model_fitting.activity.used(model_fitting.data)
            self.bundle.used(model_fitting.activity.id, model_fitting.data.id)
            self.add_object(model_fitting.data)
            # Error Model
            self.bundle.used(model_fitting.activity.id, model_fitting.error_model.id)
            self.add_object(model_fitting.error_model)
            # Parameter Estimate Maps
            for param_estimate in model_fitting.param_estimates:
                # param_estimate.wasGeneratedBy(model_fitting.activity)
                self.bundle.wasGeneratedBy(param_estimate.id, model_fitting.activity.id)
                self.add_object(param_estimate)
                self.add_object(param_estimate.coord_space)
                self.add_object(param_estimate.file)
                if param_estimate.derfrom is not None:
                    self.bundle.wasDerivedFrom(param_estimate.id, param_estimate.derfrom.id)
                    self.add_object(param_estimate.derfrom)
                    # Source entity is recorded but its file is not copied.
                    self.add_object(param_estimate.derfrom.file, export_file=False)
            # Residual Mean Squares Map
            # model_fitting.rms_map.wasGeneratedBy(model_fitting.activity)
            self.add_object(model_fitting.rms_map)
            self.bundle.wasGeneratedBy(model_fitting.rms_map.id, model_fitting.activity.id)
            self.add_object(model_fitting.rms_map.coord_space)
            self.add_object(model_fitting.rms_map.file)
            if model_fitting.rms_map.derfrom is not None:
                self.bundle.wasDerivedFrom(model_fitting.rms_map.id, model_fitting.rms_map.derfrom.id)
                self.add_object(model_fitting.rms_map.derfrom)
                self.add_object(model_fitting.rms_map.derfrom.file, export_file=False)
            # Resels per Voxel Map (optional)
            if model_fitting.rpv_map is not None:
                self.add_object(model_fitting.rpv_map)
                self.bundle.wasGeneratedBy(model_fitting.rpv_map.id, model_fitting.activity.id)
                self.add_object(model_fitting.rpv_map.coord_space)
                self.add_object(model_fitting.rpv_map.file)
                if model_fitting.rpv_map.inf_id is not None:
                    self.bundle.used(model_fitting.rpv_map.inf_id, model_fitting.rpv_map.id)
                if model_fitting.rpv_map.derfrom is not None:
                    self.bundle.wasDerivedFrom(model_fitting.rpv_map.id, model_fitting.rpv_map.derfrom.id)
                    self.add_object(model_fitting.rpv_map.derfrom)
                    self.add_object(model_fitting.rpv_map.derfrom.file, export_file=False)
            # Mask
            # model_fitting.mask_map.wasGeneratedBy(model_fitting.activity)
            self.bundle.wasGeneratedBy(model_fitting.mask_map.id, model_fitting.activity.id)
            self.add_object(model_fitting.mask_map)
            if model_fitting.mask_map.derfrom is not None:
                self.bundle.wasDerivedFrom(model_fitting.mask_map.id, model_fitting.mask_map.derfrom.id)
                self.add_object(model_fitting.mask_map.derfrom)
                self.add_object(model_fitting.mask_map.derfrom.file, export_file=False)
            # Create coordinate space export
            self.add_object(model_fitting.mask_map.coord_space)
            # Create "Mask map" entity
            self.add_object(model_fitting.mask_map.file)
            # Grand Mean map
            # model_fitting.grand_mean_map.wasGeneratedBy(model_fitting.activity)
            self.bundle.wasGeneratedBy(model_fitting.grand_mean_map.id, model_fitting.activity.id)
            self.add_object(model_fitting.grand_mean_map)
            # Coordinate space entity
            self.add_object(model_fitting.grand_mean_map.coord_space)
            # Grand Mean Map entity
            self.add_object(model_fitting.grand_mean_map.file)
            # Model Parameters Estimation activity
            self.add_object(model_fitting.activity)
            self.bundle.wasAssociatedWith(model_fitting.activity.id, self.software.id)
        # Add contrast estimation steps.  analysis_masks remembers, per
        # estimation activity, which mask was used — needed again below in
        # the inference loop.
        analysis_masks = dict()
        for (model_fitting_id, pe_ids), contrasts in list(self.contrasts.items()):
            for contrast in contrasts:
                model_fitting = self._get_model_fitting(model_fitting_id)
                # contrast.estimation.used(model_fitting.rms_map)
                self.bundle.used(contrast.estimation.id, model_fitting.rms_map.id)
                # contrast.estimation.used(model_fitting.mask_map)
                self.bundle.used(contrast.estimation.id, model_fitting.mask_map.id)
                analysis_masks[contrast.estimation.id] = model_fitting.mask_map.id
                self.bundle.used(contrast.estimation.id, contrast.weights.id)
                self.bundle.used(contrast.estimation.id, model_fitting.design_matrix.id)
                # contrast.estimation.wasAssociatedWith(self.software)
                self.bundle.wasAssociatedWith(contrast.estimation.id, self.software.id)
                for pe_id in pe_ids:
                    # contrast.estimation.used(pe_id)
                    self.bundle.used(contrast.estimation.id, pe_id)
                # Create estimation activity
                self.add_object(contrast.estimation)
                # Create contrast weights
                self.add_object(contrast.weights)
                if contrast.contrast_map is not None:
                    # Create contrast Map
                    # contrast.contrast_map.wasGeneratedBy(contrast.estimation)
                    self.bundle.wasGeneratedBy(contrast.contrast_map.id, contrast.estimation.id)
                    self.add_object(contrast.contrast_map)
                    self.add_object(contrast.contrast_map.coord_space)
                    # Copy contrast map in export directory
                    self.add_object(contrast.contrast_map.file)
                    if contrast.contrast_map.derfrom is not None:
                        self.bundle.wasDerivedFrom(contrast.contrast_map.id, contrast.contrast_map.derfrom.id)
                        self.add_object(contrast.contrast_map.derfrom)
                        self.add_object(contrast.contrast_map.derfrom.file, export_file=False)
                # Create Std Err. Map (T-tests) or Explained Mean Sq. Map
                # (F-tests)
                stderr_explmeansq_map = (contrast.stderr_or_expl_mean_sq_map)
                self.bundle.wasGeneratedBy(stderr_explmeansq_map.id, contrast.estimation.id)
                self.add_object(stderr_explmeansq_map)
                self.add_object(stderr_explmeansq_map.coord_space)
                # Contrast-variance details only exist for T-test std-err maps.
                if isinstance(stderr_explmeansq_map, ContrastStdErrMap) and stderr_explmeansq_map.contrast_var:
                    self.add_object(stderr_explmeansq_map.contrast_var)
                    if stderr_explmeansq_map.var_coord_space:
                        self.add_object(stderr_explmeansq_map.var_coord_space)
                    if stderr_explmeansq_map.contrast_var.coord_space:
                        self.add_object(stderr_explmeansq_map.contrast_var.coord_space)
                    self.add_object(stderr_explmeansq_map.contrast_var.file, export_file=False)
                    self.bundle.wasDerivedFrom(stderr_explmeansq_map.id, stderr_explmeansq_map.contrast_var.id)
                self.add_object(stderr_explmeansq_map.file)
                # Create Statistic Map
                # contrast.stat_map.wasGeneratedBy(contrast.estimation)
                self.bundle.wasGeneratedBy(contrast.stat_map.id, contrast.estimation.id)
                self.add_object(contrast.stat_map)
                self.add_object(contrast.stat_map.coord_space)
                # Copy Statistical map in export directory
                self.add_object(contrast.stat_map.file)
                if contrast.stat_map.derfrom is not None:
                    self.bundle.wasDerivedFrom(contrast.stat_map.id, contrast.stat_map.derfrom.id)
                    self.add_object(contrast.stat_map.derfrom)
                    self.add_object(contrast.stat_map.derfrom.file, export_file=False)
                # Create Z Statistic Map (optional)
                if contrast.z_stat_map:
                    # contrast.z_stat_map.wasGeneratedBy(contrast.estimation)
                    self.bundle.wasGeneratedBy(contrast.z_stat_map.id, contrast.estimation.id)
                    self.add_object(contrast.z_stat_map)
                    self.add_object(contrast.z_stat_map.coord_space)
                    # Copy Statistical map in export directory
                    self.add_object(contrast.z_stat_map.file)
        # Add inference steps
        for contrast_id, inferences in list(self.inferences.items()):
            contrast = self._get_contrast(contrast_id)
            for inference in inferences:
                # Inference consumed the Z map when available, else the stat map.
                if contrast.z_stat_map:
                    used_id = contrast.z_stat_map.id
                else:
                    used_id = contrast.stat_map.id
                # inference.inference_act.used(used_id)
                self.bundle.used(inference.inference_act.id, used_id)
                # inference.inference_act.wasAssociatedWith(self.software)
                self.bundle.wasAssociatedWith(inference.inference_act.id, self.software.id)
                # Excursion set
                # inference.excursion_set.wasGeneratedBy(inference.inference_act)
                self.bundle.wasGeneratedBy(inference.excursion_set.id, inference.inference_act.id)
                self.add_object(inference.excursion_set)
                self.add_object(inference.excursion_set.coord_space)
                if inference.excursion_set.visu is not None:
                    self.add_object(inference.excursion_set.visu)
                    if inference.excursion_set.visu.file is not None:
                        self.add_object(inference.excursion_set.visu.file)
                # Copy "Excursion set map" file in export directory
                self.add_object(inference.excursion_set.file)
                if inference.excursion_set.clust_map is not None:
                    self.add_object(inference.excursion_set.clust_map)
                    self.add_object(inference.excursion_set.clust_map.file)
                    self.add_object(inference.excursion_set.clust_map.coord_space)
                if inference.excursion_set.mip is not None:
                    self.add_object(inference.excursion_set.mip)
                    self.add_object(inference.excursion_set.mip.file)
                # Height threshold (plus any equivalent thresholds)
                if inference.height_thresh.equiv_thresh is not None:
                    for equiv in inference.height_thresh.equiv_thresh:
                        self.add_object(equiv)
                self.add_object(inference.height_thresh)
                # Extent threshold (plus any equivalent thresholds)
                if inference.extent_thresh.equiv_thresh is not None:
                    for equiv in inference.extent_thresh.equiv_thresh:
                        self.add_object(equiv)
                self.add_object(inference.extent_thresh)
                # Display Mask (potentially more than 1)
                if inference.disp_mask:
                    for mask in inference.disp_mask:
                        # inference.inference_act.used(mask)
                        self.bundle.used(inference.inference_act.id, mask.id)
                        self.add_object(mask)
                        # Create coordinate space entity
                        self.add_object(mask.coord_space)
                        # Create "Display Mask Map" entity
                        self.add_object(mask.file)
                        if mask.derfrom is not None:
                            self.bundle.wasDerivedFrom(mask.id, mask.derfrom.id)
                            self.add_object(mask.derfrom)
                            self.add_object(mask.derfrom.file, export_file=False)
                # Search Space
                self.bundle.wasGeneratedBy(inference.search_space.id, inference.inference_act.id)
                # inference.search_space.wasGeneratedBy(inference.inference_act)
                self.add_object(inference.search_space)
                self.add_object(inference.search_space.coord_space)
                # Copy "Mask map" in export directory
                self.add_object(inference.search_space.file)
                # Peak Definition
                if inference.peak_criteria:
                    # inference.inference_act.used(inference.peak_criteria)
                    self.bundle.used(inference.inference_act.id, inference.peak_criteria.id)
                    self.add_object(inference.peak_criteria)
                # Cluster Definition
                if inference.cluster_criteria:
                    # inference.inference_act.used(inference.cluster_criteria)
                    self.bundle.used(inference.inference_act.id, inference.cluster_criteria.id)
                    self.add_object(inference.cluster_criteria)
                if inference.clusters:
                    # Clusters and peaks
                    for cluster in inference.clusters:
                        # cluster.wasDerivedFrom(inference.excursion_set)
                        self.bundle.wasDerivedFrom(cluster.id, inference.excursion_set.id)
                        self.add_object(cluster)
                        for peak in cluster.peaks:
                            self.bundle.wasDerivedFrom(peak.id, cluster.id)
                            self.add_object(peak)
                            self.add_object(peak.coordinate)
                        if cluster.cog is not None:
                            self.bundle.wasDerivedFrom(cluster.cog.id, cluster.id)
                            self.add_object(cluster.cog)
                            self.add_object(cluster.cog.coordinate)
                # Inference activity
                # inference.inference_act.used(inference.height_thresh)
                self.bundle.used(inference.inference_act.id, inference.height_thresh.id)
                # inference.inference_act.used(inference.extent_thresh)
                self.bundle.used(inference.inference_act.id, inference.extent_thresh.id)
                # Reuse the mask recorded for this contrast's estimation above.
                self.bundle.used(inference.inference_act.id, analysis_masks[contrast.estimation.id])
                self.add_object(inference.inference_act)
        # Write-out prov file
        self.save_prov_to_files()
        return self.out_dir
    except Exception:
        # Remove partial export output before propagating the error.
        self.cleanup()
        raise
def callback(self, filename, lines, **kwargs):
    """Sends log lines to redis servers.

    :param filename: source log file the lines came from
    :param lines: iterable of raw log lines to ship
    :param kwargs: extra formatting options; a ``timestamp`` key, if present,
        is consumed here and not forwarded to ``self.format``
    :raises TransportException: if the redis pipeline cannot be executed
    """
    self._logger.debug('Redis transport called')
    timestamp = self.get_timestamp(**kwargs)
    if kwargs.get('timestamp', False):
        del kwargs['timestamp']
    # Per-file namespace override, falling back to the transport default.
    namespaces = self._beaver_config.get_field('redis_namespace', filename)
    if not namespaces:
        namespaces = self._namespace
    namespaces = namespaces.split(",")
    # BUG FIX: the original used the message text as the join *separator*
    # ('Got namespaces: '.join(namespaces)), which garbled the log line.
    self._logger.debug('Got namespaces: ' + ','.join(namespaces))
    data_type = self._data_type
    self._logger.debug('Got data type: ' + data_type)
    server = self._get_next_server()
    self._logger.debug('Got redis server: ' + server['url'])
    pipeline = server['redis'].pipeline(transaction=False)
    # Dispatch on the configured data type: list push vs pub/sub publish.
    callback_map = {
        self.LIST_DATA_TYPE: pipeline.rpush,
        self.CHANNEL_DATA_TYPE: pipeline.publish,
    }
    callback_method = callback_map[data_type]
    for line in lines:
        for namespace in namespaces:
            callback_method(namespace.strip(), self.format(filename, line, timestamp, **kwargs))
    try:
        pipeline.execute()
    # BUG FIX: Python-2-only 'except E, e' syntax replaced with 'as'
    # (valid on Python 2.6+ and required on Python 3).
    except redis.exceptions.RedisError as exception:
        self._logger.warn('Cannot push lines to redis server: ' + server['url'])
        raise TransportException(exception)
def lstm_cell(hidden_size):
    """Wrapper function to create an LSTM cell.

    :param hidden_size: number of units in the cell
    :return: a ``tf.contrib.rnn.LSTMCell`` with peepholes and tuple state
    """
    cell_options = {'use_peepholes': True, 'state_is_tuple': True}
    return tf.contrib.rnn.LSTMCell(hidden_size, **cell_options)
# pylint: disable=anomalous-backslash-in-string
def update(name,
           password=None,
           fullname=None,
           description=None,
           home=None,
           homedrive=None,
           logonscript=None,
           profile=None,
           expiration_date=None,
           expired=None,
           account_disabled=None,
           unlock_account=None,
           password_never_expires=None,
           disallow_change_password=None):
    '''Updates settings for the windows user. Name is the only required
    parameter. Settings will only be changed if the parameter is passed a
    value.

    .. versionadded:: 2015.8.0

    Args:
        name (str): The user name to update.
        password (str, optional): New user password in plain text.
        fullname (str, optional): The user's full name.
        description (str, optional): A brief description of the user account.
        home (str, optional): The path to the user's home directory.
        homedrive (str, optional): The drive letter to assign to the home
            directory. Must be the Drive Letter followed by a colon. ie: U:
        logonscript (str, optional): The path to the logon script.
        profile (str, optional): The path to the user's profile directory.
        expiration_date (date, optional): The date and time when the account
            expires. Can be a valid date/time string. To set to never expire
            pass the string 'Never'.
        expired (bool, optional): Pass `True` to expire the account. The user
            will be prompted to change their password at the next logon. Pass
            `False` to mark the account as 'not expired'. You can't use this
            to negate the expiration if the expiration was caused by the
            account expiring. You'll have to change the `expiration_date` as
            well.
        account_disabled (bool, optional): True disables the account. False
            enables the account.
        unlock_account (bool, optional): True unlocks a locked user account.
            False is ignored.
        password_never_expires (bool, optional): True sets the password to
            never expire. False allows the password to expire.
        disallow_change_password (bool, optional): True blocks the user from
            changing the password. False allows the user to change the
            password.

    Returns:
        bool: True if successful. False is unsuccessful.

    CLI Example:

    .. code-block:: bash

        salt '*' user.update bob password=secret profile=C:\\Users\\Bob
        home=\\server\homeshare\bob homedrive=U:
    '''
    # pylint: enable=anomalous-backslash-in-string
    # On Python 2 the win32 APIs need unicode strings.
    if six.PY2:
        name = _to_unicode(name)
        password = _to_unicode(password)
        fullname = _to_unicode(fullname)
        description = _to_unicode(description)
        home = _to_unicode(home)
        homedrive = _to_unicode(homedrive)
        logonscript = _to_unicode(logonscript)
        profile = _to_unicode(profile)
    # Make sure the user exists
    # Return an object containing current settings for the user
    # (USER_INFO_4 level — includes flags, paths and expiry).
    try:
        user_info = win32net.NetUserGetInfo(None, name, 4)
    except win32net.error as exc:
        log.error('Failed to update user %s', name)
        log.error('nbr: %s', exc.winerror)
        log.error('ctx: %s', exc.funcname)
        log.error('msg: %s', exc.strerror)
        return False
    # Check parameters to update
    # Update the user object with new settings; only truthy string
    # parameters overwrite the current values.
    if password:
        user_info['password'] = password
    if home:
        user_info['home_dir'] = home
    if homedrive:
        user_info['home_dir_drive'] = homedrive
    if description:
        user_info['comment'] = description
    if logonscript:
        user_info['script_path'] = logonscript
    if fullname:
        user_info['full_name'] = fullname
    if profile:
        user_info['profile'] = profile
    if expiration_date:
        if expiration_date == 'Never':
            user_info['acct_expires'] = win32netcon.TIMEQ_FOREVER
        else:
            try:
                dt_obj = salt.utils.dateutils.date_cast(expiration_date)
            except (ValueError, RuntimeError):
                # NOTE(review): returns an error *string* here instead of
                # False as documented — callers must handle both.
                return 'Invalid Date/Time Format: {0}'.format(expiration_date)
            user_info['acct_expires'] = time.mktime(dt_obj.timetuple())
    # Boolean settings distinguish "not passed" (None) from False, so they
    # are compared against None explicitly.
    if expired is not None:
        if expired:
            user_info['password_expired'] = 1
        else:
            user_info['password_expired'] = 0
    if account_disabled is not None:
        # Set or clear the ACCOUNTDISABLE bit in the flags bitmask.
        if account_disabled:
            user_info['flags'] |= win32netcon.UF_ACCOUNTDISABLE
        else:
            user_info['flags'] &= ~win32netcon.UF_ACCOUNTDISABLE
    if unlock_account is not None:
        # Only unlocking is supported; False is deliberately ignored.
        if unlock_account:
            user_info['flags'] &= ~win32netcon.UF_LOCKOUT
    if password_never_expires is not None:
        if password_never_expires:
            user_info['flags'] |= win32netcon.UF_DONT_EXPIRE_PASSWD
        else:
            user_info['flags'] &= ~win32netcon.UF_DONT_EXPIRE_PASSWD
    if disallow_change_password is not None:
        if disallow_change_password:
            user_info['flags'] |= win32netcon.UF_PASSWD_CANT_CHANGE
        else:
            user_info['flags'] &= ~win32netcon.UF_PASSWD_CANT_CHANGE
    # Apply new settings
    try:
        win32net.NetUserSetInfo(None, name, 4, user_info)
    except win32net.error as exc:
        log.error('Failed to update user %s', name)
        log.error('nbr: %s', exc.winerror)
        log.error('ctx: %s', exc.funcname)
        log.error('msg: %s', exc.strerror)
        return False
    return True
def classify_users(X_test, model, classifier_type, meta_model, upper_cutoff):
    """Uses a trained model and the unlabelled features to associate users with labels.

    The decision is done as per scikit-learn:
    http://scikit-learn.org/stable/modules/generated/sklearn.multiclass.OneVsRestClassifier.html
    http://scikit-learn.org/stable/modules/generated/sklearn.svm.LinearSVC.html#sklearn.svm.LinearSVC.predict

    Inputs:  - X_test: The graph-based features in either NumPy or SciPy sparse array format.
             - model: A trained scikit-learn One-vs-All multi-label scheme of linear models.
             - classifier_type: One of "LinearSVC", "LogisticRegression", "RandomForest".
             - meta_model: Regressor predicting, per sample, how many labels to keep.
             - upper_cutoff: Maximum number of labels kept per sample.
    Output:  - prediction: A scipy.sparse COO matrix keeping only the top-k scores per user.
    Raises:  - RuntimeError: if classifier_type is not one of the supported values.
    """
    # REFACTOR: the three branches previously duplicated ~20 lines each
    # building the same sparse top-k matrix; that logic now lives in
    # _sparsify_top_k and only the score computation differs per classifier.
    if classifier_type == "LinearSVC":
        prediction = model.decision_function(X_test)
    elif classifier_type == "LogisticRegression":
        prediction = model.predict_proba(X_test)
    elif classifier_type == "RandomForest":
        # RandomForest requires CSR format for sparse input.
        if issparse(X_test):
            prediction = model.predict_proba(X_test.tocsr())
        else:
            prediction = model.predict_proba(X_test)
    else:
        print("Invalid classifier type.")
        raise RuntimeError
    # prediction = penalize_large_classes(prediction)
    prediction = _sparsify_top_k(prediction, meta_model.predict(X_test), upper_cutoff)
    if classifier_type == "LinearSVC":
        # Decision-function scores are unbounded; normalise per label column.
        prediction = normalize(prediction, norm="l2", axis=0)
    return prediction


def _sparsify_top_k(prediction, meta_prediction, upper_cutoff):
    """Keep, per row, only the k highest scores, where k is given by the meta model.

    k is rint(meta_prediction) + 1, capped to upper_cutoff and floored at 2
    (cap applied first, matching the original behaviour).
    Returns a scipy.sparse COO matrix with the same shape as `prediction`.
    """
    meta_prediction = np.rint(meta_prediction) + 1
    meta_prediction[meta_prediction > upper_cutoff] = upper_cutoff
    meta_prediction[meta_prediction < 2] = 2
    prediction_indices = np.argsort(prediction, axis=1)
    nnz_total = int(np.sum(meta_prediction))
    prediction_row = np.empty(nnz_total, dtype=np.int32)
    prediction_col = np.empty(nnz_total, dtype=np.int32)
    prediction_data = np.empty(nnz_total, dtype=np.float64)
    nnz_counter = 0
    for i in range(prediction.shape[0]):
        k = int(meta_prediction[i])
        # Column indices of the k largest scores in row i.
        jj = prediction_indices[i, -k:]
        prediction_row[nnz_counter:nnz_counter + k] = i
        prediction_col[nnz_counter:nnz_counter + k] = jj
        prediction_data[nnz_counter:nnz_counter + k] = prediction[i, jj]
        nnz_counter += k
    return spsp.coo_matrix((prediction_data, (prediction_row, prediction_col)),
                           shape=prediction.shape)
def format_timedelta(dt: timedelta) -> str:
    """Formats timedelta to readable format, e.g. 1h30min.

    :param dt: timedelta to render
    :return: compact string such as "2d3h", "1h30min" or "0min"
    """
    total_seconds = int(dt.total_seconds())
    days, day_rem = divmod(total_seconds, 86400)
    hours, hour_rem = divmod(day_rem, 3600)
    minutes, _ = divmod(hour_rem, 60)
    # Only strictly positive components are rendered; seconds are dropped.
    components = ((days, "d"), (hours, "h"), (minutes, "min"))
    rendered = "".join(str(value) + unit for value, unit in components if value > 0)
    return rendered if rendered else "0min"
def _fill_get_item_cache(self, catalog, key):
    """Get from redis, cache locally, then return.

    :param catalog: catalog name
    :param key: item value to look up
    :return: the name cached for ``key``, or None if absent
    """
    lang = self._get_lang()
    entries = self.get_all(catalog)
    # Rebuild the local value->name mapping for this language/catalog.
    mapping = {entry['value']: entry['name'] for entry in entries}
    self.ITEM_CACHE[lang][catalog] = mapping
    return mapping.get(key)
def get_out_srvc_node_ip_addr(cls, tenant_id):
    """Retrieves the OUT service node IP address.

    Returns None (implicitly) when the fabric was never prepared for the
    given tenant.
    """
    if tenant_id not in cls.serv_obj_dict:
        LOG.error("Fabric not prepared for tenant %s", tenant_id)
        return
    out_subnet_dict = cls.serv_obj_dict.get(tenant_id).get_out_ip_addr()
    # The OUT service node sits at the subnet base address + 2.
    return str(netaddr.IPAddress(out_subnet_dict.get('subnet')) + 2)
def update_columns_dict(self, kwargs):
    """Update the value of a column or multiple columns by passing as a dict.

    For observable columns, provide the label of the observable itself and
    it will be found (so long as it does not conflict with an existing
    non-observable column).

    :param kwargs: dict mapping column label -> new value; geometric keys
        are consumed (popped) before the remaining columns are applied.
    """
    # make sure to do the geometric things that are needed for some of the
    # ComputedColumns first
    for key in ('triangles', 'vertices', 'centers', 'vnormals', 'tnormals'):
        if key in kwargs.keys():
            self.__setitem__(key, kwargs.pop(key))
    for k, v in kwargs.items():
        if isinstance(v, float) and k not in self._scalar_fields:
            # Then let's make an array with the correct length full of this
            # scalar.
            # NOTE: this won't work for Nx3's, but that really shouldn't
            # ever happen since they should be set within the init.
            # v = np.ones(self.Ntriangles) * v
            if self._compute_at_vertices:
                v = np.full(self.Nvertices, v)
            else:
                v = np.full(self.Ntriangles, v)
        self.__setitem__(k, v)
        if isinstance(v, ComputedColumn):
            # then let's update the mesh instance to correctly handle
            # inheritance
            self.__getitem__(k)._mesh = self
def normalize_commit_message(commit_message):
    """Return a tuple of title and body from the commit message.

    The title is everything before the first newline; the body is the rest
    with leading blank lines stripped (empty string when there is no body).
    """
    title, _sep, body = commit_message.partition("\n")
    return title, body.lstrip("\n")
def get_urlpatterns(self):
    """Returns the URL patterns managed by the considered factory/application."""
    # (route, view class, url name) — one row per moderation endpoint.
    route_specs = [
        (_('topic/<str:slug>-<int:pk>/lock/'), self.topic_lock_view, 'topic_lock'),
        (_('topic/<str:slug>-<int:pk>/unlock/'), self.topic_unlock_view, 'topic_unlock'),
        (_('topic/<str:slug>-<int:pk>/delete/'), self.topic_delete_view, 'topic_delete'),
        (_('topic/<str:slug>-<int:pk>/move/'), self.topic_move_view, 'topic_move'),
        (_('topic/<str:slug>-<int:pk>/change/topic/'), self.topic_update_to_normal_topic_view, 'topic_update_to_post'),
        (_('topic/<str:slug>-<int:pk>/change/sticky/'), self.topic_update_to_sticky_topic_view, 'topic_update_to_sticky'),
        (_('topic/<str:slug>-<int:pk>/change/announce/'), self.topic_update_to_announce_view, 'topic_update_to_announce'),
        (_('queue/'), self.moderation_queue_list_view, 'queue'),
        (_('queue/<int:pk>/'), self.moderation_queue_detail_view, 'queued_post'),
        (_('queue/<int:pk>/approve/'), self.post_approve_view, 'approve_queued_post'),
        (_('queue/<int:pk>/disapprove/'), self.post_disapprove_view, 'disapprove_queued_post'),
    ]
    return [path(route, view.as_view(), name=url_name) for route, view, url_name in route_specs]
def generate_semantic_data_key(used_semantic_keys):
    """Create a new and unique semantic data key.

    :param list used_semantic_keys: Handed list of keys already in use
    :rtype: str
    :return: the first "semantic data key N" (N = 0, 1, 2, ...) not in use
    """
    counter = 0
    candidate = "semantic data key 0"
    while candidate in used_semantic_keys:
        counter += 1
        candidate = "semantic data key " + str(counter)
    return candidate
def convert_default(self, field, **params):
    """Return the mapped marshmallow field for *field*, or a raw field.

    The first entry of TYPE_MAPPING whose class matches wins.
    """
    matched = next(
        (ma_field for klass, ma_field in self.TYPE_MAPPING if isinstance(field, klass)),
        None,
    )
    if matched is not None:
        return matched(**params)
    return fields.Raw(**params)
def hostedzone_from_element(zone):
    """Construct a L{HostedZone} instance from a I{HostedZone} XML element."""
    raw_name = maybe_bytes_to_unicode(zone.find("Name").text)
    raw_id = maybe_bytes_to_unicode(zone.find("Id").text)
    # Zone names come back punycoded; round-trip through ascii/idna to
    # obtain the unicode form.
    return HostedZone(
        name=raw_name.encode("ascii").decode("idna"),
        identifier=raw_id.replace(u"/hostedzone/", u""),
        rrset_count=int(zone.find("ResourceRecordSetCount").text),
        reference=maybe_bytes_to_unicode(zone.find("CallerReference").text),
    )
def instruction_NEG_memory(self, opcode, ea, m):
    """Negate memory.

    Computes the two's-complement negation of the byte ``m`` read from
    effective address ``ea`` and returns ``(ea, result & 0xff)`` so the
    caller can write the result back.
    """
    # Sanity guard: repeatedly executing NEG $00 at address $0000 with a
    # zero operand suggests the program counter has run off into cleared
    # memory — bail out after 10 consecutive occurrences.
    if opcode == 0x0 and ea == 0x0 and m == 0x0:
        self._wrong_NEG += 1
        if self._wrong_NEG > 10:
            raise RuntimeError("Wrong PC ???")
    else:
        self._wrong_NEG = 0
    r = m * -1
    # same as: r = ~m + 1
    # log.debug("$%04x NEG $%02x from %04x to $%02x" % (
    #     self.program_counter, m, ea, r,
    # Flags are updated from the *unmasked* result; the returned value is
    # masked to 8 bits.
    self.clear_NZVC()
    self.update_NZVC_8(0, m, r)
    return ea, r & 0xff
def build_chunk(oscillators):
    """Build an audio chunk and progress the oscillator states.

    Args:
        oscillators (list): A list of oscillator.Oscillator objects
            to build chunks from

    Returns:
        bytes: audio sample bytes ready to be written to a wave file
    """
    step_random_processes(oscillators)
    subchunks = []
    for osc in oscillators:
        osc.amplitude.step_amp()
        osc_chunk = osc.get_samples(config.CHUNK_SIZE)
        if osc_chunk is not None:
            subchunks.append(osc_chunk)
    # Mix by summation; fall back to silence if nothing produced samples.
    if len(subchunks):
        new_chunk = sum(subchunks)
    else:
        new_chunk = numpy.zeros(config.CHUNK_SIZE)
    # If we exceed the maximum amplitude, handle it gracefully
    chunk_amplitude = amplitude.find_amplitude(new_chunk)
    if chunk_amplitude > config.MAX_AMPLITUDE:
        # Normalize the amplitude chunk to mitigate immediate clipping
        new_chunk = amplitude.normalize_amplitude(new_chunk, config.MAX_AMPLITUDE)
        # Pick some of the offending oscillators (and some random others)
        # and lower their drift targets
        avg_amp = (sum(osc.amplitude.value for osc in oscillators) / len(oscillators))
        for osc in oscillators:
            if (osc.amplitude.value > avg_amp and rand.prob_bool(0.1) or rand.prob_bool(0.01)):
                osc.amplitude.drift_target = rand.weighted_rand([(-5, 1), (0, 10)])
                osc.amplitude.change_rate = rand.weighted_rand(osc.amplitude.change_rate_weights)
    # FIX: ndarray.tostring() was deprecated and removed in NumPy 2.0;
    # tobytes() is the byte-identical replacement.
    return new_chunk.astype(config.SAMPLE_DATA_TYPE).tobytes()
def set_reload_on_exception_params(self, do_reload=None, etype=None, evalue=None, erepr=None):
    """Sets workers reload on exceptions parameters.

    :param bool do_reload: Reload a worker when an exception is raised.
    :param str etype: Reload a worker when a specific exception type is raised.
    :param str evalue: Reload a worker when a specific exception value is raised.
    :param str erepr: Reload a worker when a specific exception type+value
        (language-specific) is raised.
    """
    self._set('reload-on-exception', do_reload, cast=bool)
    for option, value in (
        ('reload-on-exception-type', etype),
        ('reload-on-exception-value', evalue),
        ('reload-on-exception-repr', erepr),
    ):
        self._set(option, value)
    return self._section
def main(arguments):
    """Parse arguments, request the urls, notify if different.

    Reads one URL per line from the input file, fetches each page, and
    compares it against a cached copy under ~/.urlmon-cache; a Pushover
    notification is sent when a page has changed.

    :param arguments: argv-style list of command line arguments
    """
    formatter_class = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=formatter_class)
    parser.add_argument('infile', help="Input file", type=argparse.FileType('r'))
    parser.add_argument('-o', '--outfile', help="Output file", default=sys.stdout, type=argparse.FileType('w'))
    args = parser.parse_args(arguments)
    # One URL per line in the input file.
    urls = args.infile.read().splitlines()
    # Pushover credentials come from the system keyring, not the CLI.
    api_token = keyring.get_password('pushover', 'api_token')
    pushover_user = keyring.get_password('pushover', 'user')
    pushover = Pushover(api_token, pushover_user)
    for url in urls:
        domain = urlparse(url).netloc
        urlpath = urlparse(url).path
        # Flatten the URL path into a filesystem-safe cache file name.
        url_dashes = re.sub(r'/', '-', urlpath)
        cache = os.path.expanduser("~/.urlmon-cache")
        if not os.path.isdir(cache):
            os.mkdir(cache, mode=0o755)
        filename = domain + url_dashes + '.html'
        filepath = os.path.join(cache, filename)
        html = requests.get(url).text
        if os.path.isfile(filepath):
            # Seen before: compare the fetched page with the cached copy.
            with open(filepath) as r:
                before = r.read()
            if html == before:
                logger.info("{} is unchanged".format(url))
            else:
                # NOTE(review): the cached copy is not rewritten here, so
                # the same change re-notifies on every run — confirm intended.
                msg = "{} changed".format(url)
                logger.info(msg)
                logger.debug(diff(before, html))
                response = pushover.push(msg)
                logger.debug("Pushover notification sent: " "{}".format(response.status_code))
        else:
            # First time seeing this URL: prime the cache, no notification.
            logger.info("New url: {}".format(filename))
            with open(filepath, 'w') as w:
                w.write(html)
            logger.info("Wrote file to cache: {}".format(filename))
def set_range(self, range):
    """Set the range of the instrument.

    A range is a pair (tuple or list) of two Note objects or note-name
    strings; strings are converted to Notes.

    :raises UnexpectedObjectError: if an element is neither a note-name
        string nor a Note-like object (one exposing a ``name`` attribute).
    """
    # FIX: the original assigned back into `range` in place, which raises
    # TypeError for the tuples the docstring promises to accept. Convert
    # each element independently instead (the original also assumed both
    # elements had the same type and only validated the first one).
    low, high = range
    if isinstance(low, str):
        low = Note(low)
    if isinstance(high, str):
        high = Note(high)
    for note in (low, high):
        if not hasattr(note, 'name'):
            raise UnexpectedObjectError("Unexpected object '%s'. " "Expecting a mingus.containers.Note object" % note)
    self.range = (low, high)
def add(self, obj=None, filename=None, data=None, info={}, **kwargs):
    """Add one or more entries to the archive.

    If a filename is supplied, it will be used. Otherwise, a filename
    will be generated from the supplied object. Note that if the
    explicit filename uses the {timestamp} field, it will be formatted
    upon export.

    The data to be archived is either supplied explicitly as 'data' or
    automatically rendered from the object.
    """
    # Validate the permitted argument combinations up front.
    if [filename, obj] == [None, None]:
        raise Exception("Either filename or a HoloViews object is " "needed to create an entry in the archive.")
    elif obj is None and not self.parse_fields(filename).issubset({'timestamp'}):
        raise Exception("Only the {timestamp} formatter may be used unless an object is supplied.")
    elif [obj, data] == [None, None]:
        raise Exception("Either an object or explicit data must be " "supplied to create an entry in the archive.")
    elif data and 'mime_type' not in info:
        raise Exception("The mime-type must be supplied in the info dictionary " "when supplying data directly")
    self._validate_formatters()
    entries = []
    if data is None:
        # Render the object with every configured exporter; each exporter
        # that handles the object contributes its own archive entry.
        for exporter in self.exporters:
            rendered = exporter(obj)
            if rendered is None:
                continue  # this exporter does not handle the object
            (data, new_info) = rendered
            # NOTE(review): `info` is rebound each iteration, so keys from
            # earlier exporters carry into later entries — confirm intended.
            info = dict(info, **new_info)
            entries.append((data, info))
    else:
        entries.append((data, info))
    for (data, info) in entries:
        self._add_content(obj, data, info, filename=filename)
def call_alert(*args, **kwargs):
    '''
    Lamp alert

    Options:

    * **id**: Specifies a device ID. Can be a comma-separated values. All, if omitted.
    * **on**: Turns on or off an alert. Default is True.

    CLI Example:

    .. code-block:: bash

        salt '*' hue.alert
        salt '*' hue.alert id=1
        salt '*' hue.alert id=1,2,3 on=false
    '''
    devices = _get_lights()
    # Without an explicit id, target every known device; an empty device
    # list (or an explicit id) falls through to _get_devices, mirroring
    # the original `and ... or ...` chain.
    if 'id' not in kwargs:
        target_ids = sorted(devices.keys())
    else:
        target_ids = []
    if not target_ids:
        target_ids = _get_devices(kwargs)
    alert_state = "lselect" if kwargs.get("on", True) else "none"
    return {dev_id: _set(dev_id, {"alert": alert_state}) for dev_id in target_ids}
def PopItem(self):
    """Pops an item off the queue.

    If no ZeroMQ socket has been created, one will be created the first
    time this method is called.

    Returns:
        object: item from the queue.

    Raises:
        KeyboardInterrupt: if the process is sent a KeyboardInterrupt while
            popping an item.
        QueueEmpty: if the queue is empty, and no item could be popped within
            the queue timeout.
        RuntimeError: if closed or terminate event is missing.
        zmq.error.ZMQError: if a ZeroMQ error occurs.
    """
    # Lazily create the ZeroMQ socket on first use.
    if not self._zmq_socket:
        self._CreateZMQSocket()
    if not self._closed_event or not self._terminate_event:
        raise RuntimeError('Missing closed or terminate event.')
    logger.debug('Pop on {0:s} queue, port {1:d}'.format(self.name, self.port))
    # Deadline after which an empty queue is no longer retried.
    last_retry_timestamp = time.time() + self.timeout_seconds
    # NOTE(review): with `or`, the loop keeps running until BOTH events are
    # set — confirm that `and` was not intended (stop on either event).
    while not self._closed_event.is_set() or not self._terminate_event.is_set():
        try:
            return self._ReceiveItemOnActivity(self._zmq_socket)
        except errors.QueueEmpty:
            # Keep retrying until the timeout deadline passes, then re-raise.
            if time.time() > last_retry_timestamp:
                raise
        except KeyboardInterrupt:
            # Abort so the other endpoint does not block forever.
            self.Close(abort=True)
            raise
def add_auth_attempt(self, auth_type, successful, **kwargs):
    """Record a login attempt in self.login_attempts.

    :param auth_type: authentication mechanism, e.g. 'plain' for
        plaintext username/password
    :param successful: whether the attempt succeeded
    :param kwargs: extra attributes to store on the entry (e.g. username,
        password); 'challenge' and 'response' values are stored as repr()
        so raw bytes survive later serialization.
    """
    entry = {
        'timestamp': datetime.utcnow(),
        'auth': auth_type,
        'id': uuid.uuid4(),
        'successful': successful,
    }
    # FIX: dict.iteritems() is Python-2-only and raises AttributeError on
    # Python 3; items() behaves the same on both. The unused log_string
    # accumulator was dropped.
    for key, value in kwargs.items():
        if key == 'challenge' or key == 'response':
            entry[key] = repr(value)
        else:
            entry[key] = value
    self.login_attempts.append(entry)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.