idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
25,900
def play_from_queue(self, index, start=True):
    """Play a track from the queue by index.

    :param index: 0-based position of the track in the queue.
    :param start: if True, start playback immediately.
    """
    # The queue URI embeds this speaker's UID, so speaker info must be
    # populated first.
    if not self.speaker_info:
        self.get_speaker_info()
    queue_uri = 'x-rincon-queue:{0}#0'.format(self.uid)
    self.avTransport.SetAVTransportURI([
        ('InstanceID', 0),
        ('CurrentURI', queue_uri),
        ('CurrentURIMetaData', ''),
    ])
    # The device's track numbering is 1-based.
    self.avTransport.Seek([
        ('InstanceID', 0),
        ('Unit', 'TRACK_NR'),
        ('Target', index + 1),
    ])
    if start:
        self.play()
Play a track from the queue by index .
25,901
def play_uri(self, uri='', meta='', title='', start=True, force_radio=False):
    """Play a URI.

    If no metadata is supplied but a title is, minimal radio-broadcast
    DIDL metadata for the TuneIn service is generated.

    :param uri: the URI to play.
    :param meta: DIDL metadata for the URI ('' to autogenerate).
    :param title: title used when autogenerating metadata.
    :param start: if True, start playback immediately.
    :param force_radio: replace the URI scheme with x-rincon-mp3radio.
    :returns: result of ``self.play()`` when started, otherwise False.
    """
    if meta == '' and title != '':
        meta_template = (
            '<DIDL-Lite xmlns:dc="http://purl.org/dc/elements'
            '/1.1/" xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/" '
            'xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/" '
            'xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/">'
            '<item id="R:0/0/0" parentID="R:0/0" restricted="true">'
            '<dc:title>{title}</dc:title><upnp:class>'
            'object.item.audioItem.audioBroadcast</upnp:class><desc '
            'id="cdudn" nameSpace="urn:schemas-rinconnetworks-com:'
            'metadata-1-0/">{service}</desc></item></DIDL-Lite>'
        )
        tunein_service = 'SA_RINCON65031_'
        meta = meta_template.format(title=escape(title),
                                    service=tunein_service)
    if force_radio:
        # Swap whatever scheme is present for the mp3 radio scheme.
        scheme_end = uri.find(':')
        if scheme_end > 0:
            uri = 'x-rincon-mp3radio{0}'.format(uri[scheme_end:])
    self.avTransport.SetAVTransportURI([
        ('InstanceID', 0),
        ('CurrentURI', uri),
        ('CurrentURIMetaData', meta),
    ])
    if not start:
        return False
    return self.play()
Play a URI .
25,902
def volume(self, volume):
    """Set the speaker's volume, clamped to the range 0-100."""
    desired = min(max(int(volume), 0), 100)
    self.renderingControl.SetVolume([
        ('InstanceID', 0),
        ('Channel', 'Master'),
        ('DesiredVolume', desired),
    ])
Set the speaker's volume.
25,903
def bass(self, bass):
    """Set the speaker's bass, clamped to the range -10 to 10."""
    desired = min(max(int(bass), -10), 10)
    self.renderingControl.SetBass([
        ('InstanceID', 0),
        ('DesiredBass', desired),
    ])
Set the speaker's bass.
25,904
def treble(self, treble):
    """Set the speaker's treble, clamped to the range -10 to 10."""
    desired = min(max(int(treble), -10), 10)
    self.renderingControl.SetTreble([
        ('InstanceID', 0),
        ('DesiredTreble', desired),
    ])
Set the speaker's treble.
25,905
def _parse_zone_group_state(self):
    """Parse the Zone Group State and rebuild the group/zone caches.

    Repopulates ``self._groups``, ``self._all_zones`` and
    ``self._visible_zones`` from the device's ZoneGroupState XML.
    Uses ``self._zgs_cache`` to skip re-parsing unchanged state.
    """
    def parse_zone_group_member(member_element):
        # Build a SoCo instance for one <ZoneGroupMember>/<Satellite>
        # element and register it in the zone caches.
        member_attribs = member_element.attrib
        ip_addr = member_attribs['Location'].split('//')[1].split(':')[0]
        zone = config.SOCO_CLASS(ip_addr)
        # Seed cached attributes from the topology so later property
        # access need not hit the network.
        zone._uid = member_attribs['UUID']
        zone._player_name = member_attribs['ZoneName']
        # 'Invisible' == '1' marks hidden zones (e.g. paired slaves).
        is_visible = False if member_attribs.get('Invisible') == '1' else True
        if is_visible:
            self._visible_zones.add(zone)
        self._all_zones.add(zone)
        return zone
    zgs = self.zoneGroupTopology.GetZoneGroupState(
        cache_timeout=5)['ZoneGroupState']
    if zgs == self._zgs_cache:
        return  # unchanged since last parse
    self._zgs_cache = zgs
    tree = XML.fromstring(zgs.encode('utf-8'))
    # Rebuild all caches from scratch.
    self._groups.clear()
    self._all_zones.clear()
    self._visible_zones.clear()
    for group_element in tree.findall('ZoneGroup'):
        coordinator_uid = group_element.attrib['Coordinator']
        group_uid = group_element.attrib['ID']
        group_coordinator = None
        members = set()
        for member_element in group_element.findall('ZoneGroupMember'):
            zone = parse_zone_group_member(member_element)
            if zone._uid == coordinator_uid:
                group_coordinator = zone
                zone._is_coordinator = True
            else:
                zone._is_coordinator = False
            zone._is_bridge = True if member_element.attrib.get(
                'IsZoneBridge') == '1' else False
            members.add(zone)
            # Satellites (e.g. surround speakers) are nested inside
            # their parent member element.
            for satellite_element in member_element.findall('Satellite'):
                zone = parse_zone_group_member(satellite_element)
                members.add(zone)
        self._groups.add(ZoneGroup(group_uid, group_coordinator, members))
The Zone Group State contains a lot of useful information .
25,906
def partymode(self):
    """Put all the speakers in the network in the same group, a.k.a.
    Party Mode.

    This speaker becomes the group coordinator; every other visible
    zone joins it.
    """
    # BUG FIX: the original used a list comprehension purely for its
    # side effect, building and discarding a list. A plain loop states
    # the intent and avoids the pointless allocation.
    for zone in self.visible_zones:
        if zone is not self:
            zone.join(self)
Put all the speakers in the network in the same group, a.k.a. Party Mode.
25,907
def switch_to_line_in(self, source=None):
    """Switch the speaker's input to line-in.

    :param source: optional speaker whose line-in should be used;
        defaults to this speaker's own line-in.
    """
    uid = source.uid if source else self.uid
    self.avTransport.SetAVTransportURI([
        ('InstanceID', 0),
        ('CurrentURI', 'x-rincon-stream:{0}'.format(uid)),
        ('CurrentURIMetaData', ''),
    ])
Switch the speaker's input to line-in.
25,908
def get_current_track_info(self):
    """Get information about the currently playing track.

    :returns: dict with keys ``title``, ``artist``, ``album``,
        ``album_art``, ``position``, ``playlist_position``,
        ``duration``, ``uri`` and ``metadata`` (the raw DIDL string).
    """
    response = self.avTransport.GetPositionInfo([
        ('InstanceID', 0),
        ('Channel', 'Master')
    ])
    track = {'title': '', 'artist': '', 'album': '', 'album_art': '',
             'position': ''}
    track['playlist_position'] = response['Track']
    track['duration'] = response['TrackDuration']
    track['uri'] = response['TrackURI']
    track['position'] = response['RelTime']
    metadata = response['TrackMetaData']
    track['metadata'] = metadata
    # A duration of 0:00:00 indicates a stream (e.g. radio); try to
    # split "artist - title" out of the streamContent element.
    if metadata != '' and track['duration'] == '0:00:00':
        metadata = XML.fromstring(really_utf8(metadata))
        trackinfo = metadata.findtext('.//{urn:schemas-rinconnetworks-com:'
                                      'metadata-1-0/}streamContent') or ''
        index = trackinfo.find(' - ')
        if index > -1:
            track['artist'] = trackinfo[:index]
            track['title'] = trackinfo[index + 3:]
        else:
            # No "artist - title" separator: fall back to the dc:title
            # element, then to the raw stream content.
            track['title'] = metadata.findtext('.//{http://purl.org/dc/'
                                               'elements/1.1/}title')
            if not track['title']:
                track['title'] = trackinfo
    elif metadata not in ('', 'NOT_IMPLEMENTED', None):
        # Ordinary track: pull dc/upnp fields out of the DIDL metadata.
        metadata = XML.fromstring(really_utf8(metadata))
        md_title = metadata.findtext(
            './/{http://purl.org/dc/elements/1.1/}title')
        md_artist = metadata.findtext(
            './/{http://purl.org/dc/elements/1.1/}creator')
        md_album = metadata.findtext(
            './/{urn:schemas-upnp-org:metadata-1-0/upnp/}album')
        track['title'] = ""
        if md_title:
            track['title'] = md_title
        track['artist'] = ""
        if md_artist:
            track['artist'] = md_artist
        track['album'] = ""
        if md_album:
            track['album'] = md_album
        album_art_url = metadata.findtext(
            './/{urn:schemas-upnp-org:metadata-1-0/upnp/}albumArtURI')
        if album_art_url is not None:
            track['album_art'] = self.music_library.build_album_art_full_uri(
                album_art_url)
    return track
Get information about the currently playing track .
25,909
def get_speaker_info(self, refresh=False, timeout=None):
    """Get information about the Sonos speaker.

    :param refresh: if True, re-fetch even when cached info exists.
    :param timeout: timeout passed to ``requests.get``.
    :returns: the (cached) speaker-info dict, or None when the device
        element is missing from the description document.
    """
    if self.speaker_info and refresh is False:
        # Serve from cache.
        return self.speaker_info
    else:
        # Fetch and parse the UPnP device description document.
        response = requests.get(
            'http://' + self.ip_address + ':1400/xml/device_description.xml',
            timeout=timeout)
        dom = XML.fromstring(response.content)
        device = dom.find('{urn:schemas-upnp-org:device-1-0}device')
        if device is not None:
            self.speaker_info['zone_name'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}roomName')
            self.speaker_info['player_icon'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}iconList/'
                '{urn:schemas-upnp-org:device-1-0}icon/'
                '{urn:schemas-upnp-org:device-1-0}url')
            self.speaker_info['uid'] = self.uid
            self.speaker_info['serial_number'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}serialNum')
            self.speaker_info['software_version'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}softwareVersion')
            self.speaker_info['hardware_version'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}hardwareVersion')
            self.speaker_info['model_number'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}modelNumber')
            self.speaker_info['model_name'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}modelName')
            self.speaker_info['display_version'] = device.findtext(
                '{urn:schemas-upnp-org:device-1-0}displayVersion')
            # The MAC address is the first colon-separated field of the
            # serial number.
            mac = self.speaker_info['serial_number'].split(':')[0]
            self.speaker_info['mac_address'] = mac
            return self.speaker_info
        return None
Get information about the Sonos speaker .
25,910
def get_current_transport_info(self):
    """Get the current playback state.

    :returns: dict with keys ``current_transport_state``,
        ``current_transport_status`` and ``current_transport_speed``.
    """
    response = self.avTransport.GetTransportInfo([
        ('InstanceID', 0),
    ])
    return {
        'current_transport_state': response['CurrentTransportState'],
        'current_transport_status': response['CurrentTransportStatus'],
        'current_transport_speed': response['CurrentSpeed'],
    }
Get the current playback state .
25,911
def get_queue(self, start=0, max_items=100, full_album_art_uri=False):
    """Get information about the queue.

    :param start: starting index of the items to return.
    :param max_items: maximum number of items to return.
    :param full_album_art_uri: rewrite album-art URIs to full URIs.
    :returns: a :class:`Queue` of DIDL items with browse metadata.
    """
    response = self.contentDirectory.Browse([
        ('ObjectID', 'Q:0'),
        ('BrowseFlag', 'BrowseDirectChildren'),
        ('Filter', '*'),
        ('StartingIndex', start),
        ('RequestedCount', max_items),
        ('SortCriteria', ''),
    ])
    result = response['Result']
    # Browse bookkeeping (returned/total counts, update id).
    metadata = {
        camel_to_underscore(tag): int(response[tag])
        for tag in ('NumberReturned', 'TotalMatches', 'UpdateID')
    }
    queue = []
    if not result:
        return Queue(queue, **metadata)
    for didl_item in from_didl_string(result):
        if full_album_art_uri:
            self.music_library._update_album_art_to_full_uri(didl_item)
        queue.append(didl_item)
    return Queue(queue, **metadata)
Get information about the queue .
25,912
def add_uri_to_queue(self, uri, position=0, as_next=False):
    """Add the URI to the queue.

    :param uri: the URI to enqueue.
    :param position: desired 1-based queue position (0 = end).
    :param as_next: enqueue directly after the current track.
    :returns: the queue position assigned by :meth:`add_to_queue`.
    """
    # Wrap the bare URI in a minimal DIDL item so it can be enqueued.
    resources = [DidlResource(uri=uri,
                              protocol_info="x-rincon-playlist:*:*:*")]
    wrapper = DidlObject(resources=resources, title='',
                         parent_id='', item_id='')
    return self.add_to_queue(wrapper, position, as_next)
Add the URI to the queue .
25,913
def add_to_queue(self, queueable_item, position=0, as_next=False):
    """Add a queueable item to the queue.

    :param queueable_item: a DIDL item with at least one resource.
    :param position: desired 1-based queue position (0 = end).
    :param as_next: enqueue directly after the current track.
    :returns: the 1-based queue position the track was placed at.
    """
    metadata = to_didl_string(queueable_item)
    response = self.avTransport.AddURIToQueue([
        ('InstanceID', 0),
        ('EnqueuedURI', queueable_item.resources[0].uri),
        ('EnqueuedURIMetaData', metadata),
        ('DesiredFirstTrackNumberEnqueued', position),
        ('EnqueueAsNext', int(as_next)),
    ])
    return int(response['FirstTrackNumberEnqueued'])
Add a queueable item to the queue .
25,914
def add_multiple_to_queue(self, items, container=None):
    """Add a sequence of items to the queue.

    Items are sent to the device in chunks of 16.

    :param items: iterable of DIDL items to enqueue.
    :param container: optional DIDL container the items come from.
    """
    if container is None:
        container_uri = ''
        container_metadata = ''
    else:
        container_uri = container.resources[0].uri
        container_metadata = to_didl_string(container)
    chunk_size = 16
    item_list = list(items)
    for offset in range(0, len(item_list), chunk_size):
        chunk = item_list[offset:offset + chunk_size]
        uris = ' '.join(entry.resources[0].uri for entry in chunk)
        uri_metadata = ' '.join(to_didl_string(entry) for entry in chunk)
        self.avTransport.AddMultipleURIsToQueue([
            ('InstanceID', 0),
            ('UpdateID', 0),
            ('NumberOfURIs', len(chunk)),
            ('EnqueuedURIs', uris),
            ('EnqueuedURIsMetaData', uri_metadata),
            ('ContainerURI', container_uri),
            ('ContainerMetaData', container_metadata),
            ('DesiredFirstTrackNumberEnqueued', 0),
            ('EnqueueAsNext', 0),
        ])
Add a sequence of items to the queue .
25,915
def remove_from_queue(self, index):
    """Remove a track from the queue by index.

    :param index: 0-based index of the track to remove.
    """
    # The device's object IDs are 1-based.
    self.avTransport.RemoveTrackFromQueue([
        ('InstanceID', 0),
        ('ObjectID', 'Q:0/' + str(index + 1)),
        ('UpdateID', '0'),
    ])
Remove a track from the queue by index . The index number is required as an argument where the first index is 0 .
25,916
def get_favorite_radio_shows(self, start=0, max_items=100):
    """Get favorite radio shows from the Sonos Radio app.

    :param start: starting index of the items to return.
    :param max_items: maximum number of items to return.
    """
    deprecation_msg = ('The output type of this method will probably change in '
                       'the future to use SoCo data structures')
    warnings.warn(deprecation_msg, stacklevel=2)
    return self.__get_favorites(RADIO_SHOWS, start, max_items)
Get favorite radio shows from Sonos Radio app .
25,917
def get_favorite_radio_stations(self, start=0, max_items=100):
    """Get favorite radio stations from the Sonos Radio app.

    :param start: starting index of the items to return.
    :param max_items: maximum number of items to return.
    """
    deprecation_msg = ('The output type of this method will probably change in '
                       'the future to use SoCo data structures')
    warnings.warn(deprecation_msg, stacklevel=2)
    return self.__get_favorites(RADIO_STATIONS, start, max_items)
Get favorite radio stations from Sonos Radio app .
25,918
def get_sonos_favorites(self, start=0, max_items=100):
    """Get Sonos favorites.

    :param start: starting index of the items to return.
    :param max_items: maximum number of items to return.
    """
    deprecation_msg = ('The output type of this method will probably change in '
                       'the future to use SoCo data structures')
    warnings.warn(deprecation_msg, stacklevel=2)
    return self.__get_favorites(SONOS_FAVORITES, start, max_items)
Get Sonos favorites .
25,919
def create_sonos_playlist(self, title):
    """Create a new empty Sonos playlist.

    :param title: title of the new playlist.
    :returns: a :class:`DidlPlaylistContainer` for the new playlist.
    """
    response = self.avTransport.CreateSavedQueue([
        ('InstanceID', 0),
        ('Title', title),
        ('EnqueuedURI', ''),
        ('EnqueuedURIMetaData', ''),
    ])
    item_id = response['AssignedObjectID']
    # item_id looks like 'SQ:<n>'; the numeric part names the file slot.
    obj_id = item_id.split(':', 2)[1]
    uri = "file:///jffs/settings/savedqueues.rsq#{0}".format(obj_id)
    resources = [DidlResource(uri=uri,
                              protocol_info="x-rincon-playlist:*:*:*")]
    return DidlPlaylistContainer(resources=resources, title=title,
                                 parent_id='SQ:', item_id=item_id)
Create a new empty Sonos playlist .
25,920
def create_sonos_playlist_from_queue(self, title):
    """Create a new Sonos playlist from the current queue.

    :param title: title of the new playlist.
    :returns: a :class:`DidlPlaylistContainer` for the new playlist.
    """
    response = self.avTransport.SaveQueue([
        ('InstanceID', 0),
        ('Title', title),
        ('ObjectID', ''),
    ])
    item_id = response['AssignedObjectID']
    # item_id looks like 'SQ:<n>'; the numeric part names the file slot.
    obj_id = item_id.split(':', 2)[1]
    uri = "file:///jffs/settings/savedqueues.rsq#{0}".format(obj_id)
    resources = [DidlResource(uri=uri,
                              protocol_info="x-rincon-playlist:*:*:*")]
    return DidlPlaylistContainer(resources=resources, title=title,
                                 parent_id='SQ:', item_id=item_id)
Create a new Sonos playlist from the current queue .
25,921
def remove_sonos_playlist(self, sonos_playlist):
    """Remove a Sonos playlist.

    :param sonos_playlist: a playlist object (its ``item_id`` is used)
        or a plain object-id string.
    :returns: the result of the DestroyObject call.
    """
    object_id = getattr(sonos_playlist, 'item_id', sonos_playlist)
    return self.contentDirectory.DestroyObject([('ObjectID', object_id)])
Remove a Sonos playlist .
25,922
def add_item_to_sonos_playlist(self, queueable_item, sonos_playlist):
    """Add a queueable item to a Sonos playlist.

    :param queueable_item: a DIDL item with at least one resource.
    :param sonos_playlist: the target playlist container.
    """
    # A current UpdateID for the playlist is required by the device;
    # fetch it with a minimal one-item search.
    response, _ = self.music_library._music_lib_search(
        sonos_playlist.item_id, 0, 1)
    update_id = response['UpdateID']
    metadata = to_didl_string(queueable_item)
    # AddAtIndex 4294967295 (max uint32) appends to the end.
    self.avTransport.AddURIToSavedQueue([
        ('InstanceID', 0),
        ('UpdateID', update_id),
        ('ObjectID', sonos_playlist.item_id),
        ('EnqueuedURI', queueable_item.resources[0].uri),
        ('EnqueuedURIMetaData', metadata),
        ('AddAtIndex', 4294967295),
    ])
Adds a queueable item to a Sonos playlist .
25,923
def get_sleep_timer(self):
    """Retrieve the remaining sleep-timer duration.

    :returns: remaining time in seconds, or None if no timer is set.
    """
    resp = self.avTransport.GetRemainingSleepTimerDuration([
        ('InstanceID', 0),
    ])
    duration = resp['RemainingSleepTimerDuration']
    if not duration:
        return None
    hours, minutes, seconds = (int(part) for part in duration.split(':'))
    return hours * 3600 + minutes * 60 + seconds
Retrieve the remaining sleep-timer time, if any.
25,924
def get_sonos_playlist_by_attr(self, attr_name, match):
    """Return the first Sonos playlist whose ``attr_name`` equals
    ``match``.

    :param attr_name: name of the playlist attribute to compare.
    :param match: value the attribute must equal.
    :raises ValueError: if no playlist matches.
    """
    found = next(
        (playlist for playlist in self.get_sonos_playlists()
         if getattr(playlist, attr_name) == match),
        None)
    if found is None:
        raise ValueError('No match on "{0}" for value "{1}"'.format(
            attr_name, match))
    return found
Return the first Sonos Playlist DidlPlaylistContainer that matches the attribute specified .
25,925
def really_unicode(in_string):
    """Make a string unicode. Really.

    Byte strings are decoded by trying utf-8, then latin-1, then
    ascii with replacement.

    :raises ValueError: if the input is not a string type at all.
    """
    if isinstance(in_string, StringType):
        for decode_args in (('utf-8',), ('latin-1',), ('ascii', 'replace')):
            try:
                in_string = in_string.decode(*decode_args)
            except UnicodeDecodeError:
                continue
            break
    if not isinstance(in_string, UnicodeType):
        raise ValueError('%s is not a string at all.' % in_string)
    return in_string
Make a string unicode . Really .
25,926
def camel_to_underscore(string):
    """Convert CamelCase to lowercase_underscore."""
    underscored = ALL_CAP_RE.sub(r'\1_\2', FIRST_CAP_RE.sub(r'\1_\2', string))
    return underscored.lower()
Convert camelcase to lowercase and underscore .
25,927
def prettify(unicode_text):
    """Return a pretty-printed version of a unicode XML string."""
    import xml.dom.minidom
    document = xml.dom.minidom.parseString(unicode_text.encode('utf-8'))
    return document.toprettyxml(indent="  ", newl="\n")
Return a pretty - printed version of a unicode XML string .
25,928
def prepare_headers(self, http_headers, soap_action):
    """Prepare the HTTP headers for sending.

    :param http_headers: optional extra headers (may be None).
    :param soap_action: SOAP action to quote into SOAPACTION (may be
        None).
    :returns: the complete header dict.
    """
    headers = {'Content-Type': 'text/xml; charset="utf-8"'}
    if soap_action is not None:
        headers['SOAPACTION'] = '"{}"'.format(soap_action)
    if http_headers is not None:
        headers.update(http_headers)
    return headers
Prepare the http headers for sending .
25,929
def prepare_soap_body(self, method, parameters, namespace):
    """Prepare the SOAP message body for sending.

    :param method: the method (element) name.
    :param parameters: iterable of (name, value) pairs; values are
        XML-escaped (including double quotes).
    :param namespace: optional xmlns for the method element.
    :returns: the SOAP body as a string.
    """
    wrapped_params = "".join(
        "<{name}>{value}</{name}>".format(
            name=name,
            value=escape("%s" % value, {'"': "&quot;"}))
        for name, value in parameters)
    if namespace is not None:
        return ('<{method} xmlns="{namespace}">'
                '{params}'
                '</{method}>'.format(method=method, params=wrapped_params,
                                     namespace=namespace))
    return ('<{method}>'
            '{params}'
            '</{method}>'.format(method=method, params=wrapped_params))
Prepare the SOAP message body for sending .
25,930
def prepare_soap_envelope(self, prepared_soap_header, prepared_soap_body):
    """Wrap a prepared header and body in a SOAP Envelope.

    :param prepared_soap_header: header fragment (string).
    :param prepared_soap_body: body fragment (string).
    :returns: the complete envelope as a string.
    """
    envelope_template = (
        '<?xml version="1.0"?>'
        '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"'
        ' s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
        '{soap_header}'
        '<s:Body>'
        '{soap_body}'
        '</s:Body>'
        '</s:Envelope>')
    return envelope_template.format(soap_header=prepared_soap_header,
                                    soap_body=prepared_soap_body)
Prepare the SOAP Envelope for sending .
25,931
def prepare(self):
    """Prepare the SOAP message for sending to the server.

    :returns: a ``(headers, data)`` tuple ready for the HTTP POST.
    """
    headers = self.prepare_headers(self.http_headers, self.soap_action)
    soap_header = self.prepare_soap_header(self.soap_header)
    soap_body = self.prepare_soap_body(self.method, self.parameters,
                                       self.namespace)
    envelope = self.prepare_soap_envelope(soap_header, soap_body)
    return (headers, envelope)
Prepare the SOAP message for sending to the server .
25,932
def call(self):
    """Call the SOAP method on the server.

    :returns: the first child of the response <Body> element on
        HTTP 200 (None on other unhandled statuses after
        raise_for_status).
    :raises SoapFault: on HTTP 500 responses that carry a SOAP Fault.
    """
    headers, data = self.prepare()
    # Guard so the (potentially expensive) pretty-print only happens
    # when debug logging is actually enabled.
    if _LOG.isEnabledFor(logging.DEBUG):
        _LOG.debug("Sending %s, %s", headers, prettify(data))
    response = requests.post(
        self.endpoint,
        headers=headers,
        data=data.encode('utf-8'),
        **self.request_args)
    _LOG.debug("Received %s, %s", response.headers, response.text)
    status = response.status_code
    if status == 200:
        tree = XML.fromstring(response.content)
        # The method response element is the first child of <Body>.
        body = tree.find(
            "{http://schemas.xmlsoap.org/soap/envelope/}Body")[0]
        return body
    elif status == 500:
        tree = XML.fromstring(response.content)
        fault = tree.find(
            './/{http://schemas.xmlsoap.org/soap/envelope/}Fault')
        if fault is None:
            # A 500 without a SOAP fault: surface the HTTP error.
            response.raise_for_status()
        raise SoapFault(
            fault.findtext("faultcode"),
            fault.findtext("faultstring"),
            fault.find("detail"))
    else:
        response.raise_for_status()
    return None
Call the SOAP method on the server .
25,933
def __build_option_parser():
    """Build the option parser for this script.

    :returns: a configured :class:`argparse.ArgumentParser`.
    """
    # BUG FIX: the original read ``description = description =
    # textwrap.dedent(description).strip()`` — a chained assignment
    # referencing an undefined name (the description literal was lost).
    # NOTE(review): the text below is reconstructed; restore the exact
    # wording from version control if available.
    description = textwrap.dedent("""\
        Analyze a pcap capture of web-service traffic and display or
        export the XML messages it contains.""").strip()
    parser = argparse.ArgumentParser(
        description=description,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('file_', metavar='FILE', type=str, nargs=1,
                        help='the file to analyze')
    parser.add_argument('-o', '--output-prefix', type=str,
                        help='the output filename prefix to use')
    parser.add_argument('-f', '--to-file', action='store_const',
                        const=True, default=False,
                        help='output xml to files')
    parser.add_argument('-d', '--debug-analysis', action='store_const',
                        const=True, default=False,
                        help='writes debug information to file.debug')
    parser.add_argument('-m', '--disable-color', action='store_const',
                        const=False, default=COLOR, dest='color',
                        help='disable color in interactive mode')
    # BUG FIX: help text said 'disable color' for the enable flag.
    parser.add_argument('-c', '--enable-color', action='store_const',
                        const=True, default=COLOR, dest='color',
                        help='enable color in interactive mode')
    parser.add_argument('-b', '--to-browser', action='store_const',
                        const=True, default=False,
                        help='output xml to browser, implies --to-file')
    parser.add_argument('-e', '--external-inner-xml', action='store_const',
                        const=True, default=False,
                        help='show the internal separately '
                             'encoded xml externally instead of '
                             're-integrating it')
    return parser
Build the option parser for this script
25,934
def main():
    """Main entry point of the script."""
    parser = __build_option_parser()
    args = parser.parse_args()
    analyze_ws = AnalyzeWS(args)
    try:
        analyze_ws.set_file(args.file_[0])
    except IOError:
        print('IOError raised while reading file. Exiting!')
        sys.exit(3)
    # File/browser export modes take precedence over interactive mode.
    if args.to_file or args.to_browser:
        analyze_ws.to_file_mode()
        if args.to_browser:
            analyze_ws.to_browser_mode()
    else:
        analyze_ws.interactive_mode()
Main method of the script
25,935
def set_file(self, filename):
    """Analyse the file with the captured content.

    Reads ``filename`` as a pcap capture, feeds each packet's TCP
    payload to :meth:`_parse_load`, and drops a trailing half-open
    message. Exits the process on unreadable or non-pcap input.
    """
    # Default the output prefix to the capture's basename.
    if self.output_prefix is None:
        _, self.output_prefix = os.path.split(filename)
    if not (os.path.isfile(filename) and os.access(filename, os.R_OK)):
        print 'The file \'{0}\' is either not present or not readable. ' 'Exiting!'.format(filename)
        sys.exit(1)
    try:
        packets = rdpcap(filename)
    except NameError:
        # rdpcap raises NameError on non-pcap input here.
        print 'The file \'{}\' is not a pcap capture file. Exiting!'.format(filename)
        sys.exit(2)
    for number, packet in enumerate(packets):
        self._debug('\nNUMBER {0}'.format(number), no_prefix=True)
        try:
            # Raises AttributeError when the packet has no payload.
            packet.getfieldval('load')
            load = packet.sprintf('%TCP.payload%')
            self._debug('PAYLOAD LENGTH {0}'.format(len(load)),
                        no_prefix=True)
            self._debug(load, load=True)
            self._parse_load(load)
        except AttributeError:
            self._debug('LOAD EXCEPTION', no_prefix=True)
    # Discard a message whose end marker never arrived.
    if len(self.messages) > 0 and not self.messages[-1].write_closed:
        self._debug('DELETE LAST OPEN FILE')
        del self.messages[-1]
    if self.args.debug_analysis:
        sys.exit(0)
Analyse the file with the captured content
25,936
def _parse_load(self, load):
    """Parse the payload from a single packet.

    Starts a new message on a start marker, appends to and/or closes
    the currently open message on content and end markers.
    """
    if load in ['??']:
        self._debug('IGNORING')
        return
    has_start = any(marker in load for marker in STARTS)
    has_end = any(marker in load for marker in ENDS)
    message_open = (len(self.messages) > 0 and
                    not self.messages[-1].write_closed)
    if has_start:
        self._debug('START')
        self.messages.append(WSPart(load, self.args))
        # A single payload may both start and finish a message.
        if has_end:
            self.messages[-1].finalize_content()
            self._debug('AND END')
    elif has_end:
        if message_open:
            self._debug('END ON OPEN FILE')
            self.messages[-1].add_content(load)
            self.messages[-1].finalize_content()
        else:
            self._debug('END BUT NO OPEN FILE')
    elif message_open:
        self._debug('ADD TO OPEN FILE')
        self.messages[-1].add_content(load)
    else:
        self._debug('NOTHING TO DO')
Parse the load from a single packet
25,937
def _debug(self, message, load=False, no_prefix=False):
    """Output debug information (only when --debug-analysis is set).

    :param message: text to print.
    :param load: if True, print as a '#'-prefixed payload dump framed
        by rulers.
    :param no_prefix: suppress the open-message prefix.
    """
    if self.args.debug_analysis:
        if load:
            # Prefix every payload line with '# ' and frame the dump.
            message = '\r\n'.join(['# ' + line for line in message.strip().split('\r\n')])
            print '{0}\n{1}\n{0}'.format('#' * 78, message)
        else:
            if (len(self.messages) > 0 and not self.messages[-1].write_closed) and not no_prefix:
                # FIXME(review): the string literal below is corrupted in
                # this source (unterminated quote) — presumably once an
                # '--OPEN ...{0}'-style prefix. Restore from version
                # control; kept verbatim here.
                print '--OPEN . format ( message )
            else:
                print message
Output debug information
25,938
def to_file_mode(self):
    """Write all the messages to files."""
    for message_no, _ in enumerate(self.messages):
        self.__to_file(message_no)
Write all the messages to files
25,939
def __to_file(self, message_no):
    """Write a single message to file.

    :param message_no: index of the message to write.
    :returns: the filename that was written.

    Exits the process (status 2) if the file cannot be opened.
    """
    filename = self.__create_file_name(message_no)
    try:
        # Write with the message's own encoding.
        with codecs.open(filename, mode='w',
                         encoding=self.messages[message_no].encoding) as file__:
            file__.write(self.messages[message_no].output)
    except IOError as excep:
        print 'Unable for open the file \'{0}\' for writing. The ' 'following exception was raised:'.format(filename)
        print excep
        print 'Exiting!'
        sys.exit(2)
    return filename
Write a single message to file
25,940
def __create_file_name(self, message_no):
    """Create the output filename for message ``message_no``.

    :returns: ``<cwd>/<output_prefix>_<message_no>.xml``.
    """
    basename = '{0}_{1}.xml'.format(self.output_prefix, message_no)
    return os.path.join(os.getcwd(), basename)
Create the filename to save to
25,941
def to_browser_mode(self):
    """Write all the messages to files and open them in the browser."""
    for message_no, _ in enumerate(self.messages):
        self.__to_browser(message_no)
Write all the messages to files and open them in the browser
25,942
def __to_browser(self, message_no):
    """Write a single message to file and open the file in a browser.

    The browser command is read from the 'General/browser_command'
    option of the .ini file and receives the filename via str.format.
    Exits the process on a missing config (5) or a failed launch (21).
    """
    filename = self.__to_file(message_no)
    try:
        command = self.config.get('General', 'browser_command')
    except (ConfigParser.NoOptionError, AttributeError):
        print 'Incorrect or missing .ini file. See --help.'
        sys.exit(5)
    command = str(command).format(filename)
    command_list = command.split(' ')
    try:
        # Detach from the browser's output.
        subprocess.Popen(command_list, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    except OSError:
        print 'Unable to execute the browsercommand:'
        print command
        print 'Exiting!'
        sys.exit(21)
Write a single message to file and open the file in a browser
25,943
def __update_window(self, width, height, message_no, page_no):
    """Update the terminal window with the menu and the current page.

    :param width: terminal width in characters.
    :param height: terminal height in lines.
    :param message_no: index of the message being shown.
    :param page_no: requested page; clamped to the valid range.
    :returns: the page number actually shown.
    """
    # Menu label depends on whether the message was already exported.
    file_exists_label = '-F-ILE'
    if not os.path.exists(self.__create_file_name(message_no)):
        file_exists_label = '(f)ile'
    # Clear the screen: plain newlines on Windows, ANSI escape
    # (clear + home) elsewhere.
    if PLATFORM == 'win32':
        for _ in range(50):
            print
    else:
        sys.stdout.write('\x1b[2J\x1b[H')
    content = self.messages[message_no].output.rstrip('\n')
    out = content
    if self.args.color:
        out = pygments.highlight(content, XmlLexer(), TerminalFormatter())
    # Pages are computed lazily, once per message.
    if message_no not in self.pages:
        self._form_pages(message_no, content, out, height, width)
    # Clamp the requested page into the valid range.
    page_no = max(min(len(self.pages[message_no]) - 1, page_no), 0)
    page_content = self.pages[message_no][page_no]
    max_message = str(len(self.messages) - 1)
    position_string = u'{{0: >{0}}}/{{1: <{0}}}'.format(len(max_message))
    position_string = position_string.format(message_no, max_message)
    current_max_page = len(self.pages[message_no]) - 1
    pages_string = u'{0: >2}/{1: <2}'.format(page_no, current_max_page)
    menu = (u'(b)rowser | {0} | Message {1} \u2193 (s)\u2191 (w) | '
            u'Page {2} \u2190 (a)\u2192 (d) | (q)uit\n{3}').format(
        file_exists_label, position_string, pages_string, '-' * width)
    print menu
    print page_content
    return page_no
Update the window with the menu and the new text
25,944
def _form_pages(self, message_no, content, out, height, width):
    """Split message ``message_no`` into terminal-sized pages.

    ``content`` (plain text) and ``out`` (possibly colorized text) are
    walked in lockstep: the plain line determines how many screen
    lines it wraps to, the formatted line is what gets stored.
    """
    self.pages[message_no] = []
    # Reserve 4 lines for the menu.
    page_height = height - 4
    outline = u''
    no_lines_page = 0
    for original, formatted in zip(content.split('\n'), out.split('\n')):
        # Number of screen lines this logical line wraps to.
        no_lines_original = int(math.ceil(len(original) / float(width)))
        if len(original) == 0:
            # Empty line: counts as one screen line.
            if no_lines_page + 1 <= page_height:
                outline += u'\n'
                no_lines_page += 1
            else:
                self.pages[message_no].append(outline)
                outline = u'\n'
                no_lines_page = 1
            original = formatted = u'\n'
        elif no_lines_original > page_height:
            # A single line larger than one page: flush the current
            # page and emit the long line as a page of its own.
            if len(outline) > 0:
                self.pages[message_no].append(outline)
                outline = u''
                no_lines_page = 0
            self.pages[message_no].append(formatted)
        elif no_lines_page + no_lines_original <= page_height:
            # Fits on the current page.
            if len(outline) > 0:
                outline += u'\n'
            outline += formatted
            no_lines_page += no_lines_original
        else:
            # Does not fit: start a new page with this line.
            self.pages[message_no].append(outline)
            outline = formatted
            no_lines_page = no_lines_original
    if len(outline) > 0:
        self.pages[message_no].append(outline)
    # Guarantee at least one (possibly empty) page.
    if len(self.pages[message_no]) == 0:
        self.pages[message_no].append(u'')
Form the pages
25,945
def finalize_content(self):
    """Finalize the additions: mark the message closed, decode the raw
    body, parse it as XML and form the printable output."""
    self.write_closed = True
    decoded_body = self.raw_body.decode(self.encoding)
    self._init_xml(decoded_body)
    self._form_output()
Finalize the additions.
25,946
def _init_xml(self, body):
    """Parse the present body as XML.

    Embedded DIDL documents (which arrive XML-escaped inside text
    nodes or 'val' attributes) are either re-integrated into the tree
    or, with ``external_inner_xml``, replaced by numbered placeholders
    and collected in ``self.inner_xml``.
    """
    tree = etree.fromstring(body.encode(self.encoding), PARSER)
    # Text nodes containing 'DIDL' hold a separately encoded XML doc.
    for text in tree.xpath('.//text()[contains(., "DIDL")]'):
        item = text.getparent()
        didl_tree = etree.fromstring(item.text)
        if self.external_inner_xml:
            item.text = 'DIDL_REPLACEMENT_{0}'.format(len(self.inner_xml))
            self.inner_xml.append(didl_tree)
        else:
            item.text = None
            item.append(didl_tree)
    # Inner DIDL docs can themselves embed further DIDL in 'val'
    # attributes; note the list grows while iterating, so nested
    # documents found here are processed too.
    for inner_tree in self.inner_xml:
        for item in inner_tree.xpath('//*[contains(@val, "DIDL")]'):
            if self.external_inner_xml:
                didl_tree = etree.fromstring(item.attrib['val'])
                item.attrib['val'] = 'DIDL_REPLACEMENT_{0}'.format(
                    len(self.inner_xml))
                self.inner_xml.append(didl_tree)
    self.body_formatted = etree.tostring(
        tree, pretty_print=True).decode(self.encoding)
Parse the present body as xml
25,947
def _form_output(self):
    """Form the printable output for this message.

    With ``external_inner_xml`` the body and the extracted DIDL
    documents are wrapped in a dummy root tag so the combined output
    is itself valid XML.
    """
    self.output = u''
    if self.external_inner_xml:
        self.output += u'<Dummy_tag_to_create_valid_xml_on_external_inner' '_xml>\n'
    # FIXME(review): the two string literals below are corrupted in this
    # source (unterminated quotes) — presumably once
    # u'<!-- BODY -->\n{0}' and u'\n<!-- DIDL_{0} -->\n{1}'-style
    # templates. Restore from version control; kept verbatim here.
    self.output += u'<!-- BODY . format ( self . body_formatted )
    if self.external_inner_xml:
        for number, didl in enumerate(self.inner_xml):
            self.output += u'\n<!-- DIDL_{0} . format ( number , etree . tostring ( didl , pretty_print = True ) )
        self.output += u'</Dummy_tag_to_create_valid_xml_on_external_' 'inner_xml>'
Form the output
25,948
def attempt_datastructure_upgrade(didl_item):
    """Attempt to upgrade a ``didl_item`` to a music-services data
    structure if it originates from a music service.

    :param didl_item: a DIDL item; returned unchanged when no upgrade
        is possible or necessary.
    :returns: the upgraded item, or the original item.
    """
    try:
        resource = didl_item.resources[0]
    except IndexError:
        _LOG.debug('Upgrade not possible, no resources')
        return didl_item
    # Only items with an 'x-sonos-http' URI come from a music service.
    if resource.uri.startswith('x-sonos-http'):
        uri = resource.uri
        # Build the music-service item id from the URI path, with the
        # extension stripped and the magic '11111111' prefix added.
        path = urlparse(uri).path
        path = path.rsplit('.', 1)[0]
        item_id = '11111111{0}'.format(path)
        metadata = {}
        try:
            metadata['title'] = didl_item.title
        except AttributeError:
            pass
        # Map the DIDL class name to the corresponding music-service
        # class; a missing mapping is a bug, not a user error.
        try:
            cls = get_class(
                DIDL_NAME_TO_QUALIFIED_MS_NAME[didl_item.__class__.__name__])
        except KeyError:
            _LOG.warning(
                'DATA STRUCTURE UPGRADE FAIL. Unable to upgrade music library '
                'data structure to music service data structure because an '
                'entry is missing for %s in DIDL_NAME_TO_QUALIFIED_MS_NAME. '
                'This should be reported as a bug.',
                didl_item.__class__.__name__,
            )
            return didl_item
        upgraded_item = cls(
            item_id=item_id,
            desc=desc_from_uri(resource.uri),
            resources=didl_item.resources,
            uri=uri,
            metadata_dict=metadata,
        )
        _LOG.debug("Item %s upgraded to %s", didl_item, upgraded_item)
        return upgraded_item
    _LOG.debug('Upgrade not necessary')
    return didl_item
Attempt to upgrade a didl_item to a music services data structure if it originates from a music services
25,949
def from_name(cls, fullname, soco, *args, **kwargs):
    """Instantiate a plugin by its fully qualified name.

    :param fullname: dotted path, e.g. ``package.module.ClassName``.
    :param soco: the SoCo instance handed to the plugin constructor.
    :returns: an instance of the named plugin class.
    """
    _LOG.info('Loading plugin %s', fullname)
    modname, _, clsname = fullname.rpartition('.')
    module = importlib.import_module(modname)
    plugin_class = getattr(module, clsname)
    _LOG.info('Loaded class %s', plugin_class)
    return plugin_class(soco, *args, **kwargs)
Instantiate a plugin by its full name .
25,950
def get_ms_item(xml, service, parent_id):
    """Return the music service item that corresponds to ``xml``.

    The concrete class is looked up in MS_TYPE_TO_CLASS by the
    ``itemType`` element of ``xml``.
    """
    item_type = xml.findtext(ns_tag('ms', 'itemType'))
    item_class = MS_TYPE_TO_CLASS.get(item_type)
    return item_class.from_xml(xml, service, parent_id)
Return the music service item that corresponds to xml .
25,951
def tags_with_text(xml, tags=None):
    """Return a list of tags that contain text, collected recursively
    from an XML tree.

    :param xml: an Element whose children are scanned.
    :param tags: accumulator list; a fresh one is created by default.
    :raises ValueError: on an element with neither text nor children.
    """
    collected = [] if tags is None else tags
    for child in xml:
        if child.text is not None:
            collected.append(child)
        elif len(child) > 0:
            tags_with_text(child, collected)
        else:
            raise ValueError('Unknown XML structure: {}'.format(child))
    return collected
Return a list of tags that contain text retrieved recursively from an XML tree .
25,952
def from_xml(cls, xml, service, parent_id):
    """Return a music service item generated from ``xml``.

    Text fields are validated against ``cls.valid_fields``; duration is
    converted to int and the ``can_*`` flags to bool. Raises ValueError
    for disallowed tags or missing required fields.
    """
    content = {'description': service.description,
               'service_id': service.service_id,
               'parent_id': parent_id}
    all_text_elements = tags_with_text(xml)
    for item in all_text_elements:
        # Strip the namespace prefix ("{uri}") from the tag name
        tag = item.tag[len(NAMESPACES['ms']) + 2:]
        tag = camel_to_underscore(tag)
        if tag not in cls.valid_fields:
            message = 'The info tag \'{}\' is not allowed for this item'.format(tag)
            raise ValueError(message)
        content[tag] = item.text
    # Convert values for known fields
    for key, value in content.items():
        if key == 'duration':
            content[key] = int(value)
        if key in ['can_play', 'can_skip', 'can_add_to_favorites',
                   'can_enumerate']:
            content[key] = (value == 'true')
    content['item_id'] = content.pop('id')
    content['extended_id'] = service.id_to_extended_id(content['item_id'], cls)
    uri = service.form_uri(content, cls)
    if uri:
        content['uri'] = uri
    for key in cls.required_fields:
        if key not in content:
            message = 'An XML field that correspond to the key \'{}\' ' \
                'is required. See the docstring for help.'.format(key)
            # BUG FIX: the message was built but never raised, silently
            # allowing items with missing required fields through
            raise ValueError(message)
    return cls.from_dict(content)
Return a Music Service item generated from xml .
25,953
def from_dict(cls, dict_in):
    """Initialize the class from a dict.

    Required fields become positional arguments; the rest are passed as
    keyword arguments.
    """
    kwargs = dict(dict_in)
    args = [kwargs.pop(key) for key in cls.required_fields]
    return cls(*args, **kwargs)
Initialize the class from a dict .
25,954
def didl_metadata(self):
    """Return the DIDL metadata for a music service track.

    Raises DIDLMetadataError when the item is not playable or lacks the
    information needed to build the metadata.
    """
    if not self.can_play:
        message = 'This item is not meant to be played and therefore ' \
            'also not to create its own didl_metadata'
        raise DIDLMetadataError(message)
    for key in ['extended_id', 'title', 'item_class']:
        if not hasattr(self, key):
            message = 'The property \'{}\' is not present on this item. ' \
                'This indicates that this item was not meant to create ' \
                'didl_metadata'.format(key)
            raise DIDLMetadataError(message)
    if 'description' not in self.content:
        message = 'The item for \'description\' is not present in ' \
            'self.content. This indicates that this item was not meant ' \
            'to create didl_metadata'
        raise DIDLMetadataError(message)

    root_attrib = {
        'xmlns:dc': 'http://purl.org/dc/elements/1.1/',
        'xmlns:upnp': 'urn:schemas-upnp-org:metadata-1-0/upnp/',
        'xmlns:r': 'urn:schemas-rinconnetworks-com:metadata-1-0/',
        'xmlns': 'urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/',
    }
    xml = XML.Element('DIDL-Lite', root_attrib)
    item_attrib = {'parentID': '', 'restricted': 'true',
                   'id': self.extended_id}
    if self.parent_id:
        item_attrib['parentID'] = self.parent_id
    item = XML.SubElement(xml, 'item', item_attrib)
    XML.SubElement(item, 'dc:title').text = self.title
    XML.SubElement(item, 'upnp:class').text = self.item_class
    desc_attrib = {
        'id': 'cdudn',
        'nameSpace': 'urn:schemas-rinconnetworks-com:metadata-1-0/',
    }
    desc = XML.SubElement(item, 'desc', desc_attrib)
    desc.text = self.content['description']
    return xml
Return the DIDL metadata for a Music Service Track .
25,955
def get_alarms(zone=None):
    """Get a set of all alarms known to the Sonos system.

    Alarm instances are cached per alarm id, so repeated calls update
    and return the same objects. Alarms whose zone cannot be resolved
    are skipped.
    """
    if zone is None:
        zone = discovery.any_soco()
    response = zone.alarmClock.ListAlarms()
    alarm_list = response['CurrentAlarmList']
    tree = XML.fromstring(alarm_list.encode('utf-8'))

    result = set()
    for alarm in tree.findall('Alarm'):
        values = alarm.attrib
        alarm_id = values['ID']
        # Reuse the cached instance for this id if there is one
        instance = Alarm._all_alarms.get(alarm_id)
        if not instance:
            instance = Alarm(None)
            instance._alarm_id = alarm_id
            Alarm._all_alarms[instance._alarm_id] = instance
        instance.start_time = datetime.strptime(
            values['StartTime'], "%H:%M:%S").time()
        instance.duration = (
            None if values['Duration'] == '' else
            datetime.strptime(values['Duration'], "%H:%M:%S").time())
        instance.recurrence = values['Recurrence']
        instance.enabled = values['Enabled'] == '1'
        instance.zone = next(
            (z for z in zone.all_zones if z.uid == values['RoomUUID']), None)
        if instance.zone is None:
            # The alarm is associated with a zone we cannot see; skip it
            continue
        instance.program_uri = (
            None if values['ProgramURI'] == "x-rincon-buzzer:0" else
            values['ProgramURI'])
        instance.program_metadata = values['ProgramMetaData']
        instance.play_mode = values['PlayMode']
        instance.volume = values['Volume']
        instance.include_linked_zones = values['IncludeLinkedZones'] == '1'
        result.add(instance)
    return result
Get a set of all alarms known to the Sonos system .
25,956
def play_mode(self, play_mode):
    """See playmode.

    The value is upper-cased before validation; raises KeyError for
    modes not in PLAY_MODES.
    """
    mode = play_mode.upper()
    if mode not in PLAY_MODES:
        raise KeyError("'%s' is not a valid play mode" % mode)
    self._play_mode = mode
See playmode .
25,957
def volume(self, volume):
    """See volume.

    The value is converted to int and clamped to the range 0-100.
    """
    self._volume = min(max(int(volume), 0), 100)
See volume .
25,958
def recurrence(self, recurrence):
    """See recurrence.

    Raises KeyError when the value is not a valid recurrence string.
    """
    if is_valid_recurrence(recurrence):
        self._recurrence = recurrence
    else:
        raise KeyError("'%s' is not a valid recurrence value" % recurrence)
See recurrence .
25,959
def save(self):
    """Save the alarm to the Sonos system.

    Creates a new alarm (and records its assigned id) if the alarm has
    never been saved; otherwise updates the existing alarm.
    """
    duration = ('' if self.duration is None
                else self.duration.strftime(TIME_FORMAT))
    program_uri = ("x-rincon-buzzer:0" if self.program_uri is None
                   else self.program_uri)
    args = [
        ('StartLocalTime', self.start_time.strftime(TIME_FORMAT)),
        ('Duration', duration),
        ('Recurrence', self.recurrence),
        ('Enabled', '1' if self.enabled else '0'),
        ('RoomUUID', self.zone.uid),
        ('ProgramURI', program_uri),
        ('ProgramMetaData', self.program_metadata),
        ('PlayMode', self.play_mode),
        ('Volume', self.volume),
        ('IncludeLinkedZones', '1' if self.include_linked_zones else '0'),
    ]
    if self._alarm_id is None:
        response = self.zone.alarmClock.CreateAlarm(args)
        self._alarm_id = response['AssignedID']
        Alarm._all_alarms[self._alarm_id] = self
    else:
        args.insert(0, ('ID', self._alarm_id))
        self.zone.alarmClock.UpdateAlarm(args)
Save the alarm to the Sonos system .
25,960
def remove(self):
    """Remove the alarm from the Sonos system.

    Also drops the alarm from the local registry and clears its id so a
    subsequent save() creates a fresh alarm.
    """
    self.zone.alarmClock.DestroyAlarm([('ID', self._alarm_id)])
    # pop with a default replaces the original try/del/except KeyError
    Alarm._all_alarms.pop(self._alarm_id, None)
    self._alarm_id = None
Remove the alarm from the Sonos system .
25,961
def main():
    """Run the main script: query a Sonos device and dump service data."""
    parser = argparse.ArgumentParser(
        prog='', description='Dump data about Sonos services')
    parser.add_argument(
        '-d', '--device', default=None,
        help='The ip address of the device to query. '
             'If none is supplied, a random device will be used')
    parser.add_argument(
        '-s', '--service', default=None,
        help='Dump data relating to services matching this regexp '
             'only, e.g. %(prog)s -s GroupRenderingControl')
    args = parser.parse_args()

    if args.device:
        device = soco.SoCo(args.device)
    else:
        device = soco.discovery.any_soco()
    print("Querying %s" % device.player_name)

    for srv in (cls(device) for cls in soco.services.Service.__subclasses__()):
        if args.service is None or re.search(args.service, srv.service_type):
            print_details(srv)
Run the main script
25,962
# Print the details of a UPnP service: a boxed header with the service
# type, then for each action its name (underlined) and its input and
# output arguments. Writes to stdout only; returns None.
# NOTE(review): exact whitespace inside the printed strings is
# significant to the report layout — left byte-identical.
def print_details ( srv ) : name = srv . service_type box = "=" * 79 print ( "{0}\n|{1:^77}|\n{0}\n" . format ( box , name ) ) for action in srv . iter_actions ( ) : print ( action . name ) print ( "~" * len ( action . name ) ) print ( "\n Input" ) for arg in action . in_args : print ( " " , arg ) print ( "\n Output" ) for arg in action . out_args : print ( " " , arg ) print ( "\n\n" )
Print the details of a service
25,963
# Record and store the current state of a device: media URI (detecting
# whether the local queue or a cloud queue is playing), rendering
# settings (volume, mute, bass, treble, loudness), queue-playback state
# (play mode, crossfade, track position) or stream metadata, transport
# state (coordinators only), and a copy of the queue via _save_queue().
# Returns self.is_coordinator.
# NOTE(review): statement grouping after the coordinator check is not
# fully determined by the flattened token stream — left byte-identical.
def snapshot ( self ) : self . is_coordinator = self . device . is_coordinator media_info = self . device . avTransport . GetMediaInfo ( [ ( 'InstanceID' , 0 ) ] ) self . media_uri = media_info [ 'CurrentURI' ] if self . media_uri . split ( ':' ) [ 0 ] == 'x-rincon-queue' : if self . media_uri . split ( '#' ) [ 1 ] == '0' : self . is_playing_queue = True else : self . is_playing_cloud_queue = True self . volume = self . device . volume self . mute = self . device . mute self . bass = self . device . bass self . treble = self . device . treble self . loudness = self . device . loudness if self . is_playing_queue : self . play_mode = self . device . play_mode self . cross_fade = self . device . cross_fade track_info = self . device . get_current_track_info ( ) if track_info is not None : position = track_info [ 'playlist_position' ] if position != "" : self . playlist_position = int ( position ) self . track_position = track_info [ 'position' ] else : self . media_metadata = media_info [ 'CurrentURIMetaData' ] if self . is_coordinator : transport_info = self . device . get_current_transport_info ( ) if transport_info is not None : self . transport_state = transport_info [ 'current_transport_state' ] self . _save_queue ( ) return self . is_coordinator
Record and store the current state of a device .
25,964
def _save_queue ( self ) : if self . queue is not None : batch_size = 400 total = 0 num_return = batch_size while num_return == batch_size : queue_items = self . device . get_queue ( total , batch_size ) num_return = len ( queue_items ) if num_return > 0 : self . queue . append ( queue_items ) total = total + num_return
Save the current state of the queue .
25,965
def _restore_queue ( self ) : if self . queue is not None : self . device . clear_queue ( ) for queue_group in self . queue : for queue_item in queue_group : self . device . add_uri_to_queue ( queue_item . uri )
Restore the previous state of the queue .
25,966
def default_params(model, update_dict=None):
    """Load default model parameters, optionally updated with overrides.

    Returns None for an unknown model with no overrides; otherwise a
    (possibly updated) copy of the defaults.
    """
    params = parameters[model].copy() if model in parameters else None
    if update_dict:
        if params is None:
            params = {}
        params.update(update_dict)
    return params
Loads and updates default model parameters
25,967
# Create a plot describing how well reduced data correlates with the
# raw data, as a function of the number of components (2..max_dims-1).
# Returns a dict with an 'average' curve (over the stacked data) and
# 'individual' curves per array; optionally plots them with seaborn.
# NOTE(review): sns.tsplot is deprecated in modern seaborn — confirm
# the pinned seaborn version before upgrading.
def describe ( x , reduce = 'IncrementalPCA' , max_dims = None , show = True , format_data = True ) : warnings . warn ( 'When input data is large, this computation can take a long time.' ) def summary ( x , max_dims = None ) : if type ( x ) is list : x = np . vstack ( x ) if max_dims is None : if x . shape [ 1 ] > x . shape [ 0 ] : max_dims = x . shape [ 0 ] else : max_dims = x . shape [ 1 ] alldims = get_cdist ( x ) corrs = [ ] for dims in range ( 2 , max_dims ) : reduced = get_cdist ( reducer ( x , ndims = dims , reduce = reduce ) ) corrs . append ( get_corr ( alldims , reduced ) ) del reduced return corrs if format_data : x = formatter ( x , ppca = True ) result = { } result [ 'average' ] = summary ( x , max_dims ) result [ 'individual' ] = [ summary ( x_i , max_dims ) for x_i in x ] if max_dims is None : max_dims = len ( result [ 'average' ] ) if show : fig , ax = plt . subplots ( ) ax = sns . tsplot ( data = result [ 'individual' ] , time = [ i for i in range ( 2 , max_dims + 2 ) ] , err_style = "unit_traces" ) ax . set_title ( 'Correlation with raw data by number of components' ) ax . set_ylabel ( 'Correlation' ) ax . set_xlabel ( 'Number of components' ) plt . show ( ) return result
Create a plot describing correlation with the raw data as a function of the number of components.
25,968
def missing_inds(x, format_data=True):
    """Return the indices of missing (NaN) data.

    For each array, returns an array of the row indices that contain a
    NaN, or None if the array has no NaNs. A single result is returned
    unwrapped; multiple arrays return a list of results.
    """
    if format_data:
        x = formatter(x, ppca=False)
    inds = []
    for arr in x:
        nan_rows = np.argwhere(np.isnan(arr))
        # BUG FIX: was `nan_rows.size is 0` — identity comparison with an
        # int literal is fragile (SyntaxWarning on Python 3.8+); use ==.
        # Also computes argwhere once instead of twice.
        if nan_rows.size == 0:
            inds.append(None)
        else:
            inds.append(nan_rows[:, 0])
    if len(inds) > 1:
        return inds
    return inds[0]
Returns indices of missing data
25,969
def normalize(x, normalize='across', internal=False, format_data=True):
    """Z-transform the columns or rows of an array or list of arrays.

    'across' z-scores each column against the stacked data, 'within'
    against each array's own column, 'row' against each row. False/None
    returns the input untouched.
    """
    assert normalize in ['across', 'within', 'row', False, None], \
        "scale_type must be across, within, row or none."
    if normalize in [False, None]:
        return x
    if format_data:
        x = formatter(x, ppca=True)

    def zscore(ref, y):
        # Constant vectors (single distinct value in y) map to zeros to
        # avoid division by a zero standard deviation
        if len(set(y)) > 1:
            return (y - np.mean(ref)) / np.std(ref)
        return np.zeros(y.shape)

    if normalize == 'across':
        stacked = np.vstack(x)
        result = [np.array([zscore(stacked[:, j], arr[:, j])
                            for j in range(arr.shape[1])]).T for arr in x]
    elif normalize == 'within':
        result = [np.array([zscore(arr[:, j], arr[:, j])
                            for j in range(arr.shape[1])]).T for arr in x]
    elif normalize == 'row':
        result = [np.array([zscore(arr[j, :], arr[j, :])
                            for j in range(arr.shape[0])]) for arr in x]

    if internal or len(result) > 1:
        return result
    return result[0]
Z - transform the columns or rows of an array or list of arrays
25,970
def _init_structures ( self , data , subjects ) : x = [ ] mu = [ ] rho2 = np . zeros ( subjects ) trace_xtx = np . zeros ( subjects ) for subject in range ( subjects ) : mu . append ( np . mean ( data [ subject ] , 1 ) ) rho2 [ subject ] = 1 trace_xtx [ subject ] = np . sum ( data [ subject ] ** 2 ) x . append ( data [ subject ] - mu [ subject ] [ : , np . newaxis ] ) return x , mu , rho2 , trace_xtx
Initializes data structures for SRM and preprocess the data .
25,971
def _likelihood ( self , chol_sigma_s_rhos , log_det_psi , chol_sigma_s , trace_xt_invsigma2_x , inv_sigma_s_rhos , wt_invpsi_x , samples ) : log_det = ( np . log ( np . diag ( chol_sigma_s_rhos ) ** 2 ) . sum ( ) + log_det_psi + np . log ( np . diag ( chol_sigma_s ) ** 2 ) . sum ( ) ) loglikehood = - 0.5 * samples * log_det - 0.5 * trace_xt_invsigma2_x loglikehood += 0.5 * np . trace ( wt_invpsi_x . T . dot ( inv_sigma_s_rhos ) . dot ( wt_invpsi_x ) ) return loglikehood
Calculate the log - likelihood function
25,972
def fit(self, X, y=None):
    """Compute the Deterministic Shared Response Model.

    Raises ValueError when there are fewer than two subjects, too few
    samples for the configured number of features, or mismatched sample
    counts between subjects.
    """
    logger.info('Starting Deterministic SRM')
    if len(X) <= 1:
        raise ValueError("There are not enough subjects "
                         "({0:d}) to train the model.".format(len(X)))
    if X[0].shape[1] < self.features:
        raise ValueError(
            "There are not enough samples to train the model with "
            "{0:d} features.".format(self.features))
    number_trs = X[0].shape[1]
    for subject_data in X:
        assert_all_finite(subject_data)
        if subject_data.shape[1] != number_trs:
            raise ValueError("Different number of samples between subjects.")
    self.w_, self.s_ = self._srm(X)
    return self
Compute the Deterministic Shared Response Model
25,973
def _objective_function ( self , data , w , s ) : subjects = len ( data ) objective = 0.0 for m in range ( subjects ) : objective += np . linalg . norm ( data [ m ] - w [ m ] . dot ( s ) , 'fro' ) ** 2 return objective * 0.5 / data [ 0 ] . shape [ 1 ]
Calculate the objective function
25,974
# Turn a list of text samples into a matrix using a vectorizer and a
# semantic text model. Handles model specs given as strings, dicts of
# {'model', 'params'}, classes, or fitted instances; built-in corpora
# ('wiki', 'nips', 'sotus') may load a pre-fit LDA model, in which case
# vectorization is skipped. Fits the models on the corpus (or the data
# when no corpus is given), then transforms the data.
# NOTE(review): the string literal below is split across two physical
# lines by the source mangling — left byte-identical.
def text2mat ( data , vectorizer = 'CountVectorizer' , semantic = 'LatentDirichletAllocation' , corpus = 'wiki' ) : if semantic is None : semantic = 'LatentDirichletAllocation' if vectorizer is None : vectorizer = 'CountVectorizer' model_is_fit = False if corpus is not None : if corpus in ( 'wiki' , 'nips' , 'sotus' , ) : if semantic == 'LatentDirichletAllocation' and vectorizer == 'CountVectorizer' : semantic = load ( corpus + '_model' ) vectorizer = None model_is_fit = True else : corpus = np . array ( load ( corpus ) . get_data ( ) ) else : corpus = np . array ( [ corpus ] ) vtype = _check_mtype ( vectorizer ) if vtype == 'str' : vectorizer_params = default_params ( vectorizer ) elif vtype == 'dict' : vectorizer_params = default_params ( vectorizer [ 'model' ] , vectorizer [ 'params' ] ) vectorizer = vectorizer [ 'model' ] elif vtype in ( 'class' , 'class_instance' ) : if hasattr ( vectorizer , 'fit_transform' ) : vectorizer_models . update ( { 'user_model' : vectorizer } ) vectorizer = 'user_model' else : raise RuntimeError ( 'Error: Vectorizer model must have fit_transform ' 'method following the scikit-learn API. See here ' 'for more details: ' 'http://scikit-learn.org/stable/data_transforms.html' ) ttype = _check_mtype ( semantic ) if ttype == 'str' : text_params = default_params ( semantic ) elif ttype == 'dict' : text_params = default_params ( semantic [ 'model' ] , semantic [ 'params' ] ) semantic = semantic [ 'model' ] elif ttype in ( 'class' , 'class_instance' ) : if hasattr ( semantic , 'fit_transform' ) : texts . update ( { 'user_model' : semantic } ) semantic = 'user_model' else : raise RuntimeError ( 'Text model must have fit_transform ' 'method following the scikit-learn API. 
See here ' 'for more details: ' 'http://scikit-learn.org/stable/data_transforms.html' ) if vectorizer : if vtype in ( 'str' , 'dict' ) : vmodel = vectorizer_models [ vectorizer ] ( ** vectorizer_params ) elif vtype == 'class' : vmodel = vectorizer_models [ vectorizer ] ( ) elif vtype == 'class_instance' : vmodel = vectorizer_models [ vectorizer ] else : vmodel = None if semantic : if ttype in ( 'str' , 'dict' ) : tmodel = texts [ semantic ] ( ** text_params ) elif ttype == 'class' : tmodel = texts [ semantic ] ( ) elif ttype == 'class_instance' : tmodel = texts [ semantic ] else : tmodel = None if not isinstance ( data , list ) : data = [ data ] if corpus is None : _fit_models ( vmodel , tmodel , data , model_is_fit ) else : _fit_models ( vmodel , tmodel , corpus , model_is_fit ) return _transform ( vmodel , tmodel , data )
Turns a list of text samples into a matrix using a vectorizer and a text model
25,975
def patch_lines(x):
    """Draw lines between groups by appending the first row of each
    array onto the end of the previous array (mutates ``x`` in place).
    """
    for idx, nxt in enumerate(x[1:]):
        x[idx] = np.vstack([x[idx], nxt[0, :]])
    return x
Draw lines between groups
25,976
def check_geo(geo):
    """Check a geo and make sure its text fields are not binary.

    Returns a shallow copy of the geo with any bytes values in
    geo.reduce and geo.kwargs decoded to str.
    """
    geo = copy.copy(geo)

    def _decode(item):
        # Decode bytes to str; pass everything else through unchanged
        if isinstance(item, six.binary_type):
            return item.decode()
        return item

    if isinstance(geo.reduce, six.binary_type):
        geo.reduce = geo.reduce.decode()
    for key in geo.kwargs.keys():
        value = geo.kwargs[key]
        if value is None:
            continue
        if isinstance(value, (list, np.ndarray)):
            geo.kwargs[key] = [_decode(i) for i in value]
        elif isinstance(value, six.binary_type):
            geo.kwargs[key] = _decode(value)
    return geo
Checks a geo and makes sure the text fields are not binary
25,977
def df2mat(data, return_labels=False):
    """Transform a Pandas DataFrame into a numpy array, one-hot encoding
    any text (object-dtype) columns.

    When ``return_labels`` is True, also returns the list of resulting
    column labels (dummy columns are named '<col>_<value>').
    """
    df_str = data.select_dtypes(include=['object'])
    df_num = data.select_dtypes(exclude=['object'])
    for colname in df_str.columns:
        df_num = df_num.join(pd.get_dummies(data[colname], prefix=colname))
    # BUG FIX: DataFrame.as_matrix was deprecated in pandas 0.23 and
    # removed in 1.0; .values is the drop-in replacement
    plot_data = df_num.values
    labels = list(df_num.columns.values)
    if return_labels:
        return plot_data, labels
    return plot_data
Transforms a Pandas DataFrame into a Numpy array with binarized text columns
25,978
# Load a .geo file or one of the example datasets. For .geo files the
# stored arrays are converted back to list/DataFrame per the saved
# 'dtype' and wrapped in a DataGeometry. Known sample names are loaded
# via _load_data; anything else raises RuntimeError. When any of
# reduce/ndims/align/normalize are given, the data is re-analyzed and
# re-plotted (without showing) instead of returned as-is.
# NOTE(review): branch nesting is ambiguous in the flattened token
# stream — left byte-identical.
def load ( dataset , reduce = None , ndims = None , align = None , normalize = None ) : if dataset [ - 4 : ] == '.geo' : geo = dd . io . load ( dataset ) if 'dtype' in geo : if 'list' in geo [ 'dtype' ] : geo [ 'data' ] = list ( geo [ 'data' ] ) elif 'df' in geo [ 'dtype' ] : geo [ 'data' ] = pd . DataFrame ( geo [ 'data' ] ) geo [ 'xform_data' ] = list ( geo [ 'xform_data' ] ) data = DataGeometry ( ** geo ) elif dataset in datadict . keys ( ) : data = _load_data ( dataset , datadict [ dataset ] ) else : raise RuntimeError ( 'No data loaded. Please specify a .geo file or ' 'one of the following sample files: weights, ' 'weights_avg, weights_sample, spiral, mushrooms, ' 'wiki, nips or sotus.' ) if data is not None : if dataset in ( 'wiki_model' , 'nips_model' , 'sotus_model' ) : return data if isinstance ( data , DataGeometry ) : if any ( [ reduce , ndims , align , normalize ] ) : from . . plot . plot import plot if ndims : if reduce is None : reduce = 'IncrementalPCA' d = analyze ( data . get_data ( ) , reduce = reduce , ndims = ndims , align = align , normalize = normalize ) return plot ( d , show = False ) else : return data else : return analyze ( data , reduce = reduce , ndims = ndims , align = align , normalize = normalize )
Load a . geo file or example data
25,979
def transform(self, data=None):
    """Return the stored transformed data, or transform new data using
    the same model parameters (format -> normalize -> reduce -> align).
    """
    if data is None:
        return self.xform_data
    formatted = format_data(
        data, semantic=self.semantic, vectorizer=self.vectorizer,
        corpus=self.corpus, ppca=True)
    normalized = normalizer(formatted, normalize=self.normalize)
    reduced = reducer(
        normalized, reduce=self.reduce,
        ndims=self.reduce['params']['n_components'])
    return aligner(reduced, align=self.align)
Return transformed data or transform new data using the same model parameters
25,980
def save(self, fname, compression='blosc'):
    """Save the data geometry object to a ``.geo`` file.

    Data is converted to HDF5-friendly structures according to
    self.dtype; the '.geo' extension is appended when missing.
    """
    if hasattr(self, 'dtype'):
        if 'list' in self.dtype:
            data = np.array(self.data)
        elif 'df' in self.dtype:
            data = {k: np.array(v).astype('str')
                    for k, v in self.data.to_dict('list').items()}
    else:
        data = self.data
    geo = {
        'data': data,
        'xform_data': np.array(self.xform_data),
        'reduce': self.reduce,
        'align': self.align,
        'normalize': self.normalize,
        'semantic': self.semantic,
        'corpus': (np.array(self.corpus) if isinstance(self.corpus, list)
                   else self.corpus),
        'kwargs': self.kwargs,
        'version': self.version,
        'dtype': self.dtype,
    }
    if fname[-4:] != '.geo':
        fname += '.geo'
    dd.io.save(fname, geo, compression=compression)
Save method for the data geometry object
25,981
def analyze(data, normalize=None, reduce=None, ndims=None, align=None,
            internal=False):
    """Wrapper for the normalize -> reduce -> align transformation chain."""
    normalized = normalizer(data, normalize=normalize, internal=internal)
    reduced = reducer(normalized, reduce=reduce, ndims=ndims,
                      internal=internal)
    return aligner(reduced, align=align)
Wrapper function for normalize - > reduce - > align transformations .
25,982
# Reduce the dimensionality of an array or list of arrays. Supports a
# deprecated model/model_params pair (folded into the reduce dict),
# warns and short-circuits on single-row data or when ndims exceeds the
# row count, applies deprecated inline normalize/align steps, and skips
# reduction when ndims is None or every array is already small enough.
# The model is resolved from a string or {'model','params'} dict, then
# fit/applied via reduce_list. Single inputs are returned unwrapped.
# NOTE(review): the `six . string_types` token is split across the two
# physical lines by the source mangling — left byte-identical.
def reduce ( x , reduce = 'IncrementalPCA' , ndims = None , normalize = None , align = None , model = None , model_params = None , internal = False , format_data = True ) : if ( model is not None ) or ( model_params is not None ) : warnings . warn ( 'Model and model params will be deprecated. Please use the \ reduce keyword. See API docs for more info: http://hypertools.readthedocs.io/en/latest/hypertools.tools.reduce.html#hypertools.tools.reduce' ) reduce = { } reduce [ 'model' ] = model reduce [ 'params' ] = model_params if reduce is None : return x else : if format_data : x = formatter ( x , ppca = True ) if np . vstack ( [ i for i in x ] ) . shape [ 0 ] == 1 : warnings . warn ( 'Cannot reduce the dimensionality of a single row of' ' data. Return zeros length of ndims' ) return [ np . zeros ( ( 1 , ndims ) ) ] if ndims : if np . vstack ( [ i for i in x ] ) . shape [ 0 ] < ndims : warnings . warn ( 'The number of rows in your data is less than ndims.' ' The data will be reduced to the number of rows.' ) if normalize is not None : warnings . warn ( 'The normalize argument will be deprecated for this function. Please use the \ analyze function to perform combinations of these transformations. See API docs for more info: http://hypertools.readthedocs.io/en/latest/hypertools.analyze.html#hypertools.analyze' ) x = normalizer ( x , normalize = normalize ) if align is not None : warnings . warn ( 'The align argument will be deprecated for this function. Please use the \ analyze function to perform combinations of these transformations. See API docs for more info: http://hypertools.readthedocs.io/en/latest/hypertools.analyze.html#hypertools.analyze' ) x = aligner ( x , align = align ) if ndims is None : return x elif all ( [ i . shape [ 1 ] <= ndims for i in x ] ) : return x if type ( reduce ) in [ str , np . string_ ] : model = models [ reduce ] model_params = { 'n_components' : ndims } elif type ( reduce ) is dict : if isinstance ( ( reduce [ 'model' ] ) , six . 
string_types ) : model = models [ reduce [ 'model' ] ] if reduce [ 'params' ] is None : model_params = { 'n_components' : ndims } else : model_params = reduce [ 'params' ] if ndims : model_params = { 'n_components' : ndims } model = model ( ** model_params ) x_reduced = reduce_list ( x , model ) if internal or len ( x_reduced ) > 1 : return x_reduced else : return x_reduced [ 0 ]
Reduces dimensionality of an array or list of arrays
25,983
# Perform a clustering analysis and return a list of cluster labels.
# The cluster model may be a string name or a {'model','params'} dict;
# HDBSCAN requires the optional hdbscan package (>=0.8.11) and ignores
# n_clusters. The ndims argument is deprecated and ignored. Data is
# optionally formatted, stacked, fit, and the fitted labels returned.
def cluster ( x , cluster = 'KMeans' , n_clusters = 3 , ndims = None , format_data = True ) : if cluster == None : return x elif ( isinstance ( cluster , six . string_types ) and cluster == 'HDBSCAN' ) or ( isinstance ( cluster , dict ) and cluster [ 'model' ] == 'HDBSCAN' ) : if not _has_hdbscan : raise ImportError ( 'HDBSCAN is not installed. Please install hdbscan>=0.8.11' ) if ndims != None : warnings . warn ( 'The ndims argument is now deprecated. Ignoring dimensionality reduction step.' ) if format_data : x = formatter ( x , ppca = True ) if isinstance ( cluster , six . string_types ) : model = models [ cluster ] if cluster != 'HDBSCAN' : model_params = { 'n_clusters' : n_clusters } else : model_params = { } elif type ( cluster ) is dict : if isinstance ( cluster [ 'model' ] , six . string_types ) : model = models [ cluster [ 'model' ] ] model_params = cluster [ 'params' ] model = model ( ** model_params ) model . fit ( np . vstack ( x ) ) return list ( model . labels_ )
Performs clustering analysis and returns a list of cluster labels
25,984
def build_grab_exception(ex, curl):
    """Build a Grab exception from a pycurl exception.

    Returns None for curl error 23 when the transfer was deliberately
    interrupted by a grab callback (e.g. nobody/body_maxsize); otherwise
    maps the curl error code to the matching Grab exception type,
    defaulting to GrabNetworkError.
    """
    code, message = ex.args[0], ex.args[1]
    if code == 23:
        if getattr(curl, 'grab_callback_interrupted', None) is True:
            # Intentional interruption from a write callback; not an error
            return None
        return error.GrabNetworkError(message, ex)
    exc_map = {
        28: error.GrabTimeoutError,
        7: error.GrabConnectionError,
        67: error.GrabAuthError,
        47: error.GrabTooManyRedirectsError,
        6: error.GrabCouldNotResolveHostError,
        3: error.GrabInvalidUrl,
    }
    exc_class = exc_map.get(code, error.GrabNetworkError)
    return exc_class(message, ex)
Build Grab exception from the pycurl exception
25,985
def body_processor(self, chunk):
    """Process a chunk of the response body.

    Returns 0 to abort the transfer (nobody mode, or the configured
    body size limit was exceeded), None to continue receiving.
    """
    if self.config_nobody:
        self.curl.grab_callback_interrupted = True
        return 0
    self.response_body_bytes_read += len(chunk)
    # Stream to a file when configured, otherwise buffer in memory
    if self.body_file:
        self.body_file.write(chunk)
    else:
        self.response_body_chunks.append(chunk)
    if self.config_body_maxsize is not None:
        if self.response_body_bytes_read > self.config_body_maxsize:
            logger.debug('Response body max size limit reached: %s',
                         self.config_body_maxsize)
            self.curl.grab_callback_interrupted = True
            return 0
    return None
Process body of response .
25,986
def debug_processor(self, _type, text):
    """Process request debug details emitted by pycurl.

    Accumulates the outgoing request head and body (encoding text to
    UTF-8 bytes) and, when verbose logging is on, logs informational,
    incoming-header and outgoing-header records with a marker prefix.
    """
    if _type == pycurl.INFOTYPE_HEADER_OUT:
        if isinstance(text, six.text_type):
            text = text.encode('utf-8')
        self.request_head += text
    if _type == pycurl.INFOTYPE_DATA_OUT:
        if isinstance(text, six.text_type):
            text = text.encode('utf-8')
        self.request_body += text
    if self.verbose_logging:
        markers = {
            pycurl.INFOTYPE_TEXT: 'i',
            pycurl.INFOTYPE_HEADER_IN: '<',
            pycurl.INFOTYPE_HEADER_OUT: '>',
        }
        if _type in markers:
            logger.debug('%s: %s', markers[_type], text.rstrip())
Process request details .
25,987
def extract_cookiejar(self):
    """Extract the cookies known to the pycurl instance into a CookieJar.

    Each INFO_COOKIELIST entry is a tab-separated Netscape cookie
    record; a '#httponly_' domain prefix marks HttpOnly cookies.
    """
    cookiejar = CookieJar()
    for line in self.curl.getinfo(pycurl.INFO_COOKIELIST):
        values = line.split('\t')
        domain = values[0].lower()
        httponly = domain.startswith('#httponly_')
        if httponly:
            domain = domain.replace('#httponly_', '')
        cookie = create_cookie(
            name=values[5],
            value=values[6],
            domain=domain,
            path=values[2],
            secure=values[3] == "TRUE",
            expires=int(values[4]) if values[4] else None,
            httponly=httponly,
        )
        cookiejar.set_cookie(cookie)
    return cookiejar
Extract cookies that pycurl instance knows .
25,988
def default_logging(grab_log=None, network_log=None, level=logging.DEBUG,
                    mode='a', propagate_network_logger=False):
    """Customize logging output to display all log messages except grab
    network logs.

    Optionally writes 'grab' and 'grab.network' logs to the given files.
    """
    logging.basicConfig(level=level)

    network_logger = logging.getLogger('grab.network')
    # Keep noisy network logs out of the root handler unless requested
    network_logger.propagate = propagate_network_logger
    if network_log:
        network_logger.addHandler(logging.FileHandler(network_log, mode))
        network_logger.setLevel(level)

    grab_logger = logging.getLogger('grab')
    if grab_log:
        grab_logger.addHandler(logging.FileHandler(grab_log, mode))
        grab_logger.setLevel(level)
Customize logging output to display all log messages except grab network logs .
25,989
def save_list(lst, path):
    """Save items from ``lst`` to the file at ``path``, one per line.

    String/bytes items are written as-is (as bytes); everything else is
    serialized to JSON first.
    """
    with open(path, 'wb') as out:
        lines = [
            make_str(item)
            if isinstance(item, (six.text_type, six.binary_type))
            else make_str(json.dumps(item))
            for item in lst
        ]
        out.write(b'\n'.join(lines) + b'\n')
Save items from list to the file .
25,990
def parse_proxy_line(line):
    """Parse proxy details from a raw text line.

    Returns (host, port, user, password); user and password are None
    for lines without authentication. Raises InvalidProxyLine when the
    line matches neither format.
    """
    line = line.strip()
    simple = RE_SIMPLE_PROXY.search(line)
    if simple:
        return simple.group(1), simple.group(2), None, None
    auth = RE_AUTH_PROXY.search(line)
    if auth:
        return auth.groups()
    raise InvalidProxyLine('Invalid proxy line: %s' % line)
Parse proxy details from the raw text line .
25,991
def parse_raw_list_data(data, proxy_type='http', proxy_userpwd=None):
    """Iterate over proxy servers found in the raw data.

    Blank lines and '#' comments are skipped; invalid lines are logged
    and ignored. A default user:password may be supplied for lines that
    carry no credentials.
    """
    if not isinstance(data, six.text_type):
        data = data.decode('utf-8')
    for raw_line in data.splitlines():
        line = raw_line.strip().replace(' ', '')
        if not line or line.startswith('#'):
            continue
        try:
            host, port, username, password = parse_proxy_line(line)
        except InvalidProxyLine as ex:
            logger.error(ex)
        else:
            if username is None and proxy_userpwd is not None:
                username, password = proxy_userpwd.split(':')
            yield Proxy(host, port, username, password, proxy_type)
Iterate over proxy servers found in the raw data
25,992
def load(self):
    """Load the proxy list from the configured proxy source and reset
    the round-robin iterator over it."""
    proxies = self._source.load()
    self._list = proxies
    self._list_iter = itertools.cycle(proxies)
Load proxy list from configured proxy source
25,993
def get_random_proxy(self):
    """Return a random proxy from the loaded list."""
    return self._list[randint(0, len(self._list) - 1)]
Return random proxy
25,994
# Clone a Task instance. Copies all attributes, resets network try
# count, bumps the task try count, and clears cache flags unless
# overridden by kwargs. Exactly one of url / grab / grab_config may be
# supplied; supplying two together raises SpiderMisuseError. Remaining
# kwargs are set as attributes on the clone, and the delay option is
# re-processed.
def clone ( self , ** kwargs ) : attr_copy = self . __dict__ . copy ( ) if attr_copy . get ( 'grab_config' ) is not None : del attr_copy [ 'url' ] if not attr_copy [ 'priority_set_explicitly' ] : attr_copy [ 'priority' ] = None task = Task ( ** attr_copy ) if 'network_try_count' not in kwargs : task . network_try_count = 0 if 'task_try_count' not in kwargs : task . task_try_count = self . task_try_count + 1 if 'refresh_cache' not in kwargs : task . refresh_cache = False if 'disable_cache' not in kwargs : task . disable_cache = False if kwargs . get ( 'url' ) is not None and kwargs . get ( 'grab' ) is not None : raise SpiderMisuseError ( 'Options url and grab could not be ' 'used together' ) if ( kwargs . get ( 'url' ) is not None and kwargs . get ( 'grab_config' ) is not None ) : raise SpiderMisuseError ( 'Options url and grab_config could not ' 'be used together' ) if ( kwargs . get ( 'grab' ) is not None and kwargs . get ( 'grab_config' ) is not None ) : raise SpiderMisuseError ( 'Options grab and grab_config could not ' 'be used together' ) if kwargs . get ( 'grab' ) : task . setup_grab_config ( kwargs [ 'grab' ] . dump_config ( ) ) del kwargs [ 'grab' ] elif kwargs . get ( 'grab_config' ) : task . setup_grab_config ( kwargs [ 'grab_config' ] ) del kwargs [ 'grab_config' ] elif kwargs . get ( 'url' ) : task . url = kwargs [ 'url' ] if task . grab_config : task . grab_config [ 'url' ] = kwargs [ 'url' ] del kwargs [ 'url' ] for key , value in kwargs . items ( ) : setattr ( task , key , value ) task . process_delay_option ( None ) return task
Clone Task instance .
25,995
def copy_config(config, mutable_config_keys=MUTABLE_CONFIG_KEYS):
    """Copy a grab config with correct handling of mutable config values.

    The top-level mapping is shallow-copied, then each mutable value is
    copied again so the clone does not share state with the original.
    """
    cloned = copy(config)
    for key in mutable_config_keys:
        cloned[key] = copy(config[key])
    return cloned
Copy grab config with correct handling of mutable config values .
25,996
def reset(self):
    """Reset all attributes which could have been modified during the
    previous request, or which are not yet initialized on a new Grab
    instance. Also resets the transport, if one is attached."""
    for attr in ('request_head', 'request_body', 'request_method',
                 'request_counter', 'exception'):
        setattr(self, attr, None)
    if self.transport:
        self.transport.reset()
Reset all attributes which could be modified during previous request or which is not initialized yet if this is the new Grab instance .
25,997
def clone(self, **kwargs):
    """Create a clone of this Grab instance.

    The clone gets a copy of the config, document, clonable attributes
    and cookies. Any keyword arguments are applied to the clone's
    configuration via setup().
    """
    grab = Grab(transport=self.transport_param)
    grab.config = self.dump_config()
    grab.doc = self.doc.copy()
    for attr in self.clonable_attributes:
        setattr(grab, attr, getattr(self, attr))
    grab.cookies = deepcopy(self.cookies)
    if kwargs:
        grab.setup(**kwargs)
    return grab
Create clone of Grab instance .
25,998
def adopt(self, grab):
    """Copy the state of another Grab instance into this one.

    Loads its config, copies its document (rebinding it to self),
    its clonable attributes, and a deep copy of its cookies.
    """
    self.load_config(grab.config)
    self.doc = grab.doc.copy(new_grab=self)
    for attr in self.clonable_attributes:
        setattr(self, attr, getattr(grab, attr))
    self.cookies = deepcopy(grab.cookies)
Copy the state of another Grab instance .
25,999
def dump_config(self):
    """Make a clone of the current config, embedding the cookie jar
    contents under the 'state' key."""
    conf = copy_config(self.config, self.mutable_config_keys)
    conf['state'] = {
        'cookiejar_cookies': list(self.cookies.cookiejar),
    }
    return conf
Make clone of current config .