idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
25,800
def get_soa_record(client, zone_id, zone_name):
    """Get the SOA record for zone_name from zone_id.

    Args:
        client: boto3 Route53 client.
        zone_id (str): hosted zone id to query.
        zone_name (str): zone name to start the record listing at.

    Returns:
        SOARecord: wrapper around the first matching record set.
    """
    result = client.list_resource_record_sets(
        HostedZoneId=zone_id,
        StartRecordName=zone_name,
        StartRecordType="SOA",
        MaxItems="1",
    )
    record_set = result["ResourceRecordSets"][0]
    return SOARecord(record_set)
Gets the SOA record for zone_name from zone_id .
25,801
def create_route53_zone(client, zone_name):
    """Create the given zone_name if it doesn't already exist.

    Also ensures the zone's SOA negative-caching TTL (min_ttl) is 300.

    Args:
        client: boto3 Route53 client.
        zone_name (str): name of the hosted zone; a trailing dot is
            appended if missing (Route53 zone names are fully qualified).

    Returns:
        str: the hosted zone id.
    """
    # Route53 stores zone names fully qualified (with a trailing dot).
    if not zone_name.endswith("."):
        zone_name += "."
    zone_id = get_or_create_hosted_zone(client, zone_name)
    old_soa = get_soa_record(client, zone_id, zone_name)
    # Nothing to do if the negative caching value is already 300.
    if old_soa.text.min_ttl == "300":
        return zone_id
    new_soa = copy.deepcopy(old_soa)
    logger.debug("Updating negative caching value on zone %s to 300.",
                 zone_name)
    new_soa.text.min_ttl = "300"
    # UPSERT the SOA record, keeping the original record TTL but with the
    # updated min_ttl serialized into the record value.
    client.change_resource_record_sets(
        HostedZoneId=zone_id,
        ChangeBatch={
            "Comment": "Update SOA min_ttl to 300.",
            "Changes": [
                {
                    "Action": "UPSERT",
                    "ResourceRecordSet": {
                        "Name": zone_name,
                        "Type": "SOA",
                        "TTL": old_soa.ttl,
                        "ResourceRecords": [
                            {"Value": str(new_soa.text)}
                        ]
                    }
                },
            ]
        }
    )
    return zone_id
Creates the given zone_name if it doesn't already exist.
25,802
def load_object_from_string(fqcn):
    """Convert a '.'-delimited string to the Python object it names.

    Args:
        fqcn (str): fully qualified name, e.g. ``"os.path.join"``. A bare
            name (no dot) is looked up in ``__main__``.

    Returns:
        the named module attribute.
    """
    if "." in fqcn:
        module_path, object_name = fqcn.rsplit(".", 1)
    else:
        module_path, object_name = "__main__", fqcn
    # Importing registers the module in sys.modules if not already there.
    importlib.import_module(module_path)
    return getattr(sys.modules[module_path], object_name)
Converts a '.'-delimited string to a Python object.
25,803
def merge_map(a, b):
    """Recursively merge elements of argument b into argument a.

    Lists concatenate; dicts merge key-by-key (recursing into shared
    keys); any other type combination resolves in favor of ``b``.
    Note: ``a`` is mutated in place when both arguments are dicts.
    """
    if isinstance(a, list) and isinstance(b, list):
        return a + b
    if not (isinstance(a, dict) and isinstance(b, dict)):
        return b
    for key, value in b.items():
        a[key] = merge_map(a[key], value) if key in a else value
    return a
Recursively merge elements of argument b into argument a .
25,804
def yaml_to_ordered_dict(stream, loader=yaml.SafeLoader):
    """Provide a yaml.load alternative with preserved dictionary order.

    Also rejects duplicate keys in the places where stacker configs
    must be unambiguous.

    Args:
        stream: YAML string or stream to parse.
        loader: base YAML loader class to extend (defaults to SafeLoader).

    Returns:
        OrderedDict: parsed document with key order preserved.
    """
    class OrderedUniqueLoader(loader):
        # Keys that may not appear twice among siblings at the same level.
        NO_DUPE_SIBLINGS = ["stacks", "class_path"]
        # Keys whose child mapping may not contain duplicate keys.
        NO_DUPE_CHILDREN = ["stacks"]

        def _error_mapping_on_dupe(self, node, node_name):
            # Raise if the given mapping node contains duplicate keys.
            if isinstance(node, MappingNode):
                mapping = {}
                for n in node.value:
                    a = n[0]
                    b = mapping.get(a.value, None)
                    if b:
                        msg = "{} mapping cannot have duplicate keys {} {}"
                        raise ConstructorError(
                            msg.format(node_name, b.start_mark, a.start_mark)
                        )
                    mapping[a.value] = a

        def _validate_mapping(self, node, deep=False):
            # Construct an OrderedDict while enforcing the dupe rules.
            if not isinstance(node, MappingNode):
                raise ConstructorError(
                    None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark
                )
            mapping = OrderedDict()
            for key_node, value_node in node.value:
                key = self.construct_object(key_node, deep=deep)
                try:
                    # YAML keys must be hashable to be dict keys.
                    hash(key)
                except TypeError as exc:
                    raise ConstructorError(
                        "while constructing a mapping", node.start_mark,
                        "found unhashable key (%s)" % exc,
                        key_node.start_mark
                    )
                if key in mapping and key in self.NO_DUPE_SIBLINGS:
                    msg = "{} key cannot have duplicate siblings {} {}"
                    raise ConstructorError(
                        msg.format(key, node.start_mark, key_node.start_mark)
                    )
                if key in self.NO_DUPE_CHILDREN:
                    # e.g. a "stacks" mapping may not repeat stack names.
                    self._error_mapping_on_dupe(value_node, key_node.value)
                value = self.construct_object(value_node, deep=deep)
                mapping[key] = value
            return mapping

        def construct_mapping(self, node, deep=False):
            if isinstance(node, MappingNode):
                self.flatten_mapping(node)
            return self._validate_mapping(node, deep=deep)

        def construct_yaml_map(self, node):
            data = OrderedDict()
            yield data
            value = self.construct_mapping(node)
            data.update(value)

    OrderedUniqueLoader.add_constructor(
        u'tag:yaml.org,2002:map', OrderedUniqueLoader.construct_yaml_map,
    )
    return yaml.load(stream, OrderedUniqueLoader)
Provides yaml . load alternative with preserved dictionary order .
25,805
def cf_safe_name(name):
    """Convert a name to a safe string for a CloudFormation resource.

    Strips everything that is not alphanumeric and CamelCases the
    remaining word fragments.
    """
    words = re.findall(r"[a-zA-Z0-9]+", name)
    return "".join(uppercase_first_letter(word) for word in words)
Converts a name to a safe string for a Cloudformation resource .
25,806
def get_config_directory():
    """Return the directory the config file is located in.

    Re-parses the command line arguments to find the config file the
    user pointed stacker at.
    """
    # Local import to avoid a circular dependency with the commands module.
    from .commands.stacker import Stacker
    namespace = Stacker().parse_args()
    return os.path.dirname(namespace.config.name)
Return the directory the config file is located in .
25,807
def read_value_from_path(value):
    """Enable translators to read values from files.

    A value of the form ``file://<path>`` is replaced by the content of
    the file at ``<path>`` (relative to the config directory); any other
    value is returned unchanged.
    """
    if not value.startswith('file://'):
        return value
    path = value.split('file://', 1)[1]
    relative_path = os.path.join(get_config_directory(), path)
    with open(relative_path) as read_file:
        return read_file.read()
Enables translators to read values from files .
25,808
def ensure_s3_bucket(s3_client, bucket_name, bucket_region):
    """Ensure an s3 bucket exists; if it does not, then create it.

    Args:
        s3_client: boto3 S3 client.
        bucket_name (str): bucket to check/create.
        bucket_region (str): region the bucket should be created in.

    Raises:
        botocore.exceptions.ClientError: on access denial or any other
            error besides the bucket simply not existing.
    """
    try:
        # head_bucket raises ClientError if the bucket is missing or
        # inaccessible.
        s3_client.head_bucket(Bucket=bucket_name)
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Message'] == "Not Found":
            logger.debug("Creating bucket %s.", bucket_name)
            create_args = {"Bucket": bucket_name}
            # Regions other than us-east-1 require an explicit
            # LocationConstraint.
            location_constraint = s3_bucket_location_constraint(
                bucket_region
            )
            if location_constraint:
                create_args["CreateBucketConfiguration"] = {
                    "LocationConstraint": location_constraint
                }
            s3_client.create_bucket(**create_args)
        elif e.response['Error']['Message'] == "Forbidden":
            # Bucket exists but belongs to someone else (names are global).
            logger.exception("Access denied for bucket %s. Did " +
                             "you remember to use a globally unique name?",
                             bucket_name)
            raise
        else:
            logger.exception("Error creating bucket %s. Error %s",
                             bucket_name, e.response)
            raise
Ensure an S3 bucket exists; if it does not, then create it.
25,809
def create_cache_directories(self):
    """Ensure that SourceProcessor cache directories exist.

    The previous implementation used isdir checks followed by os.mkdir,
    which is racy (TOCTOU: a concurrent stacker run could create the
    directory between the check and the mkdir, raising FileExistsError)
    and failed if any parent directory was missing. os.makedirs with
    exist_ok=True is atomic per directory, idempotent, and creates any
    missing parents.
    """
    os.makedirs(self.stacker_cache_dir, exist_ok=True)
    os.makedirs(self.package_cache_dir, exist_ok=True)
Ensure that SourceProcessor cache directories exist .
25,810
def get_package_sources ( self ) : for config in self . sources . get ( 'local' , [ ] ) : self . fetch_local_package ( config = config ) for config in self . sources . get ( 's3' , [ ] ) : self . fetch_s3_package ( config = config ) for config in self . sources . get ( 'git' , [ ] ) : self . fetch_git_package ( config = config )
Make remote python packages available for local use .
25,811
def fetch_local_package(self, config):
    """Make a local path available to the current stacker config.

    Local sources are resolved relative to the current working directory.
    """
    self.update_paths_and_config(
        config=config,
        pkg_dir_name=config['source'],
        pkg_cache_dir=os.getcwd(),
    )
Make a local path available to current stacker config .
25,812
def fetch_git_package(self, config):
    """Make a remote git repository available for local use.

    Clones into a temp dir first and then moves the checkout into the
    package cache, so a failed/interrupted clone never leaves a partial
    directory in the cache.

    Args:
        config (dict): package source config; must contain 'uri' and may
            contain 'commit'/'tag'/'branch', 'paths' and 'configs'.
    """
    # Imported here so gitpython is only required when git sources are
    # actually used.
    from git import Repo
    ref = self.determine_git_ref(config)
    dir_name = self.sanitize_git_path(uri=config['uri'], ref=ref)
    cached_dir_path = os.path.join(self.package_cache_dir, dir_name)
    if not os.path.isdir(cached_dir_path):
        logger.debug("Remote repo %s does not appear to have been "
                     "previously downloaded - starting clone to %s",
                     config['uri'], cached_dir_path)
        tmp_dir = tempfile.mkdtemp(prefix='stacker')
        try:
            tmp_repo_path = os.path.join(tmp_dir, dir_name)
            with Repo.clone_from(config['uri'], tmp_repo_path) as repo:
                # Detach to the resolved ref and force the working tree
                # to match it.
                repo.head.reference = ref
                repo.head.reset(index=True, working_tree=True)
            # Only publish the checkout to the cache once it is complete.
            shutil.move(tmp_repo_path, self.package_cache_dir)
        finally:
            shutil.rmtree(tmp_dir)
    else:
        logger.debug("Remote repo %s appears to have been previously "
                     "cloned to %s -- bypassing download",
                     config['uri'], cached_dir_path)
    self.update_paths_and_config(config=config, pkg_dir_name=dir_name)
Make a remote git repository available for local use .
25,813
def update_paths_and_config(self, config, pkg_dir_name, pkg_cache_dir=None):
    """Handle remote source defined sys.paths & configs.

    Appends the package's requested paths (or its root when none are
    given) to sys.path, and queues any listed config files for merging.
    """
    if pkg_cache_dir is None:
        pkg_cache_dir = self.package_cache_dir
    cached_dir_path = os.path.join(pkg_cache_dir, pkg_dir_name)
    requested_paths = config.get('paths')
    if requested_paths:
        for path in requested_paths:
            path_to_append = os.path.join(cached_dir_path, path)
            logger.debug("Appending \"%s\" to python sys.path",
                         path_to_append)
            sys.path.append(path_to_append)
    else:
        sys.path.append(cached_dir_path)
    for config_filename in config.get('configs') or []:
        self.configs_to_merge.append(
            os.path.join(cached_dir_path, config_filename))
Handle remote source defined sys . paths & configs .
25,814
def git_ls_remote(self, uri, ref):
    """Determine the latest commit id for a given ref.

    Shells out to ``git ls-remote``; the output is tab-separated
    ``<commit-id>\\t<ref>`` lines (bytes).

    Raises:
        ValueError: if git reports nothing for the ref.
    """
    logger.debug("Invoking git to retrieve commit id for repo %s...", uri)
    output = subprocess.check_output(['git', 'ls-remote', uri, ref])
    if b"\t" not in output:
        raise ValueError("Ref \"%s\" not found for repo %s." % (ref, uri))
    commit_id = output.split(b"\t")[0]
    logger.debug("Matching commit id found: %s", commit_id)
    return commit_id
Determine the latest commit id for a given ref .
25,815
def determine_git_ref(self, config):
    """Determine the ref to be used for git checkout.

    Exactly one of 'commit', 'tag' or 'branch' may be set; a commit or
    tag is used verbatim, otherwise the remote is queried for the
    branch's (or default) head.
    """
    specified = [key for key in ('commit', 'tag', 'branch')
                 if config.get(key)]
    if len(specified) > 1:
        raise ImportError("Fetching remote git sources failed: "
                          "conflicting revisions (e.g. 'commit', 'tag', "
                          "'branch') specified for a package source")
    if config.get('commit'):
        ref = config['commit']
    elif config.get('tag'):
        ref = config['tag']
    else:
        ref = self.git_ls_remote(config['uri'],
                                 self.determine_git_ls_remote_ref(config))
    # git ls-remote returns bytes; normalize to str on Python 3.
    if sys.version_info[0] > 2 and isinstance(ref, bytes):
        return ref.decode()
    return ref
Determine the ref to be used for git checkout .
25,816
def sanitize_git_path(self, uri, ref=None):
    """Take a git URI and ref and convert it to a directory-safe path.

    Strips a trailing '.git', sanitizes the remaining URI and appends
    '-<ref>' when a ref is given.
    """
    trimmed = uri[:-4] if uri.endswith('.git') else uri
    dir_name = self.sanitize_uri_path(trimmed)
    if ref is not None:
        dir_name += "-%s" % ref
    return dir_name
Take a git URI and ref and converts it to a directory safe path .
25,817
def run_command(provider, context, command, capture=False, interactive=False,
                ignore_status=False, quiet=False, stdin=None, env=None,
                **kwargs):
    """Run a custom command as a hook.

    Args:
        provider: stacker provider (part of the hook API; unused here).
        context: stacker context (part of the hook API; unused here).
        command: command (string or argv list) passed to subprocess.Popen.
        capture (bool): capture and return the command's stdout/stderr.
        interactive (bool): inherit the parent's stdin instead of closing it.
        ignore_status (bool): treat a non-zero exit code as success.
        quiet (bool): discard the command's stdout/stderr.
        stdin: data to feed to the process's stdin.
        env (dict): extra environment variables, overlaid on os.environ.
        **kwargs: passed through to subprocess.Popen.

    Returns:
        dict: returncode/stdout/stderr on success (or when ignore_status
        is set); None when the command fails.
    """
    if quiet and capture:
        # quiet discards output while capture collects it; they are
        # mutually exclusive.
        raise ImproperlyConfigured(
            __name__ + '.run_command',
            'Cannot enable `quiet` and `capture` options simultaneously')
    if quiet:
        out_err_type = _devnull()
    elif capture:
        out_err_type = PIPE
    else:
        out_err_type = None
    if interactive:
        in_type = None
    elif stdin:
        in_type = PIPE
    else:
        in_type = _devnull()
    if env:
        # Overlay the supplied variables on the current environment.
        full_env = os.environ.copy()
        full_env.update(env)
        env = full_env
    logger.info('Running command: %s', command)
    proc = Popen(command, stdin=in_type, stdout=out_err_type,
                 stderr=out_err_type, env=env, **kwargs)
    try:
        out, err = proc.communicate(stdin)
        status = proc.wait()
        if status == 0 or ignore_status:
            return {
                'returncode': proc.returncode,
                'stdout': out,
                'stderr': err
            }
        # With INFO logging enabled the command was already logged above,
        # so the failure message can omit it.
        if logger.isEnabledFor(logging.INFO):
            logger.warn('Command failed with returncode %d', status)
        else:
            logger.warn('Command failed with returncode %d: %s',
                        status, command)
        return None
    finally:
        # Don't leave the child running if communicate/wait raised.
        if proc.returncode is None:
            proc.kill()
Run a custom command as a hook
25,818
def ensure_keypair_exists(provider, context, **kwargs):
    """Ensure a specific keypair exists within AWS.

    kwargs:
        keypair (str): name of the keypair (required).
        ssm_parameter_name (str): optional SSM parameter to store a newly
            generated private key in; mutually exclusive with
            public_key_path.
        ssm_key_id (str): optional KMS key id for the SSM parameter.
        public_key_path (str): optional path to an existing public key to
            import; mutually exclusive with ssm_parameter_name.
        profile (str): optional AWS profile for the session.

    Returns:
        keypair info on success, False on failure.
    """
    keypair_name = kwargs["keypair"]
    ssm_parameter_name = kwargs.get("ssm_parameter_name")
    ssm_key_id = kwargs.get("ssm_key_id")
    public_key_path = kwargs.get("public_key_path")
    if public_key_path and ssm_parameter_name:
        # Importing an existing key and generating one into SSM are
        # mutually exclusive operations.
        logger.error("public_key_path and ssm_parameter_name cannot be "
                     "specified at the same time")
        return False
    session = get_session(region=provider.region,
                          profile=kwargs.get("profile"))
    ec2 = session.client("ec2")
    keypair = get_existing_key_pair(ec2, keypair_name)
    if keypair:
        # Already present; nothing to do.
        return keypair
    if public_key_path:
        keypair = create_key_pair_from_public_key_file(
            ec2, keypair_name, public_key_path)
    elif ssm_parameter_name:
        ssm = session.client('ssm')
        keypair = create_key_pair_in_ssm(
            ec2, ssm, keypair_name, ssm_parameter_name, ssm_key_id)
    else:
        # No non-interactive option supplied; ask the user what to do.
        action, path = interactive_prompt(keypair_name)
        if action == "import":
            keypair = create_key_pair_from_public_key_file(
                ec2, keypair_name, path)
        elif action == "create":
            keypair = create_key_pair_local(ec2, keypair_name, path)
        else:
            logger.warning("no action to find keypair, failing")
    if not keypair:
        return False
    return keypair
Ensure a specific keypair exists within AWS .
25,819
def get_fqn(base_fqn, delimiter, name=None):
    """Return the fully qualified name of an object within this context.

    If the name passed already appears to be a fully qualified name
    (starts with ``base_fqn + delimiter``) it is returned unchanged.
    """
    prefix = "%s%s" % (base_fqn, delimiter)
    if name and name.startswith(prefix):
        return name
    parts = [part for part in (base_fqn, name) if part]
    return delimiter.join(parts)
Return the fully qualified name of an object within this context .
25,820
def get_targets(self):
    """Return the named targets that are specified in the config.

    The result is computed once and cached on the instance.
    """
    if not hasattr(self, "_targets"):
        self._targets = [Target(target_def)
                         for target_def in self.config.targets or []]
    return self._targets
Returns the named targets that are specified in the config .
25,821
def get_stacks(self):
    """Get the stacks for the current action.

    The Stack objects are built once from the config's stack
    definitions and cached on the instance.
    """
    if not hasattr(self, "_stacks"):
        self._stacks = [
            Stack(
                definition=stack_def,
                context=self,
                mappings=self.mappings,
                force=stack_def.name in self.force_stacks,
                locked=stack_def.locked,
                enabled=stack_def.enabled,
                protected=stack_def.protected,
            )
            for stack_def in self._get_stack_definitions()
        ]
    return self._stacks
Get the stacks for the current action .
25,822
def set_hook_data(self, key, data):
    """Set hook data for the given key.

    Args:
        key (str): unique data_key for the hook.
        data (collections.abc.Mapping): data the hook is storing.

    Raises:
        ValueError: if data is not a Mapping.
        KeyError: if data for the key was already set.
    """
    # BUGFIX: the ABC aliases in `collections` were removed in Python
    # 3.10; collections.abc.Mapping is the correct location.
    if not isinstance(data, collections.abc.Mapping):
        raise ValueError("Hook (key: %s) data must be an instance of "
                         "collections.abc.Mapping (a dictionary for "
                         "example)." % key)
    if key in self.hook_data:
        # BUGFIX: the original passed `key` as a second positional arg to
        # KeyError instead of interpolating it into the message.
        raise KeyError("Hook data for key %s already exists, each hook "
                       "must have a unique data_key." % key)
    self.hook_data[key] = data
Set hook data for the given key .
25,823
def render_parse_load(raw_config, environment=None, validate=True):
    """Encapsulate the render -> parse -> validate -> load process.

    Args:
        raw_config (str): raw stacker configuration string.
        environment (dict): optional variables for template substitution.
        validate (bool): run config schema validation before loading.

    Returns:
        the loaded Config object.
    """
    pre_rendered = render(raw_config, environment)
    rendered = process_remote_sources(pre_rendered, environment)
    config = parse(rendered)
    if config.namespace is None:
        # BUGFIX: guard against environment=None; previously this raised
        # AttributeError when no environment was supplied and the config
        # had no namespace.
        namespace = (environment or {}).get("namespace")
        if namespace:
            logger.warn("DEPRECATION WARNING: specifying namespace in the "
                        "environment is deprecated. See "
                        "https://stacker.readthedocs.io/en/latest/config.html"
                        "#namespace "
                        "for more info.")
            config.namespace = namespace
    if validate:
        config.validate()
    return load(config)
Encapsulates the render - > parse - > validate - > load process .
25,824
def render(raw_config, environment=None):
    """Render a config, using it as a template with the environment.

    Missing variables raise MissingEnvironment; syntactically invalid
    placeholders fall back to a best-effort safe substitution.
    """
    template = Template(raw_config)
    environment = environment or {}
    try:
        substituted = template.substitute(environment)
    except KeyError as e:
        # A referenced variable is missing from the environment entirely.
        raise exceptions.MissingEnvironment(e.args[0])
    except ValueError:
        # Invalid placeholder syntax (e.g. a bare "$"); substitute what
        # we can and leave the rest as-is.
        substituted = template.safe_substitute(environment)
    if not isinstance(substituted, str):
        # Python 2 compatibility: Template may hand back bytes.
        substituted = substituted.decode('utf-8')
    return substituted
Renders a config using it as a template with the environment .
25,825
def parse(raw_config):
    """Parse a raw yaml formatted stacker config.

    Historically some top-level sections were mappings keyed by name;
    these are converted to the list-of-dicts form the Config schema
    expects before constructing the Config.
    """
    config_dict = yaml_to_ordered_dict(raw_config)
    if config_dict:
        legacy_mapping_keys = ['stacks', 'pre_build', 'post_build',
                               'pre_destroy', 'post_destroy']
        for top_level_key in legacy_mapping_keys:
            top_level_value = config_dict.get(top_level_key)
            if not isinstance(top_level_value, dict):
                continue
            converted = []
            for key, value in top_level_value.items():
                entry = copy.deepcopy(value)
                if top_level_key == 'stacks':
                    # Stack entries carry their mapping key as the name.
                    entry['name'] = key
                converted.append(entry)
            config_dict[top_level_key] = converted
    try:
        return Config(config_dict, strict=True)
    except SchematicsError as e:
        raise exceptions.InvalidConfig(e.errors)
Parse a raw yaml formatted stacker config .
25,826
def load(config):
    """Load a stacker configuration: extend sys.path, register lookups.

    Returns the same config object for chaining.
    """
    if config.sys_path:
        logger.debug("Appending %s to sys.path.", config.sys_path)
        sys.path.append(config.sys_path)
        logger.debug("sys.path is now %s", sys.path)
    for key, handler in (config.lookups or {}).items():
        register_lookup_handler(key, handler)
    return config
Loads a stacker configuration by modifying sys paths loading lookups etc .
25,827
def dump(config):
    """Dump a stacker Config object as yaml.

    Returns:
        bytes: utf-8 encoded yaml document in block style.
    """
    primitive = config.to_primitive()
    return yaml.safe_dump(primitive,
                          default_flow_style=False,
                          encoding='utf-8',
                          allow_unicode=True)
Dumps a stacker Config object as yaml .
25,828
def process_remote_sources(raw_config, environment=None):
    """Stage remote package sources and merge in remote configs.

    When the config declares package_sources with configs to merge, the
    merged config is re-rendered; otherwise the raw config is returned
    untouched.
    """
    config = yaml.safe_load(raw_config)
    if config and config.get('package_sources'):
        processor = SourceProcessor(
            sources=config['package_sources'],
            stacker_cache_dir=config.get('stacker_cache_dir')
        )
        processor.get_package_sources()
        if processor.configs_to_merge:
            for remote_path in processor.configs_to_merge:
                logger.debug("Merging in remote config \"%s\"", remote_path)
                remote_config = yaml.safe_load(open(remote_path))
                # Local values win over the remote ones being merged in.
                config = merge_map(remote_config, config)
            environment = environment or {}
            return render(str(config), environment)
        return raw_config
    return raw_config
Stage remote package sources and merge in remote configs .
25,829
def _post(url, headers, body, retries=3, timeout=3.0):
    """Try up to `retries` times to POST the content.

    Args:
        url (str): target URL.
        headers (dict): HTTP headers for the request.
        body: request payload.
        retries (int): number of attempts before giving up.
        timeout (float): per-request timeout in seconds.

    Returns:
        requests.Response: the successful response.

    Raises:
        requests.exceptions.Timeout: if every attempt times out.
    """
    retry = 0
    out = None
    while out is None:
        try:
            out = requests.post(url, headers=headers, data=body,
                                timeout=timeout)
        except (requests.exceptions.Timeout, socket.timeout) as exception:
            retry += 1
            if retry == retries:
                # BUGFIX: `exception.message` does not exist on Python 3
                # exceptions; accessing it raised AttributeError and
                # masked the real timeout. Use str(exception) instead.
                raise requests.exceptions.Timeout(str(exception))
    return out
Try 3 times to request the content .
25,830
def _get_header(soap_action):
    """Return the HTTP headers for the given SOAP action.

    Includes an ACCEPT-LANGUAGE header derived from the system locale,
    always falling back to en-US.
    """
    language, _ = locale.getdefaultlocale()
    if language is None:
        language = ''
    else:
        language = language.replace('_', '-') + ', '
    return {
        'CONNECTION': 'close',
        'ACCEPT-ENCODING': 'gzip',
        'ACCEPT-LANGUAGE': '{}en-US;q=0.9'.format(language),
        'Content-Type': 'text/xml; charset="utf-8"',
        'SOAPACTION': SOAP_ACTION[soap_action]
    }
Return the HTTP headers for the given SOAP action.
25,831
def get_tracks(self, search, start=0, max_items=100):
    """Search for tracks.

    Thin wrapper over get_music_service_information with the 'tracks'
    search type.
    """
    return self.get_music_service_information(
        'tracks', search, start, max_items)
Search for tracks .
25,832
def get_albums(self, search, start=0, max_items=100):
    """Search for albums.

    Thin wrapper over get_music_service_information with the 'albums'
    search type.
    """
    return self.get_music_service_information(
        'albums', search, start, max_items)
Search for albums .
25,833
def get_artists(self, search, start=0, max_items=100):
    """Search for artists.

    Thin wrapper over get_music_service_information with the 'artists'
    search type.
    """
    return self.get_music_service_information(
        'artists', search, start, max_items)
Search for artists .
25,834
def get_playlists(self, search, start=0, max_items=100):
    """Search for playlists.

    Thin wrapper over get_music_service_information with the
    'playlists' search type.
    """
    return self.get_music_service_information(
        'playlists', search, start, max_items)
Search for playlists .
25,835
def get_music_service_information(self, search_type, search, start=0,
                                  max_items=100):
    """Search for music service information items.

    Args:
        search_type (str): one of 'artists', 'albums', 'tracks',
            'playlists'.
        search (str): search term.
        start (int): result offset.
        max_items (int): maximum number of results.

    Returns:
        dict: with 'index', 'count', 'total' and 'item_list' keys.

    Raises:
        ValueError: on an unsupported search_type.
    """
    if search_type not in ['artists', 'albums', 'tracks', 'playlists']:
        message = 'The requested search {} is not valid'.format(search_type)
        raise ValueError(message)
    # e.g. 'tracks' -> 'tracksearch', matching the service's id scheme.
    search_type = '{}earch'.format(search_type)
    parent_id = SEARCH_PREFIX.format(search_type=search_type, search=search)
    body = self._search_body(search_type, search, start, max_items)
    headers = _get_header('search')
    response = _post(self._url, headers, body, **self._http_vars)
    self._check_for_errors(response)
    result_dom = XML.fromstring(response.text.encode('utf-8'))
    search_result = result_dom.find('.//' + _ns_tag('', 'searchResult'))
    out = {'item_list': []}
    for element in ['index', 'count', 'total']:
        out[element] = search_result.findtext(_ns_tag('', element))
    # Track results use a different item element name than collections.
    item_name = ('mediaMetadata' if search_type == 'tracksearch'
                 else 'mediaCollection')
    for element in search_result.findall(_ns_tag('', item_name)):
        out['item_list'].append(get_ms_item(element, self, parent_id))
    return out
Search for music service information items .
25,836
def browse(self, ms_item=None):
    """Return the sub-elements of item, or of the root if item is None.

    Args:
        ms_item: optional music service item to browse into; must belong
            to this service.

    Returns:
        dict: with 'index', 'count', 'total' and 'item_list' keys.

    Raises:
        ValueError: if the item belongs to a different service.
        UnknownXMLStructure: if the response is not shaped as expected.
    """
    if ms_item is not None and ms_item.service_id != self._service_id:
        message = 'This music service item is not for this service'
        raise ValueError(message)
    if ms_item:
        body = self._browse_body(ms_item.item_id)
        parent_id = ms_item.extended_id
        if parent_id is None:
            parent_id = ''
    else:
        # No item given: browse from the service root.
        body = self._browse_body('root')
        parent_id = '0'
    headers = _get_header('get_metadata')
    response = _post(self._url, headers, body, **self._http_vars)
    self._check_for_errors(response)
    result_dom = XML.fromstring(really_utf8(response.text))
    xpath_search = './/' + _ns_tag('', 'getMetadataResult')
    metadata_result = list(result_dom.findall(xpath_search))
    # Exactly one result element is expected; bail out loudly otherwise.
    if len(metadata_result) != 1:
        raise UnknownXMLStructure(
            'The results XML has more than 1 \'getMetadataResult\'. This '
            'is unexpected and parsing will dis-continue.'
        )
    metadata_result = metadata_result[0]
    out = {'item_list': []}
    for element in ['index', 'count', 'total']:
        out[element] = metadata_result.findtext(_ns_tag('', element))
    for result in metadata_result:
        # Only collection/metadata children are actual items.
        if result.tag in [_ns_tag('', 'mediaCollection'),
                          _ns_tag('', 'mediaMetadata')]:
            out['item_list'].append(get_ms_item(result, self, parent_id))
    return out
Return the sub - elements of item or of the root if item is None
25,837
def id_to_extended_id(item_id, item_class):
    """Return the extended ID from an ID.

    The extended id is the class-specific prefix followed by the plain
    id; classes without a prefix yield the prefix value unchanged.
    """
    prefix = ID_PREFIX[item_class]
    return prefix + item_id if prefix else prefix
Return the extended ID from an ID .
25,838
def form_uri(item_content, item_class):
    """Form the URI for a music service element.

    Looks up the URI template for the item class and formats it with the
    item content (plus a file extension derived from the mime type when
    present).
    """
    extension = None
    if 'mime_type' in item_content:
        extension = MIME_TYPE_TO_EXTENSION[item_content['mime_type']]
    uri_template = URIS.get(item_class)
    if not uri_template:
        # No template registered for this class.
        return uri_template
    return uri_template.format(extension=extension, **item_content)
Form the URI for a music service element .
25,839
def _search_body(self, search_type, search_term, start, max_items):
    """Return the search XML body.

    Builds the SOAP envelope body with a <search> element carrying id,
    term, index and count children.
    """
    xml = self._base_body()
    XML.SubElement(xml, 's:Body')
    attrib = {'xmlns': 'http://www.sonos.com/Services/1.1'}
    search = XML.SubElement(xml[1], 'search', attrib)
    for tag, text in (('id', search_type),
                      ('term', search_term),
                      ('index', str(start)),
                      ('count', str(max_items))):
        XML.SubElement(search, tag).text = text
    return XML.tostring(xml)
Return the search XML body .
25,840
def _browse_body(self, search_id):
    """Return the browse XML body.

    Builds the SOAP envelope body with a <getMetadata> element for the
    given id; index/count are fixed at 0/100.
    """
    xml = self._base_body()
    XML.SubElement(xml, 's:Body')
    attrib = {'xmlns': 'http://www.sonos.com/Services/1.1'}
    get_metadata = XML.SubElement(xml[1], 'getMetadata', attrib)
    for tag, text in (('id', search_id), ('index', '0'), ('count', '100')):
        XML.SubElement(get_metadata, tag).text = text
    return XML.tostring(xml)
Return the browse XML body .
25,841
def _check_for_errors(self, response):
    """Check a response for errors.

    A 200 response is accepted silently; anything else is parsed as a
    SOAP fault and re-raised as a SoCoUPnPException.
    """
    if response.status_code == 200:
        return
    xml_error = really_utf8(response.text)
    error_dom = XML.fromstring(xml_error)
    fault = error_dom.find('.//' + _ns_tag('s', 'Fault'))
    error_description = fault.find('faultstring').text
    error_code = EXCEPTION_STR_TO_CODE[error_description]
    message = 'UPnP Error {} received: {} from {}'.format(
        error_code, error_description, self._url)
    raise SoCoUPnPException(
        message=message,
        error_code=error_code,
        error_description=error_description,
        error_xml=really_utf8(response.text)
    )
Check a response for errors .
25,842
def desc_from_uri(uri):
    """Create the content of a DIDL desc element from a URI.

    Prefers the account identified by the 'sn' (serial number) query
    parameter, then falls back to any account for the 'sid' (service id)
    parameter, and finally to the generic zone descriptor.
    """
    # Strip any scheme-like prefix before parsing the query string.
    if ":" in uri:
        _, uri = uri.split(":", 1)
    query = parse_qs(urlparse(uri, 'http').query)
    serial_numbers = query.get('sn')
    if serial_numbers:
        try:
            account = Account.get_accounts()[serial_numbers[0]]
            return "SA_RINCON{}_{}".format(account.service_type,
                                           account.username)
        except KeyError:
            # Unknown serial number; fall through to the sid lookup.
            pass
    service_ids = query.get('sid')
    if service_ids:
        service_id = service_ids[0]
        for service in MusicService._get_music_services_data().values():
            if service_id == service["ServiceID"]:
                service_type = service["ServiceType"]
                accounts = Account.get_accounts_for_service(service_type)
                if not accounts:
                    break
                account = accounts[0]
                return "SA_RINCON{}_{}".format(account.service_type,
                                               account.username)
    return 'RINCON_AssociatedZPUDN'
Create the content of DIDL desc element from a uri .
25,843
def get_soap_header(self):
    """Generate the SOAP authentication header for the related service.

    The serialized header is cached on the instance; token-refresh logic
    elsewhere invalidates it by setting _cached_soap_header to None.

    Returns:
        str: the XML credentials header.
    """
    if self._cached_soap_header is not None:
        return self._cached_soap_header
    music_service = self.music_service
    credentials_header = XML.Element(
        "credentials", {'xmlns': "http://www.sonos.com/Services/1.1"})
    device_id = XML.SubElement(credentials_header, 'deviceId')
    device_id.text = self._device_id
    device_provider = XML.SubElement(credentials_header, 'deviceProvider')
    device_provider.text = 'Sonos'
    if music_service.account.oa_device_id:
        # OAuth-style credentials: loginToken with token, key and
        # household id.
        login_token = XML.Element('loginToken')
        token = XML.SubElement(login_token, 'token')
        token.text = music_service.account.oa_device_id
        key = XML.SubElement(login_token, 'key')
        key.text = music_service.account.key
        household_id = XML.SubElement(login_token, 'householdId')
        household_id.text = self._device.household_id
        credentials_header.append(login_token)
    elif music_service.auth_type in ['DeviceLink', 'UserId']:
        # Fall back to a session id obtained from the device itself.
        session_id = self._device.musicServices.GetSessionId([
            ('ServiceId', music_service.service_id),
            ('Username', music_service.account.username)
        ])['SessionId']
        session_elt = XML.Element('sessionId')
        session_elt.text = session_id
        credentials_header.append(session_elt)
    self._cached_soap_header = XML.tostring(
        credentials_header,
        encoding='utf-8'
    ).decode(encoding='utf-8')
    return self._cached_soap_header
Generate the SOAP authentication header for the related service .
25,844
def call(self, method, args=None):
    """Call a method on the server.

    Args:
        method (str): name of the SOAP method to invoke.
        args (list): optional list of (name, value) parameter tuples.

    Returns:
        dict: the parsed result element, or {} when the service returns
        nothing.

    Raises:
        MusicServiceException: on any SOAP fault other than an expired
        auth token (which is refreshed and retried once).
    """
    message = SoapMessage(
        endpoint=self.endpoint,
        method=method,
        parameters=[] if args is None else args,
        http_headers=self.http_headers,
        soap_action="http://www.sonos.com/Services/1"
                    ".1#{0}".format(method),
        soap_header=self.get_soap_header(),
        namespace=self.namespace,
        timeout=self.timeout)
    try:
        result_elt = message.call()
    except SoapFault as exc:
        if 'Client.TokenRefreshRequired' in exc.faultcode:
            log.debug('Token refresh required. Trying again')
            # Drop the cached header; the fault detail carries fresh
            # credentials for the account.
            self._cached_soap_header = None
            auth_token = exc.detail.findtext('.//authToken')
            private_key = exc.detail.findtext('.//privateKey')
            self.music_service.account.oa_device_id = auth_token
            self.music_service.account.key = private_key
            # Retry once with the refreshed credentials.
            message = SoapMessage(
                endpoint=self.endpoint,
                method=method,
                parameters=args,
                http_headers=self.http_headers,
                soap_action="http://www.sonos.com/Services/1"
                            ".1#{0}".format(method),
                soap_header=self.get_soap_header(),
                namespace=self.namespace,
                timeout=self.timeout)
            result_elt = message.call()
        else:
            raise MusicServiceException(exc.faultstring, exc.faultcode)
    # Strip the service namespace and unwrap the single result element.
    result = list(parse(
        XML.tostring(result_elt), process_namespaces=True,
        namespaces={'http://www.sonos.com/Services/1.1': None}
    ).values())[0]
    return result if result is not None else {}
Call a method on the server .
25,845
def _get_music_services_data_xml(soco=None):
    """Fetch the music services data xml from a Sonos device.

    Args:
        soco: optional device to query; when None, any discovered device
            is used.

    Returns:
        str: the raw AvailableServiceDescriptorList XML.
    """
    device = soco or discovery.any_soco()
    log.debug("Fetching music services data from %s", device)
    available = device.musicServices.ListAvailableServices()
    descriptor_list_xml = available['AvailableServiceDescriptorList']
    log.debug("Services descriptor list: %s", descriptor_list_xml)
    return descriptor_list_xml
Fetch the music services data xml from a Sonos device .
25,846
def _get_music_services_data(cls):
    """Parse raw account data xml into a useful python datastructure.

    The parsed result is cached on the class after the first call.

    Returns:
        dict: service data keyed by the derived ServiceType string.
    """
    if cls._music_services_data is not None:
        return cls._music_services_data
    result = {}
    root = XML.fromstring(
        cls._get_music_services_data_xml().encode('utf-8'))
    for service in root.findall('Service'):
        entry = service.attrib.copy()
        entry['Name'] = service.get('Name')
        # Merge the auth policy attributes (e.g. Auth, PollInterval).
        entry.update(service.find('Policy').attrib)
        presentation = service.find('.//PresentationMap')
        if presentation is not None:
            entry['PresentationMapUri'] = presentation.get('Uri')
        entry['ServiceID'] = service.get('Id')
        # ServiceType is the id shifted left 8 bits plus 7, as a string.
        service_type = str(int(service.get('Id')) * 256 + 7)
        entry['ServiceType'] = service_type
        result[service_type] = entry
    cls._music_services_data = result
    return result
Parse raw account data xml into a useful python datastructure .
25,847
def get_subscribed_services_names(cls):
    """Get a list of the names of all subscribed music services.

    A service counts as subscribed when at least one account exists for
    its service type.
    """
    accounts_for_service = Account.get_accounts_for_service
    return [
        service['Name']
        for service in cls._get_music_services_data().values()
        if len(accounts_for_service(service['ServiceType'])) > 0
    ]
Get a list of the names of all subscribed music services .
25,848
def get_data_for_name(cls, service_name):
    """Get the data relating to a named music service.

    Raises:
        MusicServiceException: if no service with that name exists.
    """
    for service in cls._get_music_services_data().values():
        if service["Name"] == service_name:
            return service
    raise MusicServiceException(
        "Unknown music service: '%s'" % service_name)
Get the data relating to a named music service .
25,849
def _get_search_prefix_map(self):
    """Fetch and parse the service search category mapping.

    The mapping is cached on the instance. TuneIn gets a fixed mapping
    (it publishes no presentation map); services without a presentation
    map URI are treated as not searchable.
    """
    if self._search_prefix_map is not None:
        return self._search_prefix_map
    if self.service_name == "TuneIn":
        self._search_prefix_map = {
            'stations': 'search:station',
            'shows': 'search:show',
            'hosts': 'search:host',
        }
        return self._search_prefix_map
    self._search_prefix_map = {}
    if self.presentation_map_uri is None:
        # No presentation map: assume the service is not searchable.
        return self._search_prefix_map
    log.info('Fetching presentation map from %s',
             self.presentation_map_uri)
    pmap = requests.get(self.presentation_map_uri, timeout=9)
    pmap_root = XML.fromstring(pmap.content)
    categories = pmap_root.findall(".//SearchCategories/Category")
    if categories is None:
        return self._search_prefix_map
    for cat in categories:
        self._search_prefix_map[cat.get('id')] = cat.get('mappedId')
    custom = pmap_root.findall(".//SearchCategories/CustomCategory")
    for cat in custom:
        self._search_prefix_map[cat.get('stringId')] = cat.get('mappedId')
    return self._search_prefix_map
Fetch and parse the service search category mapping .
25,850
def sonos_uri_from_id(self, item_id):
    """Get a URI which can be sent for playing.

    The item id is URL-escaped and embedded in a soco:// URI carrying
    the service id and account serial number.
    """
    escaped_id = quote_url(item_id.encode('utf-8'))
    account = self.account
    return "soco://{}?sid={}&sn={}".format(
        escaped_id, self.service_id, account.serial_number)
Get a uri which can be sent for playing .
25,851
def get_metadata(self, item='root', index=0, count=100, recursive=False):
    """Get metadata for a container or item.

    Accepts either a MusicServiceItem (its id is used) or a raw id
    string (defaults to 'root').
    """
    item_id = item.id if isinstance(item, MusicServiceItem) else item
    response = self.soap_client.call(
        'getMetadata',
        [('id', item_id),
         ('index', index),
         ('count', count),
         ('recursive', 1 if recursive else 0)])
    return parse_response(self, response, 'browse')
Get metadata for a container or item .
25,852
def search(self, category, term='', index=0, count=100):
    """Search for an item in a category.

    The category must be one supported by this service's search prefix
    map; otherwise a MusicServiceException is raised.
    """
    search_category = self._get_search_prefix_map().get(category, None)
    if search_category is None:
        raise MusicServiceException(
            "%s does not support the '%s' search category"
            % (self.service_name, category))
    response = self.soap_client.call(
        'search',
        [('id', search_category), ('term', term),
         ('index', index), ('count', count)])
    return parse_response(self, response, category)
Search for an item in a category .
25,853
def get_media_metadata(self, item_id):
    """Get metadata for a media item.

    Returns the 'getMediaMetadataResult' payload, or None if absent.
    """
    result = self.soap_client.call('getMediaMetadata', [('id', item_id)])
    return result.get('getMediaMetadataResult')
Get metadata for a media item .
25,854
def get_media_uri(self, item_id):
    """Get a streaming URI for an item.

    Returns the 'getMediaURIResult' payload, or None if absent.
    """
    result = self.soap_client.call('getMediaURI', [('id', item_id)])
    return result.get('getMediaURIResult')
Get a streaming URI for an item .
25,855
def get_extended_metadata(self, item_id):
    """Get extended metadata for a media item, such as related items.

    Returns the 'getExtendedMetadataResult' payload, or None if absent.
    """
    result = self.soap_client.call('getExtendedMetadata', [('id', item_id)])
    return result.get('getExtendedMetadataResult')
Get extended metadata for a media item such as related items .
25,856
def get_extended_metadata_text(self, item_id, metadata_type):
    """Get extended metadata text for a media item.

    Returns the 'getExtendedMetadataTextResult' payload, or None if absent.
    """
    result = self.soap_client.call(
        'getExtendedMetadataText',
        [('id', item_id), ('type', metadata_type)])
    return result.get('getExtendedMetadataTextResult')
Get extended metadata text for a media item .
25,857
def get_class(class_key):
    """Form a music service data structure class from the class key.

    Classes are created dynamically with ``type`` and memoised in the
    module-level CLASSES dict, so each key yields one class object.
    """
    if class_key not in CLASSES:
        for basecls in (MediaMetadata, MediaCollection):
            if class_key.startswith(basecls.__name__):
                # e.g. 'MediaMetadataTrack' -> class name 'MSTrack'
                class_name = 'MS' + class_key.replace(basecls.__name__, '')
                # type() requires a str (bytes) name on Python 2
                if sys.version_info[0] == 2:
                    class_name = class_name.encode('ascii')
                CLASSES[class_key] = type(class_name, (basecls,), {})
                _LOG.info('Class %s created', CLASSES[class_key])
    return CLASSES[class_key]
Form a music service data structure class from the class key
25,858
def parse_response(service, response, search_type):
    """Parse the response to a music service query and return a
    SearchResult.

    ``response`` must contain either a 'searchResult' or a
    'getMetadataResult' key; a ValueError is raised otherwise.
    """
    _LOG.debug('Parse response "%s" from service "%s" of type "%s"',
               response, service, search_type)
    items = []
    if 'searchResult' in response:
        response = response['searchResult']
    elif 'getMetadataResult' in response:
        response = response['getMetadataResult']
    else:
        raise ValueError('"response" should contain either the key '
                         '"searchResult" or "getMetadataResult"')
    search_metadata = {
        'number_returned': response['count'],
        'total_matches': None,
        'search_type': search_type,
        'update_id': None,
    }
    for result_type in ('mediaCollection', 'mediaMetadata'):
        result_type_proper = result_type[0].upper() + result_type[1:]
        raw_items = response.get(result_type, [])
        # A single result comes back as a dict rather than a list
        if isinstance(raw_items, OrderedDict):
            raw_items = [raw_items]
        for raw_item in raw_items:
            # e.g. 'MediaMetadata' + 'Track' -> dynamic class lookup
            class_key = result_type_proper + raw_item['itemType'].title()
            cls = get_class(class_key)
            items.append(cls.from_music_service(service, raw_item))
    return SearchResult(items, **search_metadata)
Parse the response to a music service query and return a SearchResult
25,859
def form_uri(item_id, service, is_track):
    """Form and return a music service item uri.

    Tracks get a service-specific ``soco://`` uri; containers get an
    'x-rincon-cpcontainer:' prefix.
    """
    if not is_track:
        return 'x-rincon-cpcontainer:' + item_id
    return service.sonos_uri_from_id(item_id)
Form and return a music service item uri
25,860
def bool_str(string):
    """Return a boolean from a string input of 'true' or 'false'.

    Raises ValueError for any string not in BOOL_STRS.
    """
    if string not in BOOL_STRS:
        raise ValueError('Invalid boolean string: "{}"'.format(string))
    # Idiomatic: the comparison already yields the boolean
    return string == 'true'
Return a boolean from a string input of 'true' or 'false'.
25,861
def _get_account_xml(soco):
    """Fetch the account data XML from a Sonos device.

    Uses the given device, or any discovered one if ``soco`` is None.
    """
    device = soco or discovery.any_soco()
    log.debug("Fetching account data from %s", device)
    settings_url = "http://{}:1400/status/accounts".format(device.ip_address)
    result = requests.get(settings_url).content
    log.debug("Account data: %s", result)
    return result
Fetch the account data from a Sonos device .
25,862
def get_accounts(cls, soco=None):
    """Get all accounts known to the Sonos system.

    Returns a dict of serial number -> Account, kept in sync with the
    class-level ``_all_accounts`` cache.  A synthetic TuneIn account
    (serial '0') is always included, since TuneIn is built in and never
    appears in the device's account list.
    """
    root = XML.fromstring(cls._get_account_xml(soco))
    xml_accounts = root.findall('.//Account')
    result = {}
    for xml_account in xml_accounts:
        serial_number = xml_account.get('SerialNum')
        is_deleted = True if xml_account.get('Deleted') == '1' else False
        if cls._all_accounts.get(serial_number):
            # Known account: drop it from the cache if now deleted,
            # otherwise refresh the cached instance below
            if is_deleted:
                del cls._all_accounts[serial_number]
                continue
            else:
                account = cls._all_accounts.get(serial_number)
        else:
            # Unknown and already deleted: nothing to record
            if is_deleted:
                continue
            account = Account()
            account.serial_number = serial_number
            cls._all_accounts[serial_number] = account
        account.service_type = xml_account.get('Type')
        account.deleted = is_deleted
        account.username = xml_account.findtext('UN')
        account.metadata = xml_account.findtext('MD')
        account.nickname = xml_account.findtext('NN')
        account.oa_device_id = xml_account.findtext('OADevID')
        account.key = xml_account.findtext('Key')
        result[serial_number] = account
    # TuneIn is always available; '65031' is its fixed service type
    tunein = Account()
    tunein.service_type = '65031'
    tunein.deleted = False
    tunein.username = ''
    tunein.metadata = ''
    tunein.nickname = ''
    tunein.oa_device_id = ''
    tunein.key = ''
    tunein.serial_number = '0'
    result['0'] = tunein
    return result
Get all accounts known to the Sonos system .
25,863
def get_accounts_for_service(cls, service_type):
    """Get a list of accounts for a given music service type."""
    matching = []
    for account in cls.get_accounts().values():
        if account.service_type == service_type:
            matching.append(account)
    return matching
Get a list of accounts for a given music service .
25,864
def play_alert(zones, alert_uri, alert_volume=20, alert_duration=0,
               fade_back=False):
    """Demo function using soco.snapshot across multiple Sonos players.

    Snapshots every zone, plays ``alert_uri`` on all group coordinators
    at ``alert_volume``, waits ``alert_duration`` seconds, then restores
    each zone from its snapshot.
    """
    # Save the current state of every zone so it can be restored later
    for zone in zones:
        zone.snap = Snapshot(zone)
        zone.snap.snapshot()
        print('snapshot of zone: {}'.format(zone.player_name))
    for zone in zones:
        # Only coordinators can pause; TV playback cannot be paused
        if zone.is_coordinator:
            if not zone.is_playing_tv:
                trans_state = zone.get_current_transport_info()
                if trans_state['current_transport_state'] == 'PLAYING':
                    zone.pause()
        # Volume/mute are set on every zone, not just coordinators
        zone.volume = alert_volume
        zone.mute = False
    print('will play: {} on all coordinators'.format(alert_uri))
    for zone in zones:
        if zone.is_coordinator:
            zone.play_uri(uri=alert_uri, title='Sonos Alert')
    # Let the alert play for the requested duration
    time.sleep(alert_duration)
    for zone in zones:
        print('restoring {}'.format(zone.player_name))
        zone.snap.restore(fade=fade_back)
Demo function using soco . snapshot across multiple Sonos players .
25,865
def get(self, *args, **kwargs):
    """Get an item from the cache for this combination of args and kwargs.

    Returns None when caching is disabled, the key is unknown, or the
    entry has expired (expired entries are evicted).
    """
    if not self.enabled:
        return None
    cache_key = self.make_key(args, kwargs)
    with self._cache_lock:
        entry = self._cache.get(cache_key)
        if entry is None:
            return None
        expirytime, item = entry
        if expirytime < time():
            # Expired: evict and report a miss
            del self._cache[cache_key]
            return None
        return item
Get an item from the cache for this combination of args and kwargs .
25,866
def put(self, item, *args, **kwargs):
    """Put an item into the cache for this combination of args and kwargs.

    A 'timeout' keyword argument (seconds) overrides the cache's
    default_timeout for this entry.
    """
    if not self.enabled:
        return
    # 'timeout' is popped so it does not become part of the cache key
    timeout = kwargs.pop('timeout', None)
    if timeout is None:
        timeout = self.default_timeout
    key = self.make_key(args, kwargs)
    expiry = time() + timeout
    with self._cache_lock:
        self._cache[key] = (expiry, item)
Put an item into the cache for this combination of args and kwargs .
25,867
def delete(self, *args, **kwargs):
    """Delete an item from the cache for this combination of args and
    kwargs.  Missing keys are ignored."""
    cache_key = self.make_key(args, kwargs)
    with self._cache_lock:
        self._cache.pop(cache_key, None)
Delete an item from the cache for this combination of args and kwargs .
25,868
def build_album_art_full_uri(self, url):
    """Ensure an Album Art URI is an absolute URI.

    Relative URIs are prefixed with this zone's http address on port 1400.
    """
    if url.startswith(('http:', 'https:')):
        return url
    return 'http://' + self.soco.ip_address + ':1400' + url
Ensure an Album Art URI is an absolute URI .
25,869
def _update_album_art_to_full_uri ( self , item ) : if getattr ( item , 'album_art_uri' , False ) : item . album_art_uri = self . build_album_art_full_uri ( item . album_art_uri )
Update an item's Album Art URI to be an absolute URI.
25,870
def get_music_library_information(self, search_type, start=0, max_items=100,
                                  full_album_art_uri=False, search_term=None,
                                  subcategories=None, complete_result=False):
    """Retrieve music information objects from the music library.

    When ``complete_result`` is True, the paging loop keeps fetching
    until all matches are collected; otherwise a single page (``start``,
    ``max_items``) is returned.  UPnP error 701 ('no such object') is
    translated into an empty SearchResult.
    """
    search = self.SEARCH_TRANSLATION[search_type]
    # Subcategories and search term are appended to the browse path
    if subcategories is not None:
        for category in subcategories:
            search += '/' + url_escape_path(really_unicode(category))
    if search_term is not None:
        search += ':' + url_escape_path(really_unicode(search_term))
    item_list = []
    # Sentinel total so the loop body runs at least once
    metadata = {'total_matches': 100000}
    while len(item_list) < metadata['total_matches']:
        if complete_result:
            # Restart paging from what we already have, ask for everything
            start, max_items = len(item_list), 100000
        try:
            response, metadata = self._music_lib_search(search, start,
                                                        max_items)
        except SoCoUPnPException as exception:
            if exception.error_code == '701':
                return SearchResult([], search_type, 0, 0, None)
            else:
                raise exception
        items = from_didl_string(response['Result'])
        for item in items:
            if full_album_art_uri:
                self._update_album_art_to_full_uri(item)
            item_list.append(item)
        if not complete_result:
            break
    metadata['search_type'] = search_type
    if complete_result:
        metadata['number_returned'] = len(item_list)
    return SearchResult(item_list, **metadata)
Retrieve music information objects from the music library .
25,871
def _music_lib_search(self, search, start, max_items):
    """Perform a music library search and extract the search numbers.

    Returns the raw Browse response plus a metadata dict with
    number_returned / total_matches / update_id as ints.
    """
    response = self.contentDirectory.Browse([
        ('ObjectID', search),
        ('BrowseFlag', 'BrowseDirectChildren'),
        ('Filter', '*'),
        ('StartingIndex', start),
        ('RequestedCount', max_items),
        ('SortCriteria', ''),
    ])
    metadata = {}
    for tag in ['NumberReturned', 'TotalMatches', 'UpdateID']:
        metadata[camel_to_underscore(tag)] = int(response[tag])
    return response, metadata
Perform a music library search and extract search numbers .
25,872
def search_track(self, artist, album=None, track=None,
                 full_album_art_uri=False):
    """Search for an artist, an artist's albums, or a specific track."""
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=[artist, album or ''],
        search_term=track,
        complete_result=True)
    result._metadata['search_type'] = 'search_track'
    return result
Search for an artist, an artist's albums, or a specific track.
25,873
def get_albums_for_artist(self, artist, full_album_art_uri=False):
    """Get an artist's albums.

    The underlying search returns mixed item types; the result is
    filtered in place down to DidlMusicAlbum instances and its metadata
    counts are rewritten to match.
    """
    subcategories = [artist]
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=subcategories, complete_result=True)
    reduced = [item for item in result if item.__class__ == DidlMusicAlbum]
    # Mutate the SearchResult's own list so the same object is returned
    result[:] = reduced
    result._metadata.update({
        'item_list': reduced,
        'search_type': 'albums_for_artist',
        'number_returned': len(reduced),
        'total_matches': len(reduced),
    })
    return result
Get an artist s albums .
25,874
def get_tracks_for_album(self, artist, album, full_album_art_uri=False):
    """Get the tracks of an artist's album."""
    result = self.get_album_artists(
        full_album_art_uri=full_album_art_uri,
        subcategories=[artist, album],
        complete_result=True)
    result._metadata['search_type'] = 'tracks_for_album'
    return result
Get the tracks of an artist s album .
25,875
def any_soco():
    """Return any visible soco device, for when it doesn't matter which.

    Tries existing SoCo instances first; falls back to a network
    discovery, returning None if that finds nothing either.
    """
    cls = config.SOCO_CLASS
    try:
        # Pick any already-instantiated, visible device
        device = next(d for d in cls._instances[cls._class_group].values()
                      if d.is_visible)
    except (KeyError, StopIteration):
        # No instances yet (KeyError) or none visible (StopIteration)
        devices = discover()
        return None if devices is None else devices.pop()
    return device
Return any visible soco device, for when it doesn't matter which.
25,876
def wrap_arguments(args=None):
    """Wrap a list of (name, value) tuples in xml ready to pass into a
    SOAP request.  Values are xml-escaped, including double quotes."""
    tags = [
        "<{name}>{value}</{name}>".format(
            name=name, value=escape("%s" % value, {'"': "&quot;"}))
        for name, value in (args or [])
    ]
    return "".join(tags)
Wrap a list of tuples in xml ready to pass into a SOAP request .
25,877
def unwrap_arguments(xml_response):
    """Extract arguments and their values from a SOAP response.

    Returns a dict of tag name -> text for the children of the first
    element inside the SOAP Body.
    """
    encoded = xml_response.encode('utf-8')
    try:
        tree = XML.fromstring(encoded)
    except XML.ParseError:
        # Some devices send characters that are illegal in XML;
        # strip them out and parse again
        cleaned = illegal_xml_re.sub(
            '', encoded.decode('utf-8')).encode('utf-8')
        tree = XML.fromstring(cleaned)
    body = tree.find("{http://schemas.xmlsoap.org/soap/envelope/}Body")[0]
    return {child.tag: child.text or "" for child in body}
Extract arguments and their values from a SOAP response .
25,878
def compose_args(self, action_name, in_argdict):
    """Compose the argument list from an argument dictionary, with
    respect for default values.

    For each of the action's declared in-arguments the value is taken,
    in order of precedence, from ``in_argdict``, from
    ``self.DEFAULT_ARGS``, or from the argument's vartype default.

    Raises:
        AttributeError: if ``action_name`` is not a known action.
        ValueError: if ``in_argdict`` contains an unexpected argument,
            or a required argument has no value and no default.
    """
    for action in self.actions:
        if action.name == action_name:
            break
    else:
        raise AttributeError('Unknown Action: {0}'.format(action_name))
    unexpected = set(in_argdict) - set(
        argument.name for argument in action.in_args)
    if unexpected:
        raise ValueError(
            "Unexpected argument '{0}'. Method signature: {1}".format(
                next(iter(unexpected)), str(action)))
    composed = []
    for argument in action.in_args:
        name = argument.name
        if name in in_argdict:
            composed.append((name, in_argdict[name]))
            continue
        if name in self.DEFAULT_ARGS:
            composed.append((name, self.DEFAULT_ARGS[name]))
            continue
        if argument.vartype.default is not None:
            composed.append((name, argument.vartype.default))
            # BUG FIX: without this continue, the "Missing argument"
            # error below fired even though a default had been found
            continue
        raise ValueError(
            "Missing argument '{0}'. Method signature: {1}".format(
                argument.name, str(action)))
    return composed
Compose the argument list from an argument dictionary with respect for default values .
25,879
def build_command(self, action, args=None):
    """Build a SOAP request.

    Returns a (headers, body) tuple ready for a POST to the device.
    """
    body = self.soap_body_template.format(
        arguments=self.wrap_arguments(args),
        action=action,
        service_type=self.service_type,
        version=self.version)
    soap_action = (
        "urn:schemas-upnp-org:service:{service_type}:{version}#{action}"
        .format(service_type=self.service_type,
                version=self.version,
                action=action))
    headers = {
        'Content-Type': 'text/xml; charset="utf-8"',
        'SOAPACTION': soap_action,
    }
    return (headers, body)
Build a SOAP request .
25,880
def send_command(self, action, args=None, cache=None, cache_timeout=None,
                 **kwargs):
    """Send a command to a Sonos device.

    Results are looked up in (and stored to) a cache keyed on the
    action and its arguments.  HTTP 500 responses are dissected via
    handle_upnp_error; other non-200 statuses raise via
    raise_for_status.
    """
    if args is None:
        # Build the argument list from keyword args plus defaults
        args = self.compose_args(action, kwargs)
    if cache is None:
        cache = self.cache
    result = cache.get(action, args)
    if result is not None:
        log.debug("Cache hit")
        return result
    headers, body = self.build_command(action, args)
    log.info("Sending %s %s to %s", action, args, self.soco.ip_address)
    log.debug("Sending %s, %s", headers, prettify(body))
    response = requests.post(self.base_url + self.control_url,
                             headers=headers, data=body.encode('utf-8'))
    log.debug("Received %s, %s", response.headers, response.text)
    status = response.status_code
    log.info("Received status %s from %s", status, self.soco.ip_address)
    if status == 200:
        # An empty but successful response is represented as True so
        # callers (and the cache) can distinguish it from a miss
        result = self.unwrap_arguments(response.text) or True
        cache.put(result, action, args, timeout=cache_timeout)
        return result
    elif status == 500:
        try:
            # Raises a SoCo exception derived from the UPnP error body
            self.handle_upnp_error(response.text)
        except Exception as exc:
            log.exception(str(exc))
            raise
    else:
        response.raise_for_status()
    return None
Send a command to a Sonos device .
25,881
def handle_upnp_error(self, xml_error):
    """Dissect a UPnP error and raise an appropriate exception.

    Raises SoCoUPnPException when an errorCode can be extracted from
    the response, UnknownSoCoException otherwise.
    """
    xml_error = xml_error.encode('utf-8')
    error = XML.fromstring(xml_error)
    log.debug("Error %s", xml_error)
    error_code = error.findtext(
        './/{urn:schemas-upnp-org:control-1-0}errorCode')
    if error_code is not None:
        # Map the numeric code to a human-readable description, if known
        description = self.UPNP_ERRORS.get(int(error_code), '')
        raise SoCoUPnPException(
            message='UPnP Error {} received: {} from {}'.format(
                error_code, description, self.soco.ip_address),
            error_code=error_code,
            error_description=description,
            error_xml=xml_error)
    else:
        log.error("Unknown error received from %s", self.soco.ip_address)
        raise UnknownSoCoException(xml_error)
Dissect a UPnP error and raise an appropriate exception.
25,882
def subscribe(self, requested_timeout=None, auto_renew=False,
              event_queue=None):
    """Subscribe to the service's events.

    Returns the new Subscription object, already subscribed.
    """
    sub = Subscription(self, event_queue)
    sub.subscribe(requested_timeout=requested_timeout,
                  auto_renew=auto_renew)
    return sub
Subscribe to the service s events .
25,883
def actions(self):
    """The service's actions with their arguments.

    Computed lazily on first access and cached in ``self._actions``.
    """
    cached = self._actions
    if cached is None:
        cached = self._actions = list(self.iter_actions())
    return cached
The service s actions with their arguments .
25,884
def event_vars(self):
    """The service's eventable variables.

    Computed lazily on first access and cached in ``self._event_vars``.
    """
    cached = self._event_vars
    if cached is None:
        cached = self._event_vars = list(self.iter_event_vars())
    return cached
The service s eventable variables .
25,885
def iter_event_vars(self):
    """Yield the service's eventable variables as (name, data type)
    tuples, read from the service's SCPD description."""
    ns = '{urn:schemas-upnp-org:service-1-0}'
    scpd_body = requests.get(self.base_url + self.scpd_url).text
    tree = XML.fromstring(scpd_body.encode('utf-8'))
    statevars = tree.findall('{}stateVariable'.format(ns))
    for state in statevars:
        # Only variables flagged as sending events are yielded
        if state.attrib['sendEvents'] == "yes":
            name = state.findtext('{}name'.format(ns))
            vartype = state.findtext('{}dataType'.format(ns))
            yield (name, vartype)
Yield the service's eventable variables.
25,886
def GetZoneGroupState(self, *args, **kwargs):
    """Overrides the default handling to use the global shared zone
    group state cache, unless another cache is specified."""
    kwargs.setdefault('cache', zone_group_state_shared_cache)
    return self.send_command('GetZoneGroupState', *args, **kwargs)
Overrides default handling to use the global shared zone group state cache unless another cache is specified .
25,887
def add_random_file_from_present_folder(machine_ip, port, zone):
    """Add a random non-.py file from this folder and subfolders to
    soco's queue and start playing it."""
    music_files = []
    print('Looking for music files')
    for path, dirs, files in os.walk('.'):
        for file_ in files:
            # startswith('.py') on the extension also excludes .pyc etc.
            if not os.path.splitext(file_)[1].startswith('.py'):
                music_files.append(os.path.relpath(os.path.join(path, file_)))
                print('Found:', music_files[-1])
    random_file = choice(music_files)
    # url-quote each path component so the uri is valid
    random_file = os.path.join(
        *[quote(part) for part in os.path.split(random_file)])
    print('\nPlaying random file:', random_file)
    netpath = 'http://{}:{}/{}'.format(machine_ip, port, random_file)
    number_in_queue = zone.add_uri_to_queue(netpath)
    # add_uri_to_queue is 1-based; play_from_queue is 0-based
    zone.play_from_queue(number_in_queue - 1)
Add a random non - py file from this folder and subfolders to soco
25,888
def to_element(self):
    """Return an ElementTree Element based on this resource.

    The resource's uri becomes the element text; every non-None
    optional attribute is added to the <res> element.

    Raises:
        DIDLMetadataError: if protocol_info is not set (it is required).
    """
    if not self.protocol_info:
        # BUG FIX: the implicit string concatenation was missing spaces,
        # producing "...for thisresource:protocolInfo not set..."
        raise DIDLMetadataError(
            'Could not create Element for this resource: '
            'protocolInfo not set (required).')
    root = XML.Element('res')
    # Required
    root.attrib['protocolInfo'] = self.protocol_info
    # Optional attributes, added only when set; numeric values are
    # stringified for the XML attribute
    if self.import_uri is not None:
        root.attrib['importUri'] = self.import_uri
    if self.size is not None:
        root.attrib['size'] = str(self.size)
    if self.duration is not None:
        root.attrib['duration'] = self.duration
    if self.bitrate is not None:
        root.attrib['bitrate'] = str(self.bitrate)
    if self.sample_frequency is not None:
        root.attrib['sampleFrequency'] = str(self.sample_frequency)
    if self.bits_per_sample is not None:
        root.attrib['bitsPerSample'] = str(self.bits_per_sample)
    if self.nr_audio_channels is not None:
        root.attrib['nrAudioChannels'] = str(self.nr_audio_channels)
    if self.resolution is not None:
        root.attrib['resolution'] = self.resolution
    if self.color_depth is not None:
        root.attrib['colorDepth'] = str(self.color_depth)
    if self.protection is not None:
        root.attrib['protection'] = self.protection
    root.text = self.uri
    return root
Return an ElementTree Element based on this resource .
25,889
def to_dict(self, remove_nones=False):
    """Return a dict representation of the DidlResource.

    With ``remove_nones`` set, entries whose value is None are omitted.
    """
    content = {
        'uri': self.uri,
        'protocol_info': self.protocol_info,
        'import_uri': self.import_uri,
        'size': self.size,
        'duration': self.duration,
        'bitrate': self.bitrate,
        'sample_frequency': self.sample_frequency,
        'bits_per_sample': self.bits_per_sample,
        'nr_audio_channels': self.nr_audio_channels,
        'resolution': self.resolution,
        'color_depth': self.color_depth,
        'protection': self.protection,
    }
    if remove_nones:
        content = {key: value for key, value in content.items()
                   if value is not None}
    return content
Return a dict representation of the DidlResource .
25,890
def from_element(cls, element):
    """Create an instance of this class from an ElementTree xml Element.

    Validates the element tag, UPnP class, id and parentID, then pulls
    title, resources, desc and all translated metadata fields.

    Raises:
        DIDLMetadataError: on a wrong tag, wrong UPnP class, or a
            missing id/parentID attribute.
    """
    tag = element.tag
    if not (tag.endswith('item') or tag.endswith('container')):
        raise DIDLMetadataError(
            "Wrong element. Expected <item> or <container>,"
            " got <{0}> for class {1}'".format(tag, cls.item_class))
    item_class = element.find(ns_tag('upnp', 'class')).text
    # Strip any '.#...' suffix before comparing the UPnP class
    if '.#' in item_class:
        item_class = item_class[:item_class.find('.#')]
    if item_class != cls.item_class:
        raise DIDLMetadataError(
            "UPnP class is incorrect. Expected '{0}',"
            " got '{1}'".format(cls.item_class, item_class))
    item_id = element.get('id', None)
    if item_id is None:
        raise DIDLMetadataError("Missing id attribute")
    item_id = really_unicode(item_id)
    parent_id = element.get('parentID', None)
    if parent_id is None:
        raise DIDLMetadataError("Missing parentID attribute")
    parent_id = really_unicode(parent_id)
    # Anything other than an explicit 'false' is treated as restricted
    restricted = element.get('restricted', None)
    restricted = False if restricted in [0, 'false', 'False'] else True
    title_elt = element.find(ns_tag('dc', 'title'))
    if title_elt is None or not title_elt.text:
        title = ''
    else:
        title = really_unicode(title_elt.text)
    resources = []
    for res_elt in element.findall(ns_tag('', 'res')):
        resources.append(DidlResource.from_element(res_elt))
    desc = element.findtext(ns_tag('', 'desc'))
    # Collect all metadata fields declared in the class's translation map
    content = {}
    for key, value in cls._translation.items():
        result = element.findtext(ns_tag(*value))
        if result is not None:
            content[key] = really_unicode(result)
    # Track numbers arrive as text but are exposed as ints
    if content.get('original_track_number') is not None:
        content['original_track_number'] = int(
            content['original_track_number'])
    return cls(title=title, parent_id=parent_id, item_id=item_id,
               restricted=restricted, resources=resources, desc=desc,
               **content)
Create an instance of this class from an ElementTree xml Element .
25,891
def to_dict(self, remove_nones=False):
    """Return the dict representation of the instance.

    ``remove_nones`` is forwarded to each resource's to_dict.
    """
    content = {key: getattr(self, key)
               for key in self._translation if hasattr(self, key)}
    content['parent_id'] = self.parent_id
    content['item_id'] = self.item_id
    content['restricted'] = self.restricted
    content['title'] = self.title
    if self.resources != []:
        content['resources'] = [res.to_dict(remove_nones=remove_nones)
                                for res in self.resources]
    content['desc'] = self.desc
    return content
Return the dict representation of the instance .
25,892
def set_uri(self, uri, resource_nr=0, protocol_info=None):
    """Set a resource uri for this instance.  If no resource exists at
    ``resource_nr``, create a new one with the given protocol info.

    When creating a resource without explicit protocol_info, a
    catch-all value is inferred from the uri scheme.
    """
    try:
        self.resources[resource_nr].uri = uri
        if protocol_info is not None:
            self.resources[resource_nr].protocol_info = protocol_info
    except IndexError:
        if protocol_info is None:
            # e.g. 'http://...' -> 'http:*:*:*'
            # NOTE(review): raises ValueError if the uri has no ':'
            protocol_info = uri[:uri.index(':')] + ':*:*:*'
        self.resources.append(DidlResource(uri, protocol_info))
Set a resource uri for this instance . If no resource exists create a new one with the given protocol info .
25,893
def reference(self):
    """The Didl object this favorite refers to.

    The object is rebuilt from the favorite's resource_meta_data, with
    this favorite's resources attached.
    """
    # Import lazily (cached in a module global) to avoid a circular import
    global _FROM_DIDL_STRING_FUNCTION
    if not _FROM_DIDL_STRING_FUNCTION:
        from . import data_structures_entry
        _FROM_DIDL_STRING_FUNCTION = data_structures_entry.from_didl_string
    ref = _FROM_DIDL_STRING_FUNCTION(
        getattr(self, 'resource_meta_data'))[0]
    ref.resources = self.resources
    return ref
The Didl object this favorite refers to .
25,894
def do_NOTIFY(self):
    """Serve a NOTIFY request.

    Builds an Event from the request body and dispatches it to the
    queue of the subscription matching the request's SID header.
    Always responds 200 so the device does not retry.
    """
    timestamp = time.time()
    headers = requests.structures.CaseInsensitiveDict(self.headers)
    seq = headers['seq']
    sid = headers['sid']
    content_length = int(headers['content-length'])
    content = self.rfile.read(content_length)
    # Look up the subscription under the lock; the rest can run outside
    with _subscriptions_lock:
        subscription = _subscriptions.get(sid)
    if subscription:
        service = subscription.service
        log.info("Event %s received for %s service on thread %s at %s",
                 seq, service.service_id, threading.current_thread(),
                 timestamp)
        log.debug("Event content: %s", content)
        variables = parse_event_xml(content)
        event = Event(sid, seq, service, timestamp, variables)
        # Keep the service's cache consistent before handing the event on
        service._update_cache_on_event(event)
        subscription.events.put(event)
    else:
        log.info("No service registered for %s", sid)
    self.send_response(200)
    self.end_headers()
Serve a NOTIFY request .
25,895
def stop(self):
    """Stop the event listener."""
    # Signal the server loop to exit...
    self._listener_thread.stop_flag.set()
    # ...then make a throwaway request so the blocking accept returns
    # and the thread can notice the flag
    try:
        urlopen('http://%s:%s/' % (self.address[0], self.address[1]))
    except URLError:
        pass
    self._listener_thread.join()
    self.is_running = False
    log.info("Event listener stopped")
Stop the event listener .
25,896
def subscribe(self, requested_timeout=None, auto_renew=False):
    """Subscribe to the service.

    Starts the event listener if needed, sends the UPnP SUBSCRIBE
    request, records the granted SID and timeout, and optionally spawns
    a daemon thread that renews the subscription before it expires.

    Raises:
        SoCoException: if this instance was previously unsubscribed.
    """
    class AutoRenewThread(threading.Thread):
        """Daemon thread that renews the subscription every ``interval``
        seconds until ``stop_flag`` is set."""

        def __init__(self, interval, stop_flag, sub, *args, **kwargs):
            super(AutoRenewThread, self).__init__(*args, **kwargs)
            self.interval = interval
            self.sub = sub
            self.stop_flag = stop_flag
            # Daemon so a forgotten thread never blocks interpreter exit
            self.daemon = True

        def run(self):
            sub = self.sub
            stop_flag = self.stop_flag
            interval = self.interval
            # wait() returns False on timeout -> renew; True when stopped
            while not stop_flag.wait(interval):
                log.info("Autorenewing subscription %s", sub.sid)
                sub.renew()

    self.requested_timeout = requested_timeout
    if self._has_been_unsubscribed:
        raise SoCoException(
            'Cannot resubscribe instance once unsubscribed')
    service = self.service
    if not event_listener.is_running:
        event_listener.start(service.soco)
    ip_address, port = event_listener.address
    # Allow the advertised callback IP to be overridden (e.g. NAT)
    if config.EVENT_ADVERTISE_IP:
        ip_address = config.EVENT_ADVERTISE_IP
    headers = {
        'Callback': '<http://{}:{}>'.format(ip_address, port),
        'NT': 'upnp:event',
    }
    if requested_timeout is not None:
        headers["TIMEOUT"] = "Second-{}".format(requested_timeout)
    with _subscriptions_lock:
        response = requests.request(
            'SUBSCRIBE',
            service.base_url + service.event_subscription_url,
            headers=headers)
        response.raise_for_status()
        self.sid = response.headers['sid']
        timeout = response.headers['timeout']
        if timeout.lower() == 'infinite':
            self.timeout = None
        else:
            # NOTE(review): lstrip strips a *character set*, not a
            # prefix; it works here only because digits are not in
            # the set 'Second-'
            self.timeout = int(timeout.lstrip('Second-'))
        self._timestamp = time.time()
        self.is_subscribed = True
        log.info(
            "Subscribed to %s, sid: %s",
            service.base_url + service.event_subscription_url, self.sid)
        _subscriptions[self.sid] = self
    # Make sure we clean up on interpreter exit
    atexit.register(self.unsubscribe)
    if not auto_renew:
        return
    # Renew at 85% of the granted timeout
    interval = self.timeout * 85 / 100
    auto_renew_thread = AutoRenewThread(
        interval, self._auto_renew_thread_flag, self)
    auto_renew_thread.start()
Subscribe to the service .
25,897
def renew(self, requested_timeout=None):
    """Renew the event subscription.

    Raises:
        SoCoException: if the instance was unsubscribed, never
            subscribed, or the subscription has already expired.
    """
    if self._has_been_unsubscribed:
        raise SoCoException(
            'Cannot renew subscription once unsubscribed')
    if not self.is_subscribed:
        raise SoCoException(
            'Cannot renew subscription before subscribing')
    if self.time_left == 0:
        raise SoCoException('Cannot renew subscription after expiry')
    headers = {'SID': self.sid}
    # Fall back to the timeout requested at subscribe time
    if requested_timeout is None:
        requested_timeout = self.requested_timeout
    if requested_timeout is not None:
        headers["TIMEOUT"] = "Second-{}".format(requested_timeout)
    response = requests.request(
        'SUBSCRIBE',
        self.service.base_url + self.service.event_subscription_url,
        headers=headers)
    response.raise_for_status()
    timeout = response.headers['timeout']
    if timeout.lower() == 'infinite':
        self.timeout = None
    else:
        # NOTE(review): lstrip strips a character set, not a prefix;
        # works only because digits are not in 'Second-'
        self.timeout = int(timeout.lstrip('Second-'))
    self._timestamp = time.time()
    self.is_subscribed = True
    log.info(
        "Renewed subscription to %s, sid: %s",
        self.service.base_url + self.service.event_subscription_url,
        self.sid)
Renew the event subscription .
25,898
def only_on_master(function):
    """Decorator that raises SoCoSlaveException when the decorated
    method/property is called on a non-coordinator (slave) zone."""
    @wraps(function)
    def wrapper(self, *args, **kwargs):
        if self.is_coordinator:
            return function(self, *args, **kwargs)
        raise SoCoSlaveException(
            'The method or property "{0}" can only be called/used '
            'on the coordinator in a group'.format(function.__name__))
    return wrapper
Decorator that raises SoCoSlaveException on master call on slave .
25,899
def ramp_to_volume(self, volume, ramp_type='SLEEP_TIMER_RAMP_TYPE'):
    """Smoothly change the volume.

    Returns the ramp time reported by the device, in seconds.
    """
    result = self.renderingControl.RampToVolume([
        ('InstanceID', 0),
        ('Channel', 'Master'),
        ('RampType', ramp_type),
        ('DesiredVolume', volume),
        ('ResetVolumeAfter', False),
        ('ProgramURI', ''),
    ])
    return int(result['RampTime'])
Smoothly change the volume .