idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
45,800
def realpath(self, spec, key):
    """Resolve and update the path at ``key`` in the spec with its realpath.

    The path is resolved relative to the spec's working directory; when
    it resolves to something different the spec entry is updated in
    place and a warning is logged.  Returns the resolved path, or None
    when the key is absent or empty.
    """
    if key not in spec:
        # nothing to do when the key is not present at all
        return
    if not spec[key]:
        logger.warning(
            "cannot resolve realpath of '%s' as it is not defined", key)
        return
    # ``realpath`` here is the module-level os.path helper, not this
    # method (note the missing ``self.`` prefix)
    check = realpath(join(spec.get(WORKING_DIR, ''), spec[key]))
    if check != spec[key]:
        spec[key] = check
        logger.warning(
            "realpath of '%s' resolved to '%s', spec is updated",
            key, check)
    return check
Resolve and update the path key in the spec with its realpath based on the working directory .
45,801
def setup_prefix_suffix(self):
    """Initialize the compile prefix and the spec key suffixes.

    The prefix is prepended to entry names to form the compile method
    lookups; the suffixes form the spec keys that values are read from
    and written to by the compile step.
    """
    (self.compile_prefix,
     self.sourcepath_suffix,
     self.modpath_suffix,
     self.targetpath_suffix) = (
        'compile_', '_sourcepath', '_modpaths', '_targetpaths')
Set up the compile prefix sourcepath and the targetpath suffix attributes which are the prefix to the function name and the suffixes to retrieve the values from for creating the generator function .
45,802
def _validate_build_target(self, spec, target):
    """Validate that ``target`` resolves to a path inside the build_dir.

    Raises ValueError when the resolved target falls outside the spec's
    build directory.
    """
    resolved = realpath(target)
    if resolved.startswith(spec[BUILD_DIR]):
        return
    raise ValueError('build_target %s is outside build_dir' % target)
Essentially validate that the target is inside the build_dir .
45,803
def transpile_modname_source_target(self, spec, modname, source, target):
    """Transpile a single JavaScript source file into the build target.

    Called by compile_transpile_entry for each entry.  When the
    configured transpiler is not a calmjs.parse BaseUnparser instance, a
    deprecation warning is emitted and the legacy simple transpile path
    is taken instead.
    """
    if not isinstance(self.transpiler, BaseUnparser):
        _deprecation_warning(
            'transpiler callable assigned to %r must be an instance of '
            'calmjs.parse.unparsers.base.BaseUnparser by calmjs-4.0.0; '
            'if the original transpile behavior is to be retained, the '
            'subclass may instead override this method to call '
            '`simple_transpile_modname_source_target` directly, as '
            'this fallback behavior will be removed by calmjs-4.0.0' % (
                self,))
        # legacy fallback path
        return self.simple_transpile_modname_source_target(
            spec, modname, source, target)
    return self._transpile_modname_source_target(
        spec, modname, source, target)
The function that gets called by compile_transpile_entry for processing the provided JavaScript source file provided by some Python package through the transpiler instance .
45,804
def simple_transpile_modname_source_target(self, spec, modname, source, target):
    """Original simple transpile implementation for a single target.

    Reads ``source`` and writes the transpiled output to the generated
    build target; when the transpiler produced mappings and the spec
    requests source map generation, a ``.map`` file is written next to
    the target and a sourceMappingURL comment is appended.
    """
    opener = self.opener
    bd_target = self._generate_transpile_target(spec, target)
    logger.info('Transpiling %s to %s', source, bd_target)
    with opener(source, 'r') as reader, opener(bd_target, 'w') as _writer:
        # wrap the raw writer so mappings are captured during transpile
        writer = SourceWriter(_writer)
        self.transpiler(spec, reader, writer)
        if writer.mappings and spec.get(GENERATE_SOURCE_MAP):
            source_map_path = bd_target + '.map'
            with open(source_map_path, 'w') as sm_fd:
                self.dump(encode_sourcemap(
                    filename=bd_target,
                    mappings=writer.mappings,
                    sources=[source],
                ), sm_fd)
            # reference the map by basename, as it sits beside the target
            source_map_url = basename(source_map_path)
            _writer.write('\n//# sourceMappingURL=')
            _writer.write(source_map_url)
            _writer.write('\n')
The original simple transpile method called by compile_transpile on each target .
45,805
def compile_transpile_entry(self, spec, entry):
    """Handle a single entry for the transpile compile process.

    Invokes the configured transpiler on the source file and returns the
    modpath mapping, target mapping and exported module names for the
    entry's modname.
    """
    modname, source, target, modpath = entry
    self.transpile_modname_source_target(spec, modname, source, target)
    return {modname: modpath}, {modname: target}, [modname]
Handler for each entry for the transpile method of the compile process . This invokes the transpiler that was set up to transpile the input files into the build directory .
45,806
def compile_bundle_entry(self, spec, entry):
    """Handle a single entry for the bundle compile process.

    Copies the source file (or, for a directory, the whole tree) into
    the build directory.  Only a plain file results in the modname being
    added to the exported module names; a source that is neither file
    nor directory is silently left uncopied.
    """
    modname, source, target, modpath = entry
    bundled_modpath = {modname: modpath}
    bundled_target = {modname: target}
    export_module_name = []
    if isfile(source):
        export_module_name.append(modname)
        copy_target = join(spec[BUILD_DIR], target)
        if not exists(dirname(copy_target)):
            # ensure intermediate directories exist before copying
            makedirs(dirname(copy_target))
        shutil.copy(source, copy_target)
    elif isdir(source):
        # directories are copied under the modname, not the target
        copy_target = join(spec[BUILD_DIR], modname)
        shutil.copytree(source, copy_target)
    return bundled_modpath, bundled_target, export_module_name
Handler for each entry for the bundle method of the compile process . This copies the source file or directory into the build directory .
45,807
def compile_loaderplugin_entry(self, spec, entry):
    """Generic loader plugin entry handler.

    Looks up the handler registered for the entry's modname in the
    spec's loaderplugin registry and delegates to it; when no handler is
    found, a warning is logged and empty results are returned.
    """
    modname, source, target, modpath = entry
    handler = spec[CALMJS_LOADERPLUGIN_REGISTRY].get(modname)
    if not handler:
        logger.warning(
            "no loaderplugin handler found for plugin entry '%s'", modname)
        return {}, {}, []
    return handler(self, spec, modname, source, target, modpath)
Generic loader plugin entry handler .
45,808
def modname_source_target_modnamesource_to_modpath(
        self, spec, modname, source, target, modname_source):
    """Return the module path used for linkage in the output file.

    By default the raw ``modname_source`` pair is ignored and this
    simply delegates to ``modname_source_target_to_modpath``, since
    typical JavaScript tools expect the plain modname rather than the
    target file; subclasses may override to build a different string.
    """
    return self.modname_source_target_to_modpath(spec, modname, source, target)
Typical JavaScript tools will get confused if . js is added so by default the same modname is returned as path rather than the target file for the module path to be written to the output file for linkage by tools . Some other tools may desire the target to be returned instead or construct some other string that is more suitable for the tool that will do the assemble and link step .
45,809
def _gen_modname_source_target_modpath(self, spec, d):
    """Private generator consuming the modname/source mapping ``d``.

    Yields (modname, source, target, modpath) tuples; entries whose
    resolution raises ValueError are logged (at info level for the
    deliberate ValueSkip subclass, warning otherwise) and skipped.
    This should NOT be overridden.
    """
    for modname_source in d.items():
        try:
            modname = self.modname_source_to_modname(spec, *modname_source)
            source = self.modname_source_to_source(spec, *modname_source)
            target = self.modname_source_to_target(spec, *modname_source)
            modpath = self.modname_source_target_modnamesource_to_modpath(
                spec, modname, source, target, modname_source)
        except ValueError as e:
            # recover the name of the function that raised, for the log
            f_name = sys.exc_info()[2].tb_next.tb_frame.f_code.co_name
            if isinstance(e, ValueSkip):
                # the skip was deliberate, so log quietly
                log = partial(
                    logger.info,
                    "toolchain purposely skipping on '%s', "
                    "reason: %s, where modname='%s', source='%s'",
                )
            else:
                log = partial(
                    logger.warning,
                    "toolchain failed to acquire name with '%s', "
                    "reason: %s, where modname='%s', source='%s'; "
                    "skipping",
                )
            log(f_name, e, *modname_source)
            continue
        yield modname, source, target, modpath
Private generator that will consume those above functions . This should NOT be overridden .
45,810
def compile(self, spec):
    """Generic compile step for the toolchain.

    Gathers the configured compile entries and feeds each through its
    method (transpile, bundle, loaderplugin handler, ...), writing the
    resulting modpath/target mappings into the spec and accumulating the
    exported module names.
    """
    # ensure the export module names list exists on the spec and is a list
    spec[EXPORT_MODULE_NAMES] = export_module_names = spec.get(
        EXPORT_MODULE_NAMES, [])
    if not isinstance(export_module_names, list):
        raise TypeError(
            "spec provided a '%s' but it is not of type list "
            "(got %r instead)" % (EXPORT_MODULE_NAMES, export_module_names))

    def compile_entry(method, read_key, store_key):
        # derive the spec keys this entry reads from and writes to
        spec_read_key = read_key + self.sourcepath_suffix
        spec_modpath_key = store_key + self.modpath_suffix
        spec_target_key = store_key + self.targetpath_suffix
        if _check_key_exists(spec, [spec_modpath_key, spec_target_key]):
            # refuse to clobber results already present in the spec
            logger.error(
                "aborting compile step %r due to existing key", entry,
            )
            return
        sourcepath_dict = spec.get(spec_read_key, {})
        entries = self._gen_modname_source_target_modpath(
            spec, sourcepath_dict)
        (spec[spec_modpath_key], spec[spec_target_key],
            new_module_names) = method(spec, entries)
        logger.debug(
            "entry %r "
            "wrote %d entries to spec[%r], "
            "wrote %d entries to spec[%r], "
            "added %d export_module_names",
            entry,
            len(spec[spec_modpath_key]), spec_modpath_key,
            len(spec[spec_target_key]), spec_target_key,
            len(new_module_names),
        )
        export_module_names.extend(new_module_names)

    for entry in self.compile_entries:
        if isinstance(entry, ToolchainSpecCompileEntry):
            # dedicated entry objects route through the generic
            # toolchain_spec_compile_entries helper, with an optional
            # overwrite logger
            log = partial(
                logging.getLogger(entry.logger).log, entry.log_level, (
                    entry.store_key + "%s['%s'] is being rewritten from "
                    "'%s' to '%s'; configuration may now be invalid"),
            ) if entry.logger else None
            compile_entry(partial(
                toolchain_spec_compile_entries, self,
                process_name=entry.process_name, overwrite_log=log,
            ), entry.read_key, entry.store_key)
            continue
        # legacy 3-tuple entries: (method-or-name, read_key, store_key)
        m, read_key, store_key = entry
        if callable(m):
            method = m
        else:
            method = getattr(self, self.compile_prefix + m, None)
            if not callable(method):
                logger.error(
                    "'%s' not a callable attribute for %r from "
                    "compile_entries entry %r; skipping", m, self, entry)
                continue
        compile_entry(method, read_key, store_key)
Generic step that compiles from a spec to build the specified things into the build directory build_dir by gathering all the files and feed them through the transpilation process or by simple copying .
45,811
def _calf ( self , spec ) : self . prepare ( spec ) self . compile ( spec ) self . assemble ( spec ) self . link ( spec ) self . finalize ( spec )
The main call assuming the base spec is prepared .
45,812
def calf(self, spec):
    """Safe entry point that sets up everything before running the steps.

    Validates the spec, provisions (or validates) the build directory,
    resolves the export target, then runs each toolchain step wrapped in
    the spec's before/after advice handling; cleanup advice always runs.
    """
    if not isinstance(spec, Spec):
        raise TypeError('spec must be of type Spec')
    if not spec.get(BUILD_DIR):
        # no build dir configured: provision a temporary one and arrange
        # for its removal at cleanup time
        tempdir = realpath(mkdtemp())
        spec.advise(CLEANUP, shutil.rmtree, tempdir)
        build_dir = join(tempdir, 'build')
        mkdir(build_dir)
        spec[BUILD_DIR] = build_dir
    else:
        build_dir = self.realpath(spec, BUILD_DIR)
        if not isdir(build_dir):
            logger.error("build_dir '%s' is not a directory", build_dir)
            raise_os_error(errno.ENOTDIR, build_dir)
    self.realpath(spec, EXPORT_TARGET)
    spec.handle(SETUP)
    try:
        process = ('prepare', 'compile', 'assemble', 'link', 'finalize')
        for p in process:
            spec.handle('before_' + p)
            getattr(self, p)(spec)
            spec.handle('after_' + p)
        spec.handle(SUCCESS)
    except ToolchainCancel:
        # cancellation is not an error; fall through to cleanup
        pass
    finally:
        spec.handle(CLEANUP)
Typical safe usage is this which sets everything that could be problematic up .
45,813
def transpile_modname_source_target(self, spec, modname, source, target):
    """Delegate straight to the original simple transpile implementation."""
    simple = self.simple_transpile_modname_source_target
    return simple(spec, modname, source, target)
Calls the original version .
45,814
def get_bin_version_str(bin_path, version_flag='-v', kw=None):
    """Get the version string by executing the binary at ``bin_path``.

    ``version_flag`` is passed to the binary; ``kw`` are keyword
    arguments forwarded to the subprocess invocation.  Returns the
    extracted version string, or None on any failure.
    """
    # fix: avoid a shared mutable default argument for kw
    kw = {} if kw is None else kw
    try:
        prog = _get_exec_binary(bin_path, kw)
        version_str = version_expr.search(
            check_output([prog, version_flag], **kw).decode(locale)
        ).groups()[0]
    except OSError:
        logger.warning("failed to execute '%s'", bin_path)
        return None
    except Exception:
        # any other failure (bad output, regex miss, decode error) is
        # logged with traceback but still results in None
        logger.exception(
            "encountered unexpected error while trying to find version of "
            "'%s':", bin_path)
        return None
    logger.info("'%s' is version '%s'", bin_path, version_str)
    return version_str
Get the version string through the binary .
45,815
def get_bin_version(bin_path, version_flag='-v', kw=None):
    """Get the binary's version as a tuple of integers.

    Returns None when the version string could not be determined.
    """
    # fix: avoid a shared mutable default argument for kw
    kw = {} if kw is None else kw
    version_str = get_bin_version_str(bin_path, version_flag, kw)
    if version_str:
        return tuple(int(i) for i in version_str.split('.'))
Get the version string through the binary and return a tuple of integers .
45,816
def node(self, source, args=(), env=None):
    """Call node with an inline ``source``, optional args and env vars."""
    # fix: avoid a shared mutable default argument for env
    env = {} if env is None else env
    return self._exec(self.node_bin, source, args=args, env=env)
Calls node with an inline source .
45,817
def create_for_module_vars(cls, scope_vars):
    """Create an instance and export its aliases into ``scope_vars``.

    Originally designed to be invoked at module level; locates the
    Node.js backed executable via the working directory's node_modules
    or NODE_PATH, warning (without failing) when it cannot be found,
    then updates the given scope with the instance's aliases.
    """
    inst = cls()
    if not inst._set_env_path_with_node_modules():
        # binary could not be located; warn but still return a usable
        # instance so the module-level names exist
        import warnings
        msg = (
            "Unable to locate the '%(binary)s' binary or runtime; default "
            "module level functions will not work. Please either provide "
            "%(PATH)s and/or update %(PATH)s environment variable "
            "with one that provides '%(binary)s'; or specify a "
            "working %(NODE_PATH)s environment variable with "
            "%(binary)s installed; or have install '%(binary)s' into "
            "the current working directory (%(cwd)s) either through "
            "npm or calmjs framework for this package. Restart or "
            "reload this module once that is done. Alternatively, "
            "create a manual Driver instance for '%(binary)s' with "
            "explicitly defined arguments." % {
                'binary': inst.binary,
                'PATH': 'PATH',
                'NODE_PATH': 'NODE_PATH',
                'cwd': inst.join_cwd(),
            })
        warnings.warn(msg, RuntimeWarning)
    scope_vars.update(inst._aliases)
    return inst
This was originally designed to be invoked at the module level for packages that implement specific support, but it can also be used to create an instance whose Node.js backed executable is found via the current directory's node_modules or NODE_PATH.
45,818
def pkg_manager_view(self, package_names, stream=None, explicit=False, **kw):
    """Return the flattened manifest JSON for the given Python packages.

    When ``explicit`` is False the full requirement graph of the named
    packages is flattened; otherwise only the named distributions are
    used.  If ``stream`` is provided the JSON is also dumped to it.
    Raises ValueError for malformed package names.
    """
    # select distribution resolution strategy by the explicit flag
    to_dists = {
        False: find_packages_requirements_dists,
        True: pkg_names_to_dists,
    }
    pkg_names, malformed = convert_package_names(package_names)
    if malformed:
        msg = 'malformed package name(s) specified: %s' % ', '.join(
            malformed)
        raise ValueError(msg)
    if len(pkg_names) == 1:
        logger.info(
            "generating a flattened '%s' for '%s'",
            self.pkgdef_filename, pkg_names[0],
        )
    else:
        logger.info(
            "generating a flattened '%s' for packages {%s}",
            self.pkgdef_filename, ', '.join(pkg_names),
        )
    dists = to_dists[explicit](pkg_names)
    pkgdef_json = flatten_dist_egginfo_json(
        dists, filename=self.pkgdef_filename, dep_keys=self.dep_keys,
    )
    # fall back to the last requested package's project name when the
    # flattened result did not provide a name
    if pkgdef_json.get(self.pkg_name_field, NotImplemented) is NotImplemented:
        pkg_name = Requirement.parse(pkg_names[-1]).project_name
        pkgdef_json[self.pkg_name_field] = pkg_name
    if stream:
        self.dump(pkgdef_json, stream)
        stream.write('\n')
    return pkgdef_json
Returns the manifest JSON for the Python package name . Default npm implementation calls for package . json .
45,819
def pkg_manager_install(self, package_names=None, production=None,
                        development=None, args=(), env=None, **kw):
    """Install dependencies via the configured JavaScript package manager.

    Requires the package definition file to be generated first (via
    pkg_manager_init); aborts with a warning when no package names are
    given or the definition generation failed.  Returns True on
    success, None when aborted; subprocess failures are logged and
    re-raised.
    """
    # fix: avoid a shared mutable default argument for env
    env = {} if env is None else env
    if not package_names:
        logger.warning(
            "no package name supplied, not continuing with '%s %s'",
            self.pkg_manager_bin, self.install_cmd,
        )
        return
    result = self.pkg_manager_init(package_names, **kw)
    if result is False:
        logger.warning(
            "not continuing with '%s %s' as the generation of "
            "'%s' failed",
            self.pkg_manager_bin, self.install_cmd, self.pkgdef_filename)
        return
    call_kw = self._gen_call_kws(**env)
    logger.debug(
        "invoking '%s %s'", self.pkg_manager_bin, self.install_cmd)
    if self.env_path:
        logger.debug("invoked with env_path '%s'", self.env_path)
    if self.working_dir:
        logger.debug(
            "invoked from working directory '%s'", self.working_dir)
    try:
        cmd = [self._get_exec_binary(call_kw), self.install_cmd]
        cmd.extend(self._prodev_flag(
            production, development, result.get(self.devkey)))
        cmd.extend(args)
        logger.info('invoking %s', ' '.join(cmd))
        call(cmd, **call_kw)
    except (IOError, OSError):
        logger.error(
            "invocation of the '%s' binary failed; please ensure it and "
            "its dependencies are installed and available.", self.binary)
        raise
    return True
This will install all dependencies into the current working directory for the specific Python package from the selected JavaScript package manager; this requires that this package manager's package definition file be properly generated first, otherwise the process will be aborted.
45,820
def run(self, args=(), env=None):
    """Call the package manager binary with the given arguments."""
    # fix: avoid a shared mutable default argument for env
    env = {} if env is None else env
    return self._exec(self.binary, args=args, env=env)
Calls the package manager with the arguments .
45,821
def _get_exec_binary(binary, kw):
    """Resolve ``binary`` to a full path using the PATH in ``kw['env']``.

    On win32 subprocess only reliably resolves real binaries (Node.js
    scripts would otherwise need shell=True, a security risk), so the
    resolution is done manually through ``which``; raises an ENOENT
    OSError when the binary cannot be found.
    """
    search_path = kw.get('env', {}).get('PATH')
    resolved = which(binary, path=search_path)
    if resolved is None:
        raise_os_error(errno.ENOENT)
    return resolved
On win32 the subprocess module can only reliably resolve the target binary if it s actually a binary ; as for a Node . js script it seems to only work iff shell = True was specified presenting a security risk . Resolve the target manually through which will account for that .
45,822
def _init_entry_points(self, entry_points):
    """Default initialization loop over the provided entry points.

    Each entry point is registered individually; an ImportError skips
    that entry point with a warning, while any other exception is
    logged with traceback so one bad entry point cannot abort the rest.
    """
    logger.debug(
        "registering %d entry points for registry '%s'",
        len(entry_points), self.registry_name,
    )
    for entry_point in entry_points:
        try:
            logger.debug(
                "registering entry point '%s' from '%s'",
                entry_point, entry_point.dist,
            )
            self._init_entry_point(entry_point)
        except ImportError:
            logger.warning(
                'ImportError: %s not found; skipping registration',
                entry_point.module_name)
        except Exception:
            logger.exception(
                "registration of entry point '%s' from '%s' to registry "
                "'%s' failed with the following exception",
                entry_point, entry_point.dist, self.registry_name,
            )
Default initialization loop .
45,823
def store_records_for_package(self, entry_point, records):
    """Store ``records`` so they can later be looked up by package.

    The records are appended to the per-package list resolved from the
    entry point's distribution.
    """
    bucket = self._dist_to_package_module_map(entry_point)
    bucket.extend(records)
Store the records in a way that permit lookup by package
45,824
def register_entry_point(self, entry_point):
    """Register a lone entry point.

    Imports the module the entry point names and registers it; import
    failures propagate to the caller.
    """
    module = _import_module(entry_point.module_name)
    self._register_entry_point_module(entry_point, module)
Register a lone entry_point
45,825
def _register_entry_point_module(self, entry_point, module):
    """Register an entry point together with its imported module.

    Maps the module to its records, stores the record keys for
    per-package lookup, then merges each module's records into the
    registry — existing modules get their records updated in place
    (with overwritten keys logged), new modules are added outright.
    """
    records_map = self._map_entry_point_module(entry_point, module)
    self.store_records_for_package(entry_point, list(records_map.keys()))
    for module_name, records in records_map.items():
        if module_name in self.records:
            # layering on top of an existing declaration; be noisy about
            # which keys get clobbered
            logger.info(
                "module '%s' was already declared in registry '%s'; "
                "applying new records on top.",
                module_name, self.registry_name,
            )
            logger.debug(
                "overwriting keys: %s",
                sorted(
                    set(self.records[module_name].keys()) &
                    set(records.keys())))
            self.records[module_name].update(records)
        else:
            logger.debug(
                "adding records for module '%s' to registry '%s'",
                module_name, self.registry_name,
            )
            self.records[module_name] = records
Private method that registers an entry_point with a provided module .
45,826
def get_record(self, name):
    """Return a shallow copy of the record registered under ``name``.

    Unknown names yield an empty dict; mutating the returned mapping
    does not affect the registry.
    """
    return dict(self.records.get(name, {}))
Get a record by name
45,827
def resolve_parent_registry_name(self, registry_name, suffix):
    """Derive the parent registry name by stripping ``suffix``.

    Raises ValueError when the registry name does not end with the
    expected suffix.  Subclasses should override to supply the default
    suffix, as the invocation is done without one.
    """
    if registry_name.endswith(suffix):
        return registry_name[:-len(suffix)]
    raise ValueError(
        "child module registry name defined with invalid suffix "
        "('%s' does not end with '%s')" % (registry_name, suffix))
Subclasses should override to specify the default suffix as the invocation is done without a suffix .
45,828
def get_record(self, name):
    """Return a copy of the set of module names registered for ``name``.

    Unknown names yield an empty set; the copy keeps the registry's own
    set safe from caller mutation.
    """
    return set(self.records.get(name, set()))
Get a record for the registered name which will be a set of matching desired module names for the given path .
45,829
def which(self):
    """Figure out which binary this instance would execute.

    Returns None when no binary is configured; otherwise resolves it
    against this instance's env_path.
    """
    binary = self.binary
    if binary is None:
        return None
    return which(binary, path=self.env_path)
Figure out which binary this will execute .
45,830
def find_node_modules_basedir(self):
    """Find all node_modules directories accessible through this driver.

    The working directory's node_modules (if it exists) comes first,
    followed by every path listed in the instance's node_path.
    """
    paths = []
    local_node_path = self.join_cwd(NODE_MODULES)
    if isdir(local_node_path):
        paths.append(local_node_path)
    if self.node_path:
        # node_path follows the OS path-list convention
        paths.extend(self.node_path.split(pathsep))
    return paths
Find all node_modules directories configured to be accessible through this driver instance .
45,831
def which_with_node_modules(self):
    """Resolve the binary through node_modules and NODE_PATH locations.

    Builds the candidate .bin directories from every node_modules base
    dir this driver can see and resolves the configured binary against
    them; returns None when no binary is configured.
    """
    if self.binary is None:
        return None
    if isdir(self.join_cwd(NODE_MODULES)):
        logger.debug(
            "'%s' instance will attempt to locate '%s' binary from "
            "%s%s%s%s%s, located through the working directory",
            self.__class__.__name__, self.binary, self.join_cwd(),
            sep, NODE_MODULES, sep, NODE_MODULES_BIN,
        )
    if self.node_path:
        logger.debug(
            "'%s' instance will attempt to locate '%s' binary from "
            "its %s of %s",
            self.__class__.__name__, self.binary,
            NODE_PATH, self.node_path,
        )
    paths = self.find_node_modules_basedir()
    # each node_modules base dir exposes executables under its .bin
    whichpaths = pathsep.join(join(p, NODE_MODULES_BIN) for p in paths)
    if paths:
        logger.debug(
            "'%s' instance located %d possible paths to the '%s' binary, "
            "which are %s",
            self.__class__.__name__, len(paths), self.binary, whichpaths,
        )
    return which(self.binary, path=whichpaths)
Which with node_path and node_modules
45,832
def _set_env_path_with_node_modules(self):
    """Attempt to locate the binary and record its directory as env_path.

    Returns True when the binary is already resolvable (leaving PATH
    handling untouched) or when it was found through node_modules /
    NODE_PATH (setting env_path accordingly); False when it cannot be
    located at all.  Raises ValueError when no binary is configured.
    """
    modcls_name = ':'.join(
        (self.__class__.__module__, self.__class__.__name__))
    if self.binary is None:
        raise ValueError(
            "binary undefined for '%s' instance" % modcls_name)
    logger.debug(
        "locating '%s' node binary for %s instance...",
        self.binary, modcls_name,
    )
    default = self.which()
    if default is not None:
        # already resolvable through the current environment
        logger.debug(
            "found '%s'; "
            "not modifying PATH environment variable in instance of '%s'.",
            realpath(default), modcls_name)
        return True
    target = self.which_with_node_modules()
    if target:
        # remember the directory so later invocations resolve the binary
        self.env_path = dirname(target)
        logger.debug(
            "located '%s' binary at '%s'; setting PATH environment "
            "variable for '%s' instance.",
            self.binary, self.env_path, modcls_name)
        return True
    else:
        logger.debug(
            "Unable to locate '%s'; not modifying PATH environment "
            "variable for instance of '%s'.",
            self.binary, modcls_name)
        return False
Attempt to locate and set the paths to the binary with the working directory defined for this instance .
45,833
def _exec(self, binary, stdin='', args=(), env=None):
    """Execute ``binary`` with the given stdin, args and env overrides.

    The environment overrides are turned into call keywords, the binary
    is resolved to a concrete path, and the process is forked with the
    assembled argument list.
    """
    # fix: avoid a shared mutable default argument for env
    env = {} if env is None else env
    call_kw = self._gen_call_kws(**env)
    call_args = [self._get_exec_binary(call_kw)]
    call_args.extend(args)
    return fork_exec(call_args, stdin, **call_kw)
Executes the binary using stdin and args with environment variables .
45,834
def dump(self, blob, stream):
    """Serialize ``blob`` as JSON to ``stream``.

    Uses this instance's indent and separators, with keys sorted.
    """
    options = {
        'indent': self.indent,
        'sort_keys': True,
        'separators': self.separators,
    }
    json.dump(blob, stream, **options)
Call json . dump with the attributes of this instance as arguments .
45,835
def dumps(self, blob):
    """Return ``blob`` serialized as a JSON string.

    Uses this instance's indent and separators, with keys sorted.
    """
    options = {
        'indent': self.indent,
        'sort_keys': True,
        'separators': self.separators,
    }
    return json.dumps(blob, **options)
Call json . dumps with the attributes of this instance as arguments .
45,836
def join_cwd(self, path=None):
    """Join ``path`` with this instance's working directory.

    Uses the instance's working_dir when set, falling back to the
    process's current working directory; returns the base directory
    itself when no path is given.
    """
    if self.working_dir:
        logger.debug(
            "'%s' instance 'working_dir' set to '%s' for join_cwd",
            type(self).__name__, self.working_dir,
        )
        cwd = self.working_dir
    else:
        cwd = getcwd()
        logger.debug(
            "'%s' instance 'working_dir' unset; "
            "default to process '%s' for join_cwd",
            type(self).__name__, cwd,
        )
    if path:
        return join(cwd, path)
    return cwd
Join the path with the current working directory . If it is specified for this instance of the object it will be used otherwise rely on the global value .
45,837
def _unicode_handler ( obj ) : try : result = obj . isoformat ( ) except AttributeError : raise TypeError ( "Unserializable object {} of type {}" . format ( obj , type ( obj ) ) ) return result
Serializes an object by returning its ISO 8601 string via its isoformat method; raises TypeError if the object does not provide one.
45,838
def encode(self, entity):
    """Encode a domain model instance into a JSON string.

    The entity is first reduced to plain dict form by the instance's
    dict encoder, then serialized; on Python 2 an explicit latin1
    encoding argument is required by json.dumps.
    """
    encoded = self._dict_encoder.encode(entity)
    if sys.version_info[0] == 2:
        # py2 json.dumps accepts (and here needs) the encoding argument
        result = json.dumps(encoded, ensure_ascii=False,
                            default=_iso_handler, encoding='latin1')
    else:
        result = json.dumps(encoded, ensure_ascii=False,
                            default=_iso_handler)
    return result
Encodes the data creating a JSON structure from an instance from the domain model .
45,839
def ipi_base_number(name=None):
    """IPI Base Number field grammar.

    Accepts either the canonical 'I-#########-#' form or a plain
    13-digit numeric fallback; whitespace is not skipped.
    """
    if name is None:
        name = 'IPI Base Number Field'
    field = pp.Regex('I-[0-9]{9}-[0-9]')
    field.setName(name)
    field_num = basic.numeric(13)
    field_num.setName(name)
    # try the canonical pattern first, then the numeric fallback
    field = field | field_num
    field.leaveWhitespace()
    return field.setResultsName('ipi_base_n')
IPI Base Number field .
45,840
def ipi_name_number(name=None):
    """IPI Name Number field grammar (11 numeric digits)."""
    if name is None:
        name = 'IPI Name Number Field'
    grammar = basic.numeric(11)
    grammar.setName(name)
    return grammar.setResultsName('ipi_name_n')
IPI Name Number field .
45,841
def iswc(name=None):
    """ISWC field grammar ('T' followed by ten digits, no whitespace skip)."""
    if name is None:
        name = 'ISWC Field'
    grammar = pp.Regex('T[0-9]{10}')
    grammar.setName(name)
    grammar.leaveWhitespace()
    return grammar.setResultsName('iswc')
ISWC field .
45,842
def _assert_is_percentage ( value , maximum = 100 ) : if value < 0 or value > maximum : message = 'The value on a percentage field should be between 0 and %s' % maximum raise pp . ParseException ( message )
Makes sure the received value is a percentage . Otherwise an exception is thrown .
45,843
def ean_13(name=None):
    """Grammar for an EAN 13 code (13 numeric digits)."""
    if name is None:
        name = 'EAN 13 Field'
    grammar = basic.numeric(13).setName(name)
    return grammar.setResultsName('ean_13')
Creates the grammar for an EAN 13 code .
45,844
def isrc(name=None):
    """Grammar for an ISRC code, accepting the short or the long form."""
    if name is None:
        name = 'ISRC Field'
    grammar = _isrc_short(name) | _isrc_long(name)
    grammar.setName(name)
    return grammar.setResultsName('isrc')
Creates the grammar for an ISRC code .
45,845
def _isrc_long(name=None):
    """Grammar for a long-form ISRC code.

    The leading country-code alternation is built from the configured
    ISRC country code table, followed by a 3-character registrant, a
    2-digit year and a 5-digit designation.
    """
    config = CWRTables()
    if name is None:
        name = 'ISRC Field'
    country = config.get_data('isrc_country_code')
    # idiomatic join replaces the manual '|'-accumulating loop
    country_regex = '(' + '|'.join(country) + ')'
    field = pp.Regex(country_regex + '.{3}[0-9]{2}[0-9]{5}')
    field.setName(name)
    return field.setResultsName('isrc')
Creates the grammar for a long ISRC code.
45,846
def visan(name=None):
    """Grammar for a V-ISAN code (25 numeric digits)."""
    if name is None:
        name = 'V-ISAN Field'
    grammar = pp.Regex('[0-9]{25}')
    grammar.setName(name)
    return grammar.setResultsName('visan')
Creates the grammar for a V - ISAN code .
45,847
def audio_visual_key(name=None):
    """Grammar for an Audio Visual Key (AVI) code.

    Composed of a 3-digit society code followed by an optional
    15-character audio-visual number (an all-blank number parses as the
    empty string); the grouped result is converted through ``_to_avi``.
    """
    if name is None:
        name = 'AVI Field'
    society_code = basic.numeric(3)
    society_code = society_code.setName('Society Code').setResultsName(
        'society_code')
    av_number = basic.alphanum(15, extended=True, isLast=True)
    # a run of 15 spaces is treated as an empty number
    field_empty = pp.Regex('[ ]{15}')
    field_empty.setParseAction(pp.replaceWith(''))
    av_number = av_number | field_empty
    av_number = av_number.setName('Audio-Visual Number').setResultsName(
        'av_number')
    field = pp.Group(society_code + pp.Optional(av_number))
    # collapse the grouped tokens into the AVI value object
    field.setParseAction(lambda v: _to_avi(v[0]))
    field = field.setName(name)
    return field.setResultsName('audio_visual_key')
Creates the grammar for an Audio Visual Key code .
45,848
def lookup_int(values, name=None):
    """Lookup field whose parsed result is converted to an integer."""
    field = basic.lookup(values, name)
    field.addParseAction(lambda tokens: int(tokens[0]))
    return field
Lookup field which transforms the result into an integer .
45,849
def extract_function_argument(text, f_name, f_argn, f_argt=asttypes.String):
    """Extract argument ``f_argn`` of every call to ``f_name`` in ``text``.

    The source text is parsed into an AST and the matching arguments
    (of node type ``f_argt``) are collected into a list.
    """
    return list(filter_function_argument(parse(text), f_name, f_argn, f_argt))
Extract a specific argument from a specific function name .
45,850
def yield_amd_require_string_arguments(
        node, pos,
        reserved_module=reserved_module, wrapped=define_wrapped):
    """Yield strings from the list at argument position ``pos`` of a call.

    Reserved module names and the wrapped define name at the matching
    list index are skipped.
    """
    for i, child in enumerate(node.args.items[pos]):
        if isinstance(child, asttypes.String):
            result = to_str(child)
            # bug fix: honour the ``wrapped`` parameter instead of always
            # reading the module-level ``define_wrapped`` default, which
            # silently ignored any caller-supplied mapping
            if result not in reserved_module and result != wrapped.get(i):
                yield result
This yields only strings within the lists provided in the argument list at the specified position from a function call .
45,851
def yield_string_argument(node, pos):
    """Yield the call argument at ``pos`` only when it is a String node."""
    arg = node.args.items[pos]
    if isinstance(arg, asttypes.String):
        yield to_str(arg)
Yield just a string argument from position of the function call .
45,852
def yield_module_imports(root, checks=string_imports()):
    """Yield module names from require/define calls under ``root``.

    Gathers imports in both CommonJS and AMD syntax from an unbundled
    JavaScript AST.  Raises TypeError when root is not an AST node.
    """
    if not isinstance(root, asttypes.Node):
        raise TypeError('provided root must be a node')
    for child in yield_function(root, deep_filter):
        for f, condition in checks:
            if condition(child):
                for name in f(child):
                    yield name
                # NOTE(review): this ``continue`` resumes the *checks*
                # loop, so a child matching several checks yields from
                # each of them — confirm multiple matches are intended
                continue
Gather all require and define calls from unbundled JavaScript source files and yield all module names . The imports can either be of the CommonJS or AMD syntax .
45,853
def yield_module_imports_nodes(root, checks=import_nodes()):
    """Yield all AST nodes that provide an import under ``root``.

    Same traversal as yield_module_imports, but yields the matching
    nodes rather than the extracted names.  Raises TypeError when root
    is not an AST node.
    """
    if not isinstance(root, asttypes.Node):
        raise TypeError('provided root must be a node')
    for child in yield_function(root, deep_filter):
        for f, condition in checks:
            if condition(child):
                for name in f(child):
                    yield name
                # NOTE(review): this ``continue`` resumes the *checks*
                # loop, so a child matching several checks yields from
                # each of them — confirm multiple matches are intended
                continue
Yield all nodes that provide an import
45,854
def resolve_child_module_registries_lineage(registry):
    """Resolve the lineage of a child module registry.

    Walks the parent chain, validating along the way: a cycle raises
    TypeError, while naming irregularities (parent name longer than the
    child's, or child name not prefixed by the parent's) are only
    warned about.  Returns an iterator from the root ancestor down to
    the given registry.
    """
    children = [registry]
    while isinstance(registry, BaseChildModuleRegistry):
        if registry.parent in children:
            # the registry would be its own (grand)child — a cycle
            raise TypeError(
                "registry '%s' was already recorded in the lineage, "
                "indicating that it may be some (grand)child of itself, "
                "which "
                "is an illegal reference in the registry system; previously "
                "resolved lineage is: %r" % (
                    registry.parent.registry_name,
                    [r.registry_name for r in reversed(children)]))
        pl = len(registry.parent.registry_name)
        if len(registry.parent.registry_name) > len(registry.registry_name):
            logger.warning(
                "the parent registry '%s' somehow has a longer name than "
                "its "
                "child registry '%s'; the underlying registry class may be "
                "constructed in an invalid manner",
                registry.parent.registry_name, registry.registry_name,
            )
        elif registry.registry_name[:pl] != registry.parent.registry_name:
            logger.warning(
                "child registry '%s' does not share the same common prefix "
                "as "
                "its parent registry '%s'; there may be errors with how the "
                "related registries are set up or constructed",
                registry.registry_name, registry.parent.registry_name,
            )
        children.append(registry.parent)
        registry = registry.parent
    # oldest ancestor first
    return iter(reversed(children))
For a given child module registry attempt to resolve the lineage .
45,855
def resource_filename_mod_dist(module_name, dist):
    """Resolve the filesystem path of ``module_name`` via ``dist``.

    First tries the distribution-scoped pkg_resources lookup; when the
    distribution is not found, falls back to resolving purely by the
    module name (with a warning).
    """
    try:
        return pkg_resources.resource_filename(
            dist.as_requirement(),
            # convert the dotted module name into a relative path
            join(*module_name.split('.')))
    except pkg_resources.DistributionNotFound:
        logger.warning(
            "distribution '%s' not found, falling back to resolution using "
            "module_name '%s'", dist, module_name,
        )
        return pkg_resources.resource_filename(module_name, '')
Given a module name and a distribution attempt to resolve the actual path to the module .
45,856
def resource_filename_mod_entry_point(module_name, entry_point):
    """Resolve a module's resource path, aided by its entry point.

    Namespace packages may leave the module without a usable path in
    Python's import mechanism; the entry point's distribution allows
    the path to be resolved anyway.  Returns None (with a warning) when
    no path can be found or the resolved path does not exist.
    """
    if entry_point.dist is None:
        # no distribution attached; plain module-name resolution only
        result = pkg_resources.resource_filename(module_name, '')
    else:
        result = resource_filename_mod_dist(module_name, entry_point.dist)
    if not result:
        logger.warning(
            "resource path cannot be found for module '%s' and entry_point "
            "'%s'", module_name, entry_point)
        return None
    if not exists(result):
        logger.warning(
            "resource path found at '%s' for module '%s' and entry_point "
            "'%s', but it does not exist", result, module_name, entry_point,
        )
        return None
    return result
If a given package declares a namespace and also provides submodules nested at that namespace level, and for whatever reason that module is needed, Python's import mechanism will not have a path associated with that module. However, given an entry_point, this path can be resolved through its distribution. That said, the default resource_filename function does not accept an entry_point, so we have to chain that back together manually.
45,857
def modgen(module, entry_point,
           modpath='pkg_resources', globber='root', fext=JS_EXT,
           registry=_utils):
    """JavaScript-styled module location listing generator.

    Resolves the module's base path(s) through the named (or callable)
    modpath strategy, then globs each base path for files with the
    given extension, yielding (module name fragments, base path,
    relative path) triples.
    """
    # string names are looked up in the local utility registry;
    # callables are used directly
    globber_f = globber if callable(globber) else registry['globber'][globber]
    modpath_f = modpath if callable(modpath) else registry['modpath'][modpath]
    logger.debug(
        'modgen generating file listing for module %s',
        module.__name__,
    )
    module_frags = module.__name__.split('.')
    module_base_paths = modpath_f(module, entry_point)
    for module_base_path in module_base_paths:
        logger.debug(
            'searching for *%s files in %s', fext, module_base_path)
        for path in globber_f(module_base_path, '*' + fext):
            mod_path = (relpath(path, module_base_path))
            yield (
                # extension stripped, path separators become name frags
                module_frags + mod_path[:-len(fext)].split(sep),
                module_base_path,
                mod_path,
            )
JavaScript styled module location listing generator .
45,858
def register(util_type, registry=_utils):
    """Crude local registration decorator for the module-local registry.

    The decorated function must be named '<util_type>_<key>'; it is stored
    under registry[util_type][key].
    """
    def marker(f):
        prefix = util_type + '_'
        if not f.__name__.startswith(prefix):
            raise TypeError(
                'not registering %s to %s' % (f.__name__, util_type))
        # Strip the '<util_type>_' prefix to form the registry key.
        registry[util_type][f.__name__[len(prefix):]] = f
        return f
    return marker
Crude local registration decorator for a crude local registry of all utilities local to this module .
45,859
def modpath_pkg_resources(module, entry_point):
    """Goes through pkg_resources for compliance with various PEPs.

    Returns a list of zero or one base paths for the module.
    """
    paths = []
    try:
        found = resource_filename_mod_entry_point(
            module.__name__, entry_point)
    except ImportError:
        logger.warning("module '%s' could not be imported", module.__name__)
    except Exception:
        logger.warning("%r does not appear to be a valid module", module)
    else:
        if found:
            paths.append(found)
    return paths
Goes through pkg_resources for compliance with various PEPs .
45,860
def mapper_python(module, entry_point, globber='root', fext=JS_EXT):
    """Default mapper using the python style module name scheme."""
    return mapper(
        module,
        entry_point=entry_point,
        modpath='pkg_resources',
        globber=globber,
        modname='python',
        fext=fext,
    )
Default mapper using python style globber
45,861
def _printable_id_code ( self ) : code = str ( self . id_code ) while len ( code ) < self . _code_size : code = '0' + code return code
Returns the code in a printable form filling with zeros if needed .
45,862
def _printable_id_code(self):
    """Return the padded code split into dot-separated groups of three."""
    full = super(ISWCCode, self)._printable_id_code()
    # Groups: first three, middle three, and last three characters.
    return '%s.%s.%s' % (full[0:3], full[3:6], full[-3:])
Returns the code in a printable form separating it into groups of three characters using a point between them .
45,863
def write(self, s):
    """Standard write for standard sources (part of the original file).

    Records a source-map segment for each written piece and keeps the
    row/column bookkeeping in sync.
    """
    for piece in s.splitlines(True):
        # One mapping segment per emitted piece.
        self.current_mapping.append(
            (self.generated_col, self.index, self.row, self.col_last))
        self.stream.write(piece)
        if piece[-1] in '\r\n':
            # Full line emitted: reset tracking for the next row.
            self._newline()
            self.row = 1
            self.col_current = 0
        else:
            # Partial line: advance the column counters.
            # NOTE(review): placement of these assignments reconstructed
            # from flattened source -- confirm against upstream.
            self.col_current += len(piece)
            self.generated_col = self.col_last = len(piece)
Standard write for standard sources part of the original file .
45,864
def discard(self, s):
    """Discard content from the original file without emitting it.

    Full lines advance the source row counter; a partial (unterminated)
    line is unsupported and triggers a one-time warning.
    """
    for piece in s.splitlines(True):
        if piece[-1] not in '\r\n':
            if not self.warn:
                logger.warning(
                    'partial line discard UNSUPPORTED; source map '
                    'generated will not match at the column level')
                self.warn = True
        else:
            self.row += 1
Discard from original file .
45,865
def write_padding(self, s):
    """Write text that is not part of the original file (no mapping)."""
    for chunk in s.splitlines(True):
        self.stream.write(chunk)
        if chunk[-1] in '\r\n':
            self._newline()
        else:
            # Padding on an open line still moves the generated column.
            self.generated_col += len(chunk)
Write string that are not part of the original file .
45,866
def format_currency(number, currency, format,
                    locale=babel.numbers.LC_NUMERIC, force_frac=None,
                    format_type='standard'):
    """Same as babel.numbers.format_currency, but takes a force_frac
    argument instead of currency_digits."""
    locale = babel.core.Locale.parse(locale)
    if format:
        pattern = babel.numbers.parse_pattern(format)
    else:
        try:
            pattern = locale.currency_formats[format_type]
        except KeyError:
            raise babel.numbers.UnknownCurrencyFormatError(
                "%r is not a known currency format type" % format_type)
    if force_frac is not None:
        frac = force_frac
    else:
        # Derive the fraction digits from babel's global currency table.
        fractions = babel.core.get_global('currency_fractions')
        try:
            digits = fractions[currency][0]
        except KeyError:
            digits = fractions['DEFAULT'][0]
        frac = (digits, digits)
    return pattern.apply(number, locale, currency=currency, force_frac=frac)
Same as babel . numbers . format_currency but has force_frac argument instead of currency_digits .
45,867
def reflect_db(self):
    """Reflect tables and views into metadata and prepare the automap base."""
    self.metadata.reflect(views=True, extend_existing=True)
    self.base = automap_base(metadata=self.metadata)
    self.base.prepare()
Reflect database tables and views into the metadata and prepare the automap base .
45,868
def trace_parser(p):
    """Decorator for tracing a parser: logs entry/exit to stderr with
    indentation proportional to the recursion depth."""

    def nodes_to_string(n):
        # Render a parse node (or nested list of nodes) as text.
        if isinstance(n, list):
            rendered = '[ '
            for part in map(nodes_to_string, n):
                rendered += part
                rendered += ' '
            rendered += ']'
            return rendered
        else:
            try:
                return tostring(remove_private(copy(n)))
            except Exception as e:
                # Fall back to the raw node when it cannot be serialized.
                return n

    def print_trace(*args):
        import sys
        sys.stderr.write(" " * tracing_level)
        for arg in args:
            sys.stderr.write(str(arg))
            sys.stderr.write(' ')
        sys.stderr.write('\n')
        sys.stderr.flush()

    def wrapped(s, *args, **kwargs):
        # NOTE(review): relies on a module-level `tracing_level` counter.
        global tracing_level
        print_trace(p.__name__, repr(s))
        tracing_level += 1
        s, n = p(s, *args, **kwargs)
        tracing_level -= 1
        print_trace("-> ", repr(s), nodes_to_string(n))
        return s, n

    return wrapped
Decorator for tracing the parser .
45,869
def change_response(x, prob, index):
    """Replace, in place, every entry of x equal to index with samples
    drawn from prob.

    :param x: numpy array, modified in place.
    :param prob: distribution-like object exposing sample(N).
    :param index: the value to be replaced.
    """
    mask = (x == index)
    n_matches = mask.sum()
    # BUG FIX: the original sampled from an undefined global `dist`,
    # raising NameError; the samples must come from the `prob` argument
    # (which was otherwise unused).
    x[mask] = prob.sample(n_matches)
change every response in x that matches index by randomly sampling from prob
45,870
def merge_dict(base, additional):
    """Recursively combine two dictionary-like objects.

    Values from ``additional`` win; nested mappings are merged
    recursively.  ``base`` is mutated and returned when both arguments
    are mappings; otherwise the non-None / overriding value is returned.
    """
    # BUG FIX: collections.Mapping was removed in Python 3.10; the ABC
    # lives in collections.abc.  Imported locally as the file's import
    # block is not visible from here.
    from collections.abc import Mapping
    if base is None:
        return additional
    if additional is None:
        return base
    if not (isinstance(base, Mapping) and isinstance(additional, Mapping)):
        return additional
    merged = base
    for key, value in additional.items():
        if isinstance(value, Mapping):
            merged[key] = merge_dict(merged.get(key), value)
        else:
            merged[key] = value
    return merged
Combine two dictionary - like objects .
45,871
def expand_config(d, dirs):
    """Expand XDG placeholders, environment variables and tildes, in place.

    Recurses into nested dicts; string values are run through
    expandvars, expanduser and then str.format with the XDG directories.
    """
    context = {
        'user_cache_dir': dirs.user_cache_dir,
        'user_config_dir': dirs.user_config_dir,
        'user_data_dir': dirs.user_data_dir,
        'user_log_dir': dirs.user_log_dir,
        'site_config_dir': dirs.site_config_dir,
        'site_data_dir': dirs.site_data_dir,
    }
    for key, value in d.items():
        if isinstance(value, dict):
            expand_config(value, dirs)
        if isinstance(value, string_types):
            expanded = os.path.expanduser(os.path.expandvars(d[key]))
            d[key] = expanded.format(**context)
Expand configuration XDG variables environmental variables and tildes .
45,872
def bootstrap_unihan(metadata, options=None):
    """Download, extract and import UNIHAN into the database.

    :param metadata: SQLAlchemy metadata bound to the target database.
    :param options: optional dict of overrides merged over
        UNIHAN_ETL_DEFAULT_OPTIONS; defaults to no overrides.
    """
    # BUG FIX: the original used a mutable default argument (options={}),
    # a classic Python pitfall; an explicit None sentinel is safe and
    # backward compatible.
    if options is None:
        options = {}
    options = merge_dict(UNIHAN_ETL_DEFAULT_OPTIONS.copy(), options)
    p = unihan.Packager(options)
    p.download()
    data = p.export()
    table = create_unihan_table(UNIHAN_FIELDS, metadata)
    metadata.create_all()
    metadata.bind.execute(table.insert(), data)
Download extract and import unihan to database .
45,873
def is_bootstrapped(metadata):
    """Return True if cihai is correctly bootstrapped.

    True only when the UNIHAN table exists and carries exactly the
    expected set of columns.
    """
    if TABLE_NAME not in metadata.tables:
        return False
    expected = set(UNIHAN_FIELDS + DEFAULT_COLUMNS)
    actual = {c.name for c in metadata.tables[TABLE_NAME].columns}
    return expected == actual
Return True if cihai is correctly bootstrapped .
45,874
def get_address(pk, main_net=True, prefix=None):
    """Compute the NEM address from a 32-byte public key.

    :param pk: public key as raw bytes or a hex string.
    :param main_net: selects the 0x68 (main) / 0x98 (test) version byte
        when no explicit prefix is given.
    :param prefix: optional explicit one-byte network prefix.
    """
    if isinstance(pk, str):
        pk = unhexlify(pk.encode())
    assert len(pk) == 32, 'PK is 32bytes {}'.format(len(pk))
    # keccak-256 of the key, then RIPEMD-160 of that digest.
    ripe = RIPEMD160.new(keccak_256(pk).digest()).digest()
    if prefix is None:
        version = b"\x68" if main_net else b"\x98"
    else:
        assert isinstance(prefix, bytes), 'Set prefix 1 bytes'
        version = prefix
    body = version + ripe
    checksum = keccak_256(body).digest()[0:4]
    return b32encode(body + checksum).decode()
Compute the NEM address from the public key .
45,875
def lookup_char(self, char):
    """Return a query of UNIHAN rows matching char exactly."""
    model = self.sql.base.classes.Unihan
    return self.sql.session.query(model).filter_by(char=char)
Return character information from datasets .
45,876
def reverse_char(self, hints):
    """Return a query of UNIHAN rows where any column contains any hint.

    :param hints: a single string or a list of strings to search for.
    """
    if isinstance(hints, string_types):
        hints = [hints]
    model = self.sql.base.classes.Unihan
    clauses = [
        col.contains(hint)
        for col in model.__table__.columns
        for hint in hints
    ]
    return self.sql.session.query(model).filter(or_(*clauses))
Return QuerySet of objects from SQLAlchemy of results .
45,877
def with_fields(self, *fields):
    """Return a query of characters that have data for every given field."""
    model = self.sql.base.classes.Unihan
    query = self.sql.session.query(model)
    for name in fields:
        # NOTE(review): builds a bare Column(name) rather than using
        # getattr(model, name); verify this matches the intended filter.
        query = query.filter(Column(name).isnot(None))
    return query
Returns list of characters with information for certain fields .
45,878
def symbols_to_prob(symbols):
    """Return a Counter mapping each symbol to its empirical probability.

    BUG FIX: the original computed ``len(list(symbols))`` after Counter
    had already consumed the iterable; for a generator input that yielded
    an empty list and raised ZeroDivisionError.  The total is now derived
    from the counts themselves, which also avoids a second pass.
    """
    counts = Counter(symbols)
    total = float(sum(counts.values()))
    for symbol in counts:
        counts[symbol] /= total
    return counts
Return a dict mapping symbols to probability .
45,879
def combine_symbols(*args):
    """Zip several equal-length symbol sequences into super-symbol tuples.

    Raises ValueError when the inputs differ in length.
    """
    if len({len(arg) for arg in args}) > 1:
        raise ValueError("combine_symbols got inputs with different sizes")
    return tuple(zip(*args))
Combine different symbols into a super - symbol
45,880
def mi_chain_rule(X, y):
    """Decompose the information between all X and y via the chain rule.

    Returns an array with one term per element of X: I(X0;y) followed by
    I(Xi;y | X0..Xi-1) for each subsequent i.
    """
    terms = np.zeros(len(X))
    terms[0] = mi(X[0], y)
    for i in range(1, len(X)):
        terms[i] = cond_mi(X[i], y, X[:i])
    return terms
Decompose the information between all X and y according to the chain rule and return all the terms in the chain rule .
45,881
def KL_divergence(P, Q):
    """Compute the KL divergence between distributions P and Q.

    P and Q are dicts mapping symbols to probabilities; they must share
    exactly the same keys.
    """
    assert (P.keys() == Q.keys())
    return sum(P[k] * log(P[k] / Q[k]) for k in P)
Compute the KL divergence between distributions P and Q P and Q should be dictionaries linking symbols to probabilities . the keys to P and Q should be the same .
45,882
def bin(x, bins, maxX=None, minX=None):
    """Digitize x into the given bin edges, or into `bins` equal-width bins.

    minX/maxX default to the full range of the signal; values outside the
    explicit range are clipped into the extreme bins by digitize.
    Returns (binned array with x's shape, bin edges).
    """
    hi = x.max() if maxX is None else maxX
    lo = x.min() if minX is None else minX
    if not np.iterable(bins):
        # Small epsilon keeps the maximum inside the last bin.
        bins = np.linspace(lo, hi + 1e-5, bins + 1)
    return np.digitize(x.ravel(), bins).reshape(x.shape), bins
bin signal x using binsN bin . If minX maxX are None they default to the full range of the signal . If they are not None everything above maxX gets assigned to binsN - 1 and everything below minX gets assigned to 0 this is effectively the same as clipping x before passing it to bin
45,883
def recover(y):
    """Given a curve point's y, recover the even preimage x.

    Computes x with x^2 == (y^2 - 1) / (D*y^2 + 1) (mod PRIME), using the
    p = 5 mod 8 square-root trick, then normalizes to the even root.
    """
    xx = (y * y - 1) * inverse(D * y * y + 1)
    x = powmod(xx, (PRIME + 3) // 8, PRIME)
    if (x * x - xx) % PRIME != 0:
        # First candidate failed: multiply by sqrt(-1) = 2^((p-1)/4).
        x = (x * powmod(2, (PRIME - 1) // 4, PRIME)) % PRIME
    if x % 2 != 0:
        x = PRIME - x
    return x
given a value y recover the preimage x
45,884
def time_stretch(y, sr, rate, rbargs=None):
    """Apply a time stretch of `rate` to an audio time series.

    Raises ValueError for non-positive rates; a rate of exactly 1.0 is a
    no-op that returns the input unchanged.
    """
    if rate <= 0:
        raise ValueError('rate must be strictly positive')
    if rate == 1.0:
        # Nothing to stretch.
        return y
    if rbargs is None:
        rbargs = {}
    rbargs.setdefault('--tempo', rate)
    return __rubberband(y, sr, **rbargs)
Apply a time stretch of rate to an audio time series .
45,885
def timemap_stretch(y, sr, time_map, rbargs=None):
    """Apply a timemap stretch to an audio time series.

    time_map is a list of (input_sample, output_sample) anchor pairs; all
    anchors must be non-negative and monotonically non-decreasing, and the
    last input anchor must equal len(y).
    """
    if rbargs is None:
        rbargs = {}
    valid_signs = all(
        time_map[i][0] >= 0 and time_map[i][1] >= 0
        for i in range(len(time_map)))
    monotonic = all(
        time_map[i][0] <= time_map[i + 1][0]
        and time_map[i][1] <= time_map[i + 1][1]
        for i in range(len(time_map) - 1))
    if not valid_signs:
        raise ValueError('time_map should be non-negative')
    if not monotonic:
        raise ValueError('time_map is not monotonic')
    if time_map[-1][0] != len(y):
        raise ValueError('time_map[-1] should correspond to the last sample')
    # Overall tempo ratio implied by the final anchor pair.
    rbargs.setdefault('--time', time_map[-1][1] * 1.0 / time_map[-1][0])
    stretch_file = tempfile.NamedTemporaryFile(
        mode='w', suffix='.txt', delete=False)
    try:
        for t in time_map:
            stretch_file.write('{:0} {:1}\n'.format(t[0], t[1]))
        stretch_file.close()
        rbargs.setdefault('--timemap', stretch_file.name)
        y_stretch = __rubberband(y, sr, **rbargs)
    finally:
        # Always remove the temporary timemap file.
        os.unlink(stretch_file.name)
    return y_stretch
Apply a timemap stretch to an audio time series .
45,886
def pitch_shift(y, sr, n_steps, rbargs=None):
    """Apply a pitch shift of n_steps (semitones) to an audio time series.

    A shift of 0 is a no-op returning the input unchanged.
    """
    if n_steps == 0:
        return y
    if rbargs is None:
        rbargs = {}
    rbargs.setdefault('--pitch', n_steps)
    return __rubberband(y, sr, **rbargs)
Apply a pitch shift to an audio time series .
45,887
def python_to_ucn(uni_char, as_bytes=False):
    """Return the UCN ('U+XXXX') form of a Python unicode character."""
    escaped = uni_char.encode('unicode_escape').decode('latin1')
    hex_part = text_type(escaped).replace('\\', '').upper().lstrip('U')
    if len(hex_part) > 4:
        # Drop leading zeros on wide (non-BMP) codepoints.
        hex_part = hex_part.lstrip("0")
    ucn = "U+" + hex_part.upper()
    if as_bytes:
        ucn = ucn.encode('latin1')
    return ucn
Return UCN character from Python Unicode character .
45,888
def python_to_euc(uni_char, as_bytes=False):
    """Return the EUC (gb2312 hex digits) form of a Python unicode character."""
    raw = repr(uni_char.encode("gb2312"))[1:-1]
    euc = raw.replace("\\x", "").strip("'")
    if as_bytes:
        euc = euc.encode('utf-8')
        assert isinstance(euc, bytes)
    return euc
Return EUC character from a Python Unicode character .
45,889
def ucnstring_to_unicode(ucn_string):
    """Return ucn_string converted to a unicode text string."""
    decoded = ucnstring_to_python(ucn_string).decode('utf-8')
    assert isinstance(decoded, text_type)
    return decoded
Return ucnstring as Unicode .
45,890
def parse_var(var):
    """Split 'UCN<tag' into (unicode character, tag).

    The tag is None when no '<' separator is present.
    """
    ucn, found, tag = var.partition("<")
    return ucn_to_unicode(ucn), (tag if found else None)
Returns a tuple consisting of a string and a tag or None if none is specified .
45,891
def from_file(cls, config_path=None, *args, **kwargs):
    """Create a Cihai instance from a JSON, YAML or INI config file.

    :param config_path: path to the configuration file; when falsy an
        empty configuration is used.
    :raises exc.CihaiException: when the path does not exist or has an
        unsupported extension.
    """
    config_reader = kaptan.Kaptan()
    config = {}
    if config_path:
        if not os.path.exists(config_path):
            raise exc.CihaiException(
                '{0} does not exist.'.format(os.path.abspath(config_path)))
        if not any(
                config_path.endswith(ext)
                for ext in ('json', 'yml', 'yaml', 'ini')):
            # BUG FIX: the original message ended in "...ini extend.",
            # which is not English; callers only catch the exception
            # type, so correcting the message is safe.
            raise exc.CihaiException(
                '{0} does not have a json, yml, yaml or ini '
                'extension.'.format(os.path.abspath(config_path)))
        else:
            custom_config = config_reader.import_config(config_path).get()
            config = merge_dict(config, custom_config)
    return cls(config)
Create a Cihai instance from a JSON or YAML config .
45,892
def _process_locale ( self , locale ) : if locale . lower ( ) . startswith ( 'en' ) : return False return ( locale in self . enabled_locales or self . reverse_locale_map . get ( locale . lower ( ) , None ) in self . enabled_locales or locale in self . lower_locales or self . reverse_locale_map . get ( locale . lower ( ) , None ) in self . lower_locales )
Return True if this locale should be processed .
45,893
def desk_locale(self, locale):
    """Return the Desk-style locale for locale (lowercase, underscored),
    applying any vendor-specific override."""
    normalized = locale.lower().replace('-', '_')
    return self.vendor_locale_map.get(normalized, normalized)
Return the Desk - style locale for locale .
45,894
def push(self):
    """Push portal-visible topic names to Transifex as a PO resource."""
    tx = Tx(self.tx_project_slug)
    catalog = babel.messages.catalog.Catalog()
    for topic in self.desk.topics():
        if topic.show_in_portal:
            catalog.add(topic.name)
    po_buffer = StringIO()
    babel.messages.pofile.write_po(po_buffer, catalog)
    tx.create_or_update_resource(
        self.TOPIC_STRINGS_SLUG,
        DEFAULT_SOURCE_LANGUAGE,
        "Help Center Topics",
        po_buffer.getvalue(),
        i18n_type='PO',
        project_slug=self.tx_project_slug,
    )
Push topics to Transifex .
45,895
def pull(self):
    """Pull completed topic translations from Transifex into Desk.

    Only locales that are 100% complete are applied; topics missing from
    a pulled catalog are logged as errors.
    """
    topic_stats = txlib.api.statistics.Statistics.get(
        project_slug=self.tx_project_slug,
        resource_slug=self.TOPIC_STRINGS_SLUG,
    )
    translated = {}
    for locale in self.enabled_locales:
        if not self._process_locale(locale):
            continue
        locale_stats = getattr(topic_stats, locale, None)
        if locale_stats is None:
            self.log.debug(
                'Locale %s not present when pulling topics.' % (locale,))
            continue
        if locale_stats['completed'] == '100%':
            translation = txlib.api.translations.Translation.get(
                project_slug=self.tx_project_slug,
                slug=self.TOPIC_STRINGS_SLUG,
                lang=locale,
            )
            translated[locale] = babel.messages.pofile.read_po(
                StringIO(translation.content.encode('utf-8')))
    for topic in self.desk.topics():
        for locale in translated:
            if topic.name in translated[locale]:
                self.log.debug(
                    'Updating topic (%s) for locale (%s)'
                    % (topic.name, locale),
                )
                if locale in topic.translations:
                    topic.translations[locale].update(
                        name=translated[locale][topic.name].string,
                    )
                else:
                    topic.translations.create(
                        locale=locale,
                        name=translated[locale][topic.name].string,
                    )
            else:
                # BUG FIX: topic is accessed via attributes everywhere
                # else in this loop (topic.name); topic['name'] would
                # raise and mask the error being reported.
                self.log.error(
                    'Topic name (%s) does not exist in locale (%s)'
                    % (topic.name, locale),
                )
Pull topics from Transifex .
45,896
def parse_resource_document(self, content):
    """Parse an HTML resource document into {'subject': ..., 'body': ...}.

    Non-HTML content is returned whole under 'body'; 'subject' is only
    present when a <title> element exists.
    """
    content = content.strip()
    if not content.startswith('<html>'):
        return dict(body=content)
    parsed = {}
    if '<title>' in content and '</title>' in content:
        start = content.find('<title>') + 7
        end = content.find('</title>')
        parsed['subject'] = content[start:end].strip()
    parsed['body'] = content[
        content.find('<body>') + 6:content.find('</body>')].strip()
    return parsed
Return a dict with the keys subject and body parsed from content .
45,897
def push(self):
    """Push article translations that are missing or outdated to Transifex."""
    tx = Tx(self.tx_project_slug)
    if self.options.resources:
        # Explicit comma-separated resource ids on the command line.
        articles = [
            self.desk.articles().by_id(r.strip())
            for r in self.options.resources.split(',')
        ]
    else:
        articles = self.desk.articles()
    for a in articles:
        self.log.debug('Inspecting Desk resource %s', a.api_href)
        for translation in a.translations.items().values():
            our_locale = self.desk_to_our_locale(translation.locale)
            self.log.debug('Checking locale %s', translation.locale)
            if not self._process_locale(translation.locale):
                self.log.debug('Skipping locale.')
                continue
            tx.get_project(our_locale)
            a_id = a.api_href.rsplit('/', 1)[1]
            if (self.options.force
                    or not tx.resource_exists(a_id, our_locale)
                    or translation.outdated):
                self.log.info(
                    'Resource %(id)s out of date in %(locale)s; updating.' % {
                        'id': a_id,
                        'locale': our_locale,
                    },
                )
                tx.create_or_update_resource(
                    a_id,
                    our_locale,
                    self.make_resource_title(a),
                    self.make_resource_document(a.subject, a.body),
                )
Push tutorials to Transifex .
45,898
def get_project(self, locale,
                source_language_code=DEFAULT_SOURCE_LANGUAGE, **kwargs):
    """Get or create the Transifex project for this prefix and locale.

    kwargs may override 'name' and 'description'; other keys are ignored.
    """
    try:
        locale_project = project.Project.get(
            slug=self.get_project_slug(locale))
    except NotFoundError:
        locale_project = project.Project(
            slug=self.get_project_slug(locale),
        )
    defaults = {
        'name': 'Help Center (%s)' % (locale,),
        'description': 'Help Center pages to translate to %s' % (locale,),
        'source_language_code': source_language_code,
        'private': True,
    }
    valid_keys = ('name', 'description')
    # BUG FIX: dict.iteritems() does not exist on Python 3; sibling
    # blocks in this module already use .items().
    defaults.update(
        dict((k, v) for k, v in kwargs.items() if k in valid_keys))
    for k, v in defaults.items():
        setattr(locale_project, k, v)
    locale_project.save()
    return locale_project
Gets or creates the Transifex project for the current project prefix and locale
45,899
def translation_exists(self, slug, lang):
    """Return the translation for slug/lang, or False when unavailable."""
    try:
        return translations.Translation.get(
            project_slug=self.get_project_slug(lang),
            slug=slug,
            lang=lang,
        )
    except (NotFoundError, RemoteServerError):
        # Missing translations and transient server errors both count
        # as "does not exist" for the caller.
        return False
Return True if the translation exists for this slug .