idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
35,400
def from_dict ( cls , entries , ** kwargs ) : from dask . base import tokenize cat = cls ( ** kwargs ) cat . _entries = entries cat . _tok = tokenize ( kwargs , entries ) return cat
Create Catalog from the given set of entries
35,401
def reload ( self ) : if time . time ( ) - self . updated > self . ttl : self . force_reload ( )
Reload catalog if sufficient time has passed
35,402
def filter ( self , func ) : return Catalog . from_dict ( { key : entry for key , entry in self . items ( ) if func ( entry ) } )
Create a Catalog of a subset of entries based on a condition
35,403
def walk ( self , sofar = None , prefix = None , depth = 2 ) : out = sofar if sofar is not None else { } prefix = [ ] if prefix is None else prefix for name , item in self . _entries . items ( ) : if item . _container == 'catalog' and depth > 1 : try : item ( ) . walk ( out , prefix + [ name ] , depth - 1 ) except Exce...
Get all entries in this catalog and sub - catalogs
35,404
def serialize ( self ) : import yaml output = { "metadata" : self . metadata , "sources" : { } , "name" : self . name } for key , entry in self . items ( ) : output [ "sources" ] [ key ] = entry . _captured_init_kwargs return yaml . dump ( output )
Produce YAML version of this catalog .
35,405
def save ( self , url , storage_options = None ) : from dask . bytes import open_files with open_files ( [ url ] , ** ( storage_options or { } ) , mode = 'wt' ) [ 0 ] as f : f . write ( self . serialize ( ) )
Output this catalog to a file as YAML
35,406
def reset ( self ) : "Clear caches to force a reload." self . _page_cache . clear ( ) self . _direct_lookup_cache . clear ( ) self . _page_offset = 0 self . complete = self . _catalog . page_size is None
Clear caches to force a reload .
35,407
def cached_items ( self ) : for item in six . iteritems ( self . _page_cache ) : yield item for item in six . iteritems ( self . _direct_lookup_cache ) : yield item
Iterate over items that are already cached . Perform no requests .
35,408
def _get_http_args ( self , params ) : headers = self . http_args . get ( 'headers' , { } ) if self . auth is not None : auth_headers = self . auth . get_headers ( ) headers . update ( auth_headers ) http_args = self . http_args . copy ( ) if self . _source_id is not None : headers [ 'source_id' ] = self . _source_id h...
Return a copy of the http_args
35,409
def _load ( self ) : if self . page_size is None : params = { } else : params = { 'page_offset' : 0 , 'page_size' : 0 } http_args = self . _get_http_args ( params ) response = requests . get ( self . info_url , ** http_args ) try : response . raise_for_status ( ) except requests . HTTPError as err : six . raise_from ( ...
Fetch metadata from remote . Entries are fetched lazily .
35,410
def nice_join ( seq , sep = ", " , conjunction = "or" ) : seq = [ str ( x ) for x in seq ] if len ( seq ) <= 1 or conjunction is None : return sep . join ( seq ) else : return "%s %s %s" % ( sep . join ( seq [ : - 1 ] ) , conjunction , seq [ - 1 ] )
Join together sequences of strings into English - friendly phrases using a conjunction when appropriate .
35,411
def output_notebook ( inline = True , logo = False ) : try : import hvplot except ImportError : raise ImportError ( "The intake plotting API requires hvplot." "hvplot may be installed with:\n\n" "`conda install -c pyviz hvplot` or " "`pip install hvplot`." ) import holoviews as hv return hv . extension ( 'bokeh' , inli...
Load the notebook extension
35,412
def open_catalog ( uri = None , ** kwargs ) : driver = kwargs . pop ( 'driver' , None ) if driver is None : if uri : if ( ( isinstance ( uri , str ) and "*" in uri ) or ( ( isinstance ( uri , ( list , tuple ) ) ) and len ( uri ) > 1 ) ) : driver = 'yaml_files_cat' elif isinstance ( uri , ( list , tuple ) ) and len ( ur...
Create a Catalog object
35,413
def _persist ( source , path , ** kwargs ) : try : from intake_parquet import ParquetSource except ImportError : raise ImportError ( "Please install intake-parquet to use persistence" " on dataframe container sources." ) try : df = source . to_dask ( ) except NotImplementedError : import dask . dataframe as dd df = dd ...
Save dataframe to local persistent store
35,414
def save_conf ( fn = None ) : if fn is None : fn = cfile ( ) try : os . makedirs ( os . path . dirname ( fn ) ) except ( OSError , IOError ) : pass with open ( fn , 'w' ) as f : yaml . dump ( conf , f )
Save current configuration to file as YAML
35,415
def load_conf ( fn = None ) : if fn is None : fn = cfile ( ) if os . path . isfile ( fn ) : with open ( fn ) as f : try : conf . update ( yaml_load ( f ) ) except Exception as e : logger . warning ( 'Failure to load config file "{fn}": {e}' '' . format ( fn = fn , e = e ) )
Update global config from YAML file
35,416
def intake_path_dirs ( path ) : if isinstance ( path , ( list , tuple ) ) : return path import re pattern = re . compile ( ";" if os . name == 'nt' else r"(?<!:):(?![:/])" ) return pattern . split ( path )
Return a list of directories from the intake path .
35,417
def load_env ( ) : for key , envvar in [ [ 'cache_dir' , 'INTAKE_CACHE_DIR' ] , [ 'catalog_path' , 'INTAKE_PATH' ] , [ 'persist_path' , 'INTAKE_PERSIST_PATH' ] ] : if envvar in os . environ : conf [ key ] = make_path_posix ( os . environ [ envvar ] ) conf [ 'catalog_path' ] = intake_path_dirs ( conf [ 'catalog_path' ] ...
Analyse enviroment variables and update conf accordingly
35,418
def source ( self , source ) : BaseView . source . fset ( self , source ) if self . main_pane : self . main_pane . object = self . contents self . label_pane . object = self . label
When the source gets updated update the pane object
35,419
def contents ( self ) : if not self . _source : return ' ' * 100 contents = self . source . describe ( ) return pretty_describe ( contents )
String representation of the source s description
35,420
def _get_parts_of_format_string ( resolved_string , literal_texts , format_specs ) : _text = resolved_string bits = [ ] if literal_texts [ - 1 ] != '' and _text . endswith ( literal_texts [ - 1 ] ) : _text = _text [ : - len ( literal_texts [ - 1 ] ) ] literal_texts = literal_texts [ : - 1 ] format_specs = format_specs ...
Inner function of reverse_format returns the resolved value for each field in pattern .
35,421
def reverse_formats ( format_string , resolved_strings ) : from string import Formatter fmt = Formatter ( ) field_names = [ i [ 1 ] for i in fmt . parse ( format_string ) if i [ 1 ] ] args = { field_name : [ ] for field_name in field_names } for resolved_string in resolved_strings : for field , value in reverse_format ...
Reverse the string method format for a list of strings .
35,422
def reverse_format ( format_string , resolved_string ) : from string import Formatter from datetime import datetime fmt = Formatter ( ) args = { } format_string = make_path_posix ( format_string ) literal_texts , field_names , format_specs , conversions = zip ( * fmt . parse ( format_string ) ) if not any ( field_names...
Reverse the string method format .
35,423
def path_to_glob ( path ) : from string import Formatter fmt = Formatter ( ) if not isinstance ( path , str ) : return path literal_texts = [ i [ 0 ] for i in fmt . parse ( path ) ] index_of_empty = [ i for i , lt in enumerate ( literal_texts ) if lt == '' and i != 0 ] glob = '*' . join ( [ literal_texts [ i ] for i in...
Convert pattern style paths to glob style paths
35,424
def path_to_pattern ( path , metadata = None ) : if not isinstance ( path , str ) : return pattern = path if metadata : cache = metadata . get ( 'cache' ) if cache : regex = next ( c . get ( 'regex' ) for c in cache if c . get ( 'argkey' ) == 'urlpath' ) pattern = pattern . split ( regex ) [ - 1 ] return pattern
Remove source information from path when using chaching
35,425
def get_partition ( url , headers , source_id , container , partition ) : accepted_formats = list ( serializer . format_registry . keys ( ) ) accepted_compression = list ( serializer . compression_registry . keys ( ) ) payload = dict ( action = 'read' , source_id = source_id , accepted_formats = accepted_formats , acce...
Serializable function for fetching a data source partition
35,426
def flatten ( iterable ) : iterable = iter ( iterable ) while True : try : item = next ( iterable ) except StopIteration : break if isinstance ( item , six . string_types ) : yield item continue try : data = iter ( item ) iterable = itertools . chain ( data , iterable ) except : yield item
Flatten an arbitrarily deep list
35,427
def clamp ( value , lower = 0 , upper = sys . maxsize ) : return max ( lower , min ( upper , value ) )
Clamp float between given range
35,428
def expand_templates ( pars , context , return_left = False , client = False , getenv = True , getshell = True ) : all_vars = set ( context ) out = _expand ( pars , context , all_vars , client , getenv , getshell ) if return_left : return out , all_vars return out
Render variables in context into the set of parameters with jinja2 .
35,429
def merge_pars ( params , user_inputs , spec_pars , client = False , getenv = True , getshell = True ) : context = params . copy ( ) for par in spec_pars : val = user_inputs . get ( par . name , par . default ) if val is not None : if isinstance ( val , six . string_types ) : val = expand_defaults ( val , getenv = gete...
Produce open arguments by merging various inputs
35,430
def coerce ( dtype , value ) : if dtype is None : return value if type ( value ) . __name__ == dtype : return value op = COERCION_RULES [ dtype ] return op ( ) if value is None else op ( value )
Convert a value to a specific type .
35,431
def open_remote ( url , entry , container , user_parameters , description , http_args , page_size = None , auth = None , getenv = None , getshell = None ) : from intake . container import container_map if url . startswith ( 'intake://' ) : url = url [ len ( 'intake://' ) : ] payload = dict ( action = 'open' , name = en...
Create either local direct data source or remote streamed source
35,432
def _persist ( source , path , encoder = None ) : import posixpath from dask . bytes import open_files import dask import pickle import json from intake . source . textfiles import TextFilesSource encoder = { None : str , 'str' : str , 'json' : json . dumps , 'pickle' : pickle . dumps } [ encoder ] try : b = source . t...
Save list to files using encoding
35,433
def _ipython_display_ ( self ) : contents = self . describe ( ) display ( { 'application/json' : contents , 'text/plain' : pretty_describe ( contents ) } , metadata = { 'application/json' : { 'root' : contents [ "name" ] } } , raw = True )
Display the entry as a rich object in an IPython session .
35,434
def autodiscover ( path = None , plugin_prefix = 'intake_' ) : plugins = { } for importer , name , ispkg in pkgutil . iter_modules ( path = path ) : if name . startswith ( plugin_prefix ) : t = time . time ( ) new_plugins = load_plugins_from_module ( name ) for plugin_name , plugin in new_plugins . items ( ) : if plugi...
Scan for Intake plugin packages and return a dict of plugins .
35,435
def load_plugins_from_module ( module_name ) : plugins = { } try : if module_name . endswith ( '.py' ) : import imp mod = imp . load_source ( 'module.name' , module_name ) else : mod = importlib . import_module ( module_name ) except Exception as e : logger . debug ( "Import module <{}> failed: {}" . format ( module_na...
Imports a module and returns dictionary of discovered Intake plugins .
35,436
def _set_pattern_columns ( self , path_column ) : try : from pandas . api . types import CategoricalDtype _HAS_CDT = True except ImportError : _HAS_CDT = False col = self . _dataframe [ path_column ] paths = col . cat . categories column_by_field = { field : col . cat . codes . map ( dict ( enumerate ( values ) ) ) . a...
Get a column of values for each field in pattern
35,437
def _path_column ( self ) : path_column = self . _csv_kwargs . get ( 'include_path_column' ) if path_column is None : path_column = unique_string ( ) self . _csv_kwargs [ 'include_path_column' ] = path_column elif isinstance ( path_column , bool ) : path_column = 'path' self . _csv_kwargs [ 'include_path_column' ] = pa...
Set include_path_column in csv_kwargs and returns path column name
35,438
def _open_dataset ( self , urlpath ) : import dask . dataframe if self . pattern is None : self . _dataframe = dask . dataframe . read_csv ( urlpath , storage_options = self . _storage_options , ** self . _csv_kwargs ) return if not ( DASK_VERSION >= '0.19.0' ) : raise ValueError ( "Your version of dask is '{}'. " "The...
Open dataset using dask and use pattern fields to set new columns
35,439
def do_search ( self , arg = None ) : new_cats = [ ] for cat in self . cats : new_cat = cat . search ( self . inputs . text , depth = self . inputs . depth ) if len ( list ( new_cat ) ) > 0 : new_cats . append ( new_cat ) if len ( new_cats ) > 0 : self . done_callback ( new_cats ) self . visible = False
Do search and close panel
35,440
def _persist ( source , path , component = None , storage_options = None , ** kwargs ) : from dask . array import to_zarr , from_array from . . source . zarr import ZarrArraySource try : arr = source . to_dask ( ) except NotImplementedError : arr = from_array ( source . read ( ) , chunks = - 1 ) . rechunk ( 'auto' ) to...
Save array to local persistent store
35,441
def source ( self , source ) : BaseView . source . fset ( self , source ) if self . select : self . select . options = self . options
When the source gets updated update the the options in the selector
35,442
def get_file ( f , decoder , read ) : with f as f : if decoder is None : return list ( f ) else : d = f . read ( ) if read else f out = decoder ( d ) if isinstance ( out , ( tuple , list ) ) : return out else : return [ out ]
Serializable function to take an OpenFile object and read lines
35,443
def get_case_insensitive ( self , dictionary , key , default = None ) : lower_key = key . lower ( ) for k , v in dictionary . items ( ) : if lower_key == k . lower ( ) : return v else : return default
Case - insensitive search of a dictionary for key .
35,444
def url ( self ) : return os . path . join ( self . path , self . main . value [ 0 ] )
Path to local catalog file
35,445
def validate ( self , arg = None ) : if os . path . isdir ( self . path ) : self . validator . object = None else : self . validator . object = ICONS [ 'error' ]
Check that inputted path is valid - set validator accordingly
35,446
def add_cat ( self , arg = None ) : try : self . done_callback ( self . cat ) self . visible = False except Exception as e : self . validator . object = ICONS [ 'error' ] raise e
Add cat and close panel
35,447
def tab_change ( self , event ) : self . remove_error ( ) if event . new == 1 : self . widget . disabled = False
When tab changes remove error and enable widget if on url tab
35,448
def callback ( self , cats ) : enable = bool ( cats ) if not enable : self . search . visible = False enable_widget ( self . search_widget , enable ) enable_widget ( self . remove_widget , enable ) if self . done_callback : self . done_callback ( cats )
When a catalog is selected enable widgets that depend on that condition and do done_callback
35,449
def on_click_search_widget ( self , event ) : self . search . cats = self . cats self . search . visible = event . new if self . search . visible : self . search . watchers . append ( self . select . widget . link ( self . search , value = 'cats' ) )
When the search control is toggled set visibility and hand down cats
35,450
def no_duplicates_constructor ( loader , node , deep = False ) : mapping = { } for key_node , value_node in node . value : key = loader . construct_object ( key_node , deep = deep ) value = loader . construct_object ( value_node , deep = deep ) if key in mapping : from intake . catalog . exceptions import DuplicateKeyE...
Check for duplicate keys while loading YAML
35,451
def classname ( ob ) : import inspect if inspect . isclass ( ob ) : return '.' . join ( [ ob . __module__ , ob . __name__ ] ) else : return '.' . join ( [ ob . __class__ . __module__ , ob . __class__ . __name__ ] )
Get the object s class s name as package . module . Class
35,452
def pretty_describe ( object , nestedness = 0 , indent = 2 ) : if not isinstance ( object , dict ) : return str ( object ) sep = f'\n{" " * nestedness * indent}' out = sep . join ( ( f'{k}: {pretty_describe(v, nestedness + 1)}' for k , v in object . items ( ) ) ) if nestedness > 0 and out : return f'{sep}{out}' return ...
Maintain dict ordering - but make string version prettier
35,453
def add ( self , * args , ** kwargs ) : return self . cat . select . add ( * args , ** kwargs )
Add to list of cats
35,454
def coerce_to_list ( items , preprocess = None ) : if not isinstance ( items , list ) : items = [ items ] if preprocess : items = list ( map ( preprocess , items ) ) return items
Given an instance or list coerce to list .
35,455
def _repr_mimebundle_ ( self , * args , ** kwargs ) : try : if self . logo : p = pn . Row ( self . logo_panel , self . panel , margin = 0 ) return p . _repr_mimebundle_ ( * args , ** kwargs ) else : return self . panel . _repr_mimebundle_ ( * args , ** kwargs ) except : raise RuntimeError ( "Panel does not seem to be s...
Display in a notebook or a server
35,456
def unwatch ( self ) : if self . watchers is not None : unwatched = [ ] for watcher in self . watchers : watcher . inst . param . unwatch ( watcher ) unwatched . append ( watcher ) self . watchers = [ w for w in self . watchers if w not in unwatched ]
Get rid of any lingering watchers and remove from list
35,457
def _create_options ( self , items ) : return OrderedDict ( map ( lambda x : ( x . name , x ) , coerce_to_list ( items , self . preprocess ) ) )
Helper method to create options from list or instance .
35,458
def options ( self , new ) : options = self . _create_options ( new ) if self . widget . value : self . widget . set_param ( options = options , value = list ( options . values ( ) ) [ : 1 ] ) else : self . widget . options = options self . widget . value = list ( options . values ( ) ) [ : 1 ]
Set options from list or instance of named item
35,459
def add ( self , items ) : options = self . _create_options ( items ) for k , v in options . items ( ) : if k in self . labels and v not in self . items : options . pop ( k ) count = 0 while f'{k}_{count}' in self . labels : count += 1 options [ f'{k}_{count}' ] = v self . widget . options . update ( options ) self . w...
Add items to options
35,460
def remove ( self , items ) : items = coerce_to_list ( items ) new_options = { k : v for k , v in self . options . items ( ) if v not in items } self . widget . options = new_options self . widget . param . trigger ( 'options' )
Remove items from options
35,461
def selected ( self , new ) : def preprocess ( item ) : if isinstance ( item , str ) : return self . options [ item ] return item items = coerce_to_list ( new , preprocess ) self . widget . value = items
Set selected from list or instance of object or name .
35,462
def source ( self , source ) : if isinstance ( source , list ) : source = source [ 0 ] if len ( source ) > 0 else None self . _source = source
When the source gets updated update the select widget
35,463
def callback ( self , sources ) : enable = bool ( sources ) if not enable : self . plot_widget . value = False enable_widget ( self . plot_widget , enable ) if self . done_callback : self . done_callback ( sources )
When a source is selected enable widgets that depend on that condition and do done_callback
35,464
def on_click_plot_widget ( self , event ) : self . plot . source = self . sources self . plot . visible = event . new if self . plot . visible : self . plot . watchers . append ( self . select . widget . link ( self . plot , value = 'source' ) )
When the plot control is toggled set visibility and hand down source
35,465
def sanitize_path ( path ) : storage_option = infer_storage_options ( path ) protocol = storage_option [ 'protocol' ] if protocol in ( 'http' , 'https' ) : path = os . path . normpath ( path . replace ( "{}://" . format ( protocol ) , '' ) ) elif protocol == 'file' : path = os . path . normpath ( path ) path = path . r...
Utility for cleaning up paths .
35,466
def _download ( file_in , file_out , blocksize , output = False ) : with warnings . catch_warnings ( ) : warnings . filterwarnings ( 'ignore' ) if output : try : from tqdm . autonotebook import tqdm except ImportError : logger . warn ( "Cache progress bar requires tqdm to be installed:" " conda/pip install tqdm" ) outp...
Read from input and write to output file in blocks
35,467
def make_caches ( driver , specs , catdir = None , cache_dir = None , storage_options = { } ) : if specs is None : return [ ] return [ registry . get ( spec [ 'type' ] , FileCache ) ( driver , spec , catdir = catdir , cache_dir = cache_dir , storage_options = storage_options ) for spec in specs ]
Creates Cache objects from the cache_specs provided in the catalog yaml file
35,468
def load ( self , urlpath , output = None , ** kwargs ) : if conf . get ( 'cache_disabled' , False ) : return [ urlpath ] self . output = output if output is not None else conf . get ( 'cache_download_progress' , True ) cache_paths = self . _from_metadata ( urlpath ) if cache_paths is None : files_in , files_out = self...
Downloads data from a given url generates a hashed filename logs metadata and caches it locally .
35,469
def _load ( self , files_in , files_out , urlpath , meta = True ) : import dask out = [ ] outnames = [ ] for file_in , file_out in zip ( files_in , files_out ) : cache_path = file_out . path outnames . append ( cache_path ) if cache_path == urlpath : continue if not os . path . isfile ( cache_path ) : logger . debug ( ...
Download a set of files
35,470
def clear_cache ( self , urlpath ) : cache_entries = self . _metadata . pop ( urlpath , [ ] ) for cache_entry in cache_entries : try : os . remove ( cache_entry [ 'cache_path' ] ) except ( OSError , IOError ) : pass try : fn = os . path . dirname ( cache_entry [ 'cache_path' ] ) os . rmdir ( fn ) except ( OSError , IOE...
Clears cache and metadata for a given urlpath .
35,471
def clear_all ( self ) : for urlpath in self . _metadata . keys ( ) : self . clear_cache ( urlpath ) if not os . path . isdir ( self . _cache_dir ) : return for subdir in os . listdir ( self . _cache_dir ) : try : fn = posixpath . join ( self . _cache_dir , subdir ) if os . path . isdir ( fn ) : shutil . rmtree ( fn ) ...
Clears all cache and metadata .
35,472
def write_json ( dictionary , filename ) : with open ( filename , 'w' ) as data_file : json . dump ( dictionary , data_file , indent = 4 , sort_keys = True ) print ( ' + os . path . basename ( filename ) )
Write dictionary to JSON
35,473
def compare ( dicts ) : common_members = { } common_keys = reduce ( lambda x , y : x & y , map ( dict . keys , dicts ) ) for k in common_keys : common_members [ k ] = list ( reduce ( lambda x , y : x & y , [ set ( d [ k ] ) for d in dicts ] ) ) return common_members
Compare by iteration
35,474
def sort_common_members ( ) : filename = PREFIX + '/common_members.json' sorted_json_data = { } json_data = read_json ( filename ) all_keys = [ ] for key , value in json_data . items ( ) : all_keys . append ( key ) sorted_keys = sorted ( all_keys ) for key in sorted_keys : if len ( json_data [ key ] ) > 0 : sorted_json...
Sorts the keys and members
35,475
def generate_common_members ( ) : pyside = read_json ( PREFIX + '/PySide.json' ) pyside2 = read_json ( PREFIX + '/PySide2.json' ) pyqt4 = read_json ( PREFIX + '/PyQt4.json' ) pyqt5 = read_json ( PREFIX + '/PyQt5.json' ) dicts = [ pyside , pyside2 , pyqt4 , pyqt5 ] common_members = compare ( dicts ) write_json ( common_...
Generate JSON with commonly shared members
35,476
def parse ( fname ) : blocks = list ( ) with io . open ( fname , "r" , encoding = "utf-8" ) as f : in_block = False current_block = None current_header = "" for line in f : if line . startswith ( "#### " ) : current_header = line . rstrip ( ) if line . startswith ( "```" ) : in_block = False if in_block : current_block...
Return blocks of code as list of dicts
35,477
def _qInstallMessageHandler ( handler ) : def messageOutputHandler ( * args ) : if len ( args ) == 3 : msgType , logContext , msg = args elif len ( args ) == 2 : msgType , msg = args logContext = None else : raise TypeError ( "handler expected 2 or 3 arguments, got {0}" . format ( len ( args ) ) ) if isinstance ( msg ,...
Install a message handler that works in all bindings
35,478
def _import_sub_module ( module , name ) : module = __import__ ( module . __name__ + "." + name ) for level in name . split ( "." ) : module = getattr ( module , level ) return module
import_sub_module will mimic the function of importlib . import_module
35,479
def _setup ( module , extras ) : Qt . __binding__ = module . __name__ for name in list ( _common_members ) + extras : try : submodule = _import_sub_module ( module , name ) except ImportError : try : submodule = __import__ ( name ) except ImportError : continue setattr ( Qt , "_" + name , submodule ) if name not in ext...
Install common submodules
35,480
def _build_compatibility_members ( binding , decorators = None ) : decorators = decorators or dict ( ) try : import QtSiteConfig except ImportError : pass else : if hasattr ( QtSiteConfig , 'update_compatibility_decorators' ) : QtSiteConfig . update_compatibility_decorators ( binding , decorators ) _QtCompat = type ( "...
Apply binding to QtCompat
35,481
def _convert ( lines ) : def parse ( line ) : line = line . replace ( "from PySide2 import" , "from Qt import QtCompat," ) line = line . replace ( "QtWidgets.QApplication.translate" , "QtCompat.translate" ) if "QtCore.SIGNAL" in line : raise NotImplementedError ( "QtCore.SIGNAL is missing from PyQt5 " "and so Qt.py doe...
Convert compiled . ui file from PySide2 to Qt . py
35,482
def update_compatibility_decorators ( binding , decorators ) : def _widgetDecorator ( some_function ) : def wrapper ( * args , ** kwargs ) : ret = some_function ( * args , ** kwargs ) return "Test: {}" . format ( ret ) wrapper . __doc__ = some_function . __doc__ wrapper . __name__ = some_function . __name__ return wrap...
This optional function is called by Qt . py to modify the decorators applied to QtCompat namespace objects .
35,483
def load_ui_type ( uifile ) : import pysideuic import xml . etree . ElementTree as ElementTree from cStringIO import StringIO parsed = ElementTree . parse ( uifile ) widget_class = parsed . find ( 'widget' ) . get ( 'class' ) form_class = parsed . find ( 'class' ) . text with open ( uifile , 'r' ) as f : o = StringIO (...
Pyside equivalent for the loadUiType function in PyQt .
35,484
def pyside_load_ui ( uifile , base_instance = None ) : form_class , base_class = load_ui_type ( uifile ) if not base_instance : typeName = form_class . __name__ finalType = type ( typeName , ( form_class , base_class ) , { } ) base_instance = finalType ( ) else : if not isinstance ( base_instance , base_class ) : raise...
Provide PyQt4 . uic . loadUi functionality to PySide
35,485
def ExplicitlyExcludeFromIndex ( client , database_id ) : try : DeleteContainerIfExists ( client , database_id , COLLECTION_ID ) database_link = GetDatabaseLink ( database_id ) created_Container = client . CreateContainer ( database_link , { "id" : COLLECTION_ID } ) print ( created_Container ) print ( "\n" + "-" * 25 +...
The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added . There may be scenarios where you want to exclude a specific doc from the index even though all other documents are being indexed automatically . This method demonstrates how to use an index directive to control this
35,486
def ExcludePathsFromIndex ( client , database_id ) : try : DeleteContainerIfExists ( client , database_id , COLLECTION_ID ) database_link = GetDatabaseLink ( database_id ) doc_with_nested_structures = { "id" : "doc1" , "foo" : "bar" , "metaData" : "meta" , "subDoc" : { "searchable" : "searchable" , "nonSearchable" : "v...
The default behavior is for Cosmos to index every attribute in every document automatically . There are times when a document contains large amounts of information in deeply nested structures that you know you will never search on . In extreme cases like this you can exclude paths from the index to save on storage cost...
35,487
def UseRangeIndexesOnStrings ( client , database_id ) : try : DeleteContainerIfExists ( client , database_id , COLLECTION_ID ) database_link = GetDatabaseLink ( database_id ) collection_definition = { 'id' : COLLECTION_ID , 'indexingPolicy' : { 'includedPaths' : [ { 'path' : '/region/?' , 'indexes' : [ { 'kind' : docum...
Showing how range queries can be performed even on strings .
35,488
def ResolveForCreate ( self , document ) : if document is None : raise ValueError ( "document is None." ) partition_key = self . partition_key_extractor ( document ) containing_range = self . _GetContainingRange ( partition_key ) if containing_range is None : raise ValueError ( "A containing range for " + str ( partiti...
Resolves the collection for creating the document based on the partition key .
35,489
def _GetContainingRange ( self , partition_key ) : for keyrange in self . partition_map . keys ( ) : if keyrange . Contains ( partition_key ) : return keyrange return None
Gets the containing range based on the partition key .
35,490
def _GetIntersectingRanges ( self , partition_key ) : partitionkey_ranges = set ( ) intersecting_ranges = set ( ) if partition_key is None : return list ( self . partition_map . keys ( ) ) if isinstance ( partition_key , prange . Range ) : partitionkey_ranges . add ( partition_key ) elif isinstance ( partition_key , li...
Gets the intersecting ranges based on the partition key .
35,491
def _create_execution_context ( self ) : if hasattr ( self , '_database_link' ) : return base_execution_context . _MultiCollectionQueryExecutionContext ( self . _client , self . _options , self . _database_link , self . _query , self . _partition_key ) else : return execution_dispatcher . _ProxyQueryExecutionContext ( ...
instantiates the internal query execution context based .
35,492
def _Execute ( client , global_endpoint_manager , function , * args , ** kwargs ) : endpointDiscovery_retry_policy = endpoint_discovery_retry_policy . _EndpointDiscoveryRetryPolicy ( client . connection_policy , global_endpoint_manager , * args ) resourceThrottle_retry_policy = resource_throttle_retry_policy . _Resourc...
Exectutes the function with passed parameters applying all retry policies
35,493
def _GetDatabaseAccount ( self ) : try : database_account = self . _GetDatabaseAccountStub ( self . DefaultEndpoint ) return database_account except errors . HTTPFailure : for location_name in self . PreferredLocations : locational_endpoint = _GlobalEndpointManager . GetLocationalEndpoint ( self . DefaultEndpoint , loc...
Gets the database account first by using the default endpoint and if that doesn t returns use the endpoints for the preferred locations in the order they are specified to get the database account .
35,494
def CompareTo ( self , other_hash_value ) : if len ( self . hash_value ) != len ( other_hash_value ) : raise ValueError ( "Length of hashes doesn't match." ) for i in xrange ( 0 , len ( self . hash_value ) ) : if ( self . hash_value [ len ( self . hash_value ) - i - 1 ] < other_hash_value [ len ( self . hash_value ) - ...
Compares the passed hash value with the hash value of this object
35,495
def ComputeHash ( self , key ) : if key is None : raise ValueError ( "key is None." ) hash_value = self . _ComputeHash ( key ) return bytearray ( pack ( 'I' , hash_value ) )
Computes the hash of the value passed using MurmurHash3 algorithm .
35,496
def _ComputeHash ( key , seed = 0x0 ) : def fmix ( h ) : h ^= h >> 16 h = ( h * 0x85ebca6b ) & 0xFFFFFFFF h ^= h >> 13 h = ( h * 0xc2b2ae35 ) & 0xFFFFFFFF h ^= h >> 16 return h length = len ( key ) nblocks = int ( length / 4 ) h1 = seed c1 = 0xcc9e2d51 c2 = 0x1b873593 for block_start in xrange ( 0 , nblocks * 4 , 4 ) :...
Computes the hash of the value passed using MurmurHash3 algorithm with the seed value .
35,497
def create ( cls , session_token ) : version = None global_lsn = None local_lsn_by_region = { } if not session_token : return None segments = session_token . split ( cls . segment_separator ) if len ( segments ) < 2 : return None try : version = int ( segments [ 0 ] ) except ValueError as _ : return None try : global_l...
Parses session token and creates the vector session token
35,498
def _ConstructPartitions ( self , collection_links , partitions_per_node ) : collections_node_count = len ( collection_links ) partitions = [ partition . _Partition ( ) for _ in xrange ( 0 , partitions_per_node * collections_node_count ) ] index = 0 for collection_node in collection_links : hash_value = self . hash_gen...
Constructs the partitions in the consistent ring by assigning them to collection nodes using the hashing algorithm and then finally sorting the partitions based on the hash value .
35,499
def _FindPartition ( self , key ) : hash_value = self . hash_generator . ComputeHash ( key ) return self . _LowerBoundSearch ( self . partitions , hash_value )
Finds the partition from the byte array representation of the partition key .