idx (int64, 0–251k) | question (string, 53–3.53k chars) | target (string, 5–1.23k chars) | len_question (int64, 20–893) | len_target (int64, 3–238) |
|---|---|---|---|---|
1,500 | def _register_template ( cls , template_bytes ) : # This implementation won't work if there are nested templates, but # we can't do that anyways due to PyGObject limitations so it's ok if not hasattr ( cls , 'set_template' ) : raise TypeError ( "Requires PyGObject 3.13.2 or greater" ) cls . set_template ( template_bytes ) bound_methods = set ( ) bound_widgets = set ( ) # Walk the class, find marked callbacks and child attributes for name in dir ( cls ) : o = getattr ( cls , name , None ) if inspect . ismethod ( o ) : if hasattr ( o , '_gtk_callback' ) : bound_methods . add ( name ) # Don't need to call this, as connect_func always gets called #cls.bind_template_callback_full(name, o) elif isinstance ( o , _Child ) : cls . bind_template_child_full ( name , True , 0 ) bound_widgets . add ( name ) # Have to setup a special connect function to connect at template init # because the methods are not bound yet cls . set_connect_func ( _connect_func , cls ) cls . __gtemplate_methods__ = bound_methods cls . __gtemplate_widgets__ = bound_widgets base_init_template = cls . init_template cls . init_template = lambda s : _init_template ( s , cls , base_init_template ) | Registers the template for the widget and hooks init_template | 344 | 12 |
1,501 | def _init_template ( self , cls , base_init_template ) : # TODO: could disallow using a metaclass.. but this is good enough # .. if you disagree, feel free to fix it and issue a PR :) if self . __class__ is not cls : raise TypeError ( "Inheritance from classes with @GtkTemplate decorators " "is not allowed at this time" ) connected_signals = set ( ) self . __connected_template_signals__ = connected_signals base_init_template ( self ) for name in self . __gtemplate_widgets__ : widget = self . get_template_child ( cls , name ) self . __dict__ [ name ] = widget if widget is None : # Bug: if you bind a template child, and one of them was # not present, then the whole template is broken (and # it's not currently possible for us to know which # one is broken either -- but the stderr should show # something useful with a Gtk-CRITICAL message) raise AttributeError ( "A missing child widget was set using " "GtkTemplate.Child and the entire " "template is now broken (widgets: %s)" % ', ' . join ( self . __gtemplate_widgets__ ) ) for name in self . __gtemplate_methods__ . difference ( connected_signals ) : errmsg = ( "Signal '%s' was declared with @GtkTemplate.Callback " + "but was not present in template" ) % name warnings . warn ( errmsg , GtkTemplateWarning ) | This would be better as an override for Gtk . Widget | 347 | 13 |
1,502 | def extract_haml ( fileobj , keywords , comment_tags , options ) : import haml from mako import lexer , parsetree from mako . ext . babelplugin import extract_nodes encoding = options . get ( 'input_encoding' , options . get ( 'encoding' , None ) ) template_node = lexer . Lexer ( haml . preprocessor ( fileobj . read ( ) ) , input_encoding = encoding ) . parse ( ) for extracted in extract_nodes ( template_node . get_children ( ) , keywords , comment_tags , options ) : yield extracted | babel translation token extract function for haml files | 134 | 10 |
1,503 | def get_single_allele_from_reads ( allele_reads ) : allele_reads = list ( allele_reads ) if len ( allele_reads ) == 0 : raise ValueError ( "Expected non-empty list of AlleleRead objects" ) seq = allele_reads [ 0 ] . allele if any ( read . allele != seq for read in allele_reads ) : raise ValueError ( "Expected all AlleleRead objects to have same allele '%s', got %s" % ( seq , allele_reads ) ) return seq | Given a sequence of AlleleRead objects which are expected to all have the same allele return that allele . | 118 | 22 |
1,504 | def iter_all_children ( self ) : if self . inline_child : yield self . inline_child for x in self . children : yield x | Return an iterator that yields every node which is a child of this one . | 32 | 15 |
1,505 | def initialize ( self , * * kwargs ) : if not set ( kwargs . keys ( ) ) . issuperset ( self . init_keys ) : raise Exception ( "TransferFn needs to be initialized with %s" % ',' . join ( repr ( el ) for el in self . init_keys ) ) | Transfer functions may need additional information before the supplied numpy array can be modified in place . For instance transfer functions may have state which needs to be allocated in memory with a certain size . In other cases the transfer function may need to know about the coordinate system associated with the input data . | 72 | 57 |
1,506 | def override_plasticity_state ( self , new_plasticity_state ) : self . _plasticity_setting_stack . append ( self . plastic ) self . plastic = new_plasticity_state | Temporarily disable plasticity of internal state . | 48 | 10 |
1,507 | def register_host ( ) : pyblish . api . register_host ( "hython" ) pyblish . api . register_host ( "hpython" ) pyblish . api . register_host ( "houdini" ) | Register supported hosts | 53 | 3 |
1,508 | def maintained_selection ( ) : previous_selection = hou . selectedNodes ( ) try : yield finally : if previous_selection : for node in previous_selection : node . setSelected ( on = True ) else : for node in previous_selection : node . setSelected ( on = False ) | Maintain selection during context | 65 | 5 |
1,509 | def execute_transaction ( conn , statements : Iterable ) : with conn : with conn . cursor ( ) as cursor : for statement in statements : cursor . execute ( statement ) conn . commit ( ) | Execute several statements in single DB transaction . | 42 | 9 |
1,510 | def execute_transactions ( conn , statements : Iterable ) : with conn . cursor ( ) as cursor : for statement in statements : try : cursor . execute ( statement ) conn . commit ( ) except psycopg2 . ProgrammingError : conn . rollback ( ) | Execute several statements each as a single DB transaction . | 56 | 11 |
1,511 | def execute_closing_transaction ( statements : Iterable ) : with closing ( connect ( ) ) as conn : with conn . cursor ( ) as cursor : for statement in statements : cursor . execute ( statement ) | Open a connection commit a transaction and close it . | 45 | 10 |
1,512 | def select ( conn , query : str , params = None , name = None , itersize = 5000 ) : with conn . cursor ( name , cursor_factory = NamedTupleCursor ) as cursor : cursor . itersize = itersize cursor . execute ( query , params ) for result in cursor : yield result | Return a select statement s results as a namedtuple . | 69 | 12 |
1,513 | def select_dict ( conn , query : str , params = None , name = None , itersize = 5000 ) : with conn . cursor ( name , cursor_factory = RealDictCursor ) as cursor : cursor . itersize = itersize cursor . execute ( query , params ) for result in cursor : yield result | Return a select statement s results as dictionary . | 71 | 9 |
1,514 | def select_each ( conn , query : str , parameter_groups , name = None ) : with conn : with conn . cursor ( name = name ) as cursor : for parameters in parameter_groups : cursor . execute ( query , parameters ) yield cursor . fetchone ( ) | Run select query for each parameter set in single transaction . | 57 | 11 |
1,515 | def query_columns ( conn , query , name = None ) : with conn . cursor ( name ) as cursor : cursor . itersize = 1 cursor . execute ( query ) cursor . fetchmany ( 0 ) column_names = [ column . name for column in cursor . description ] return column_names | Lightweight query to retrieve column list of select query . | 64 | 11 |
1,516 | def from_variant_and_transcript ( cls , variant , transcript , context_size ) : if not transcript . contains_start_codon : logger . info ( "Expected transcript %s for variant %s to have start codon" , transcript . name , variant ) return None if not transcript . contains_stop_codon : logger . info ( "Expected transcript %s for variant %s to have stop codon" , transcript . name , variant ) return None if not transcript . protein_sequence : logger . info ( "Expected transript %s for variant %s to have protein sequence" , transcript . name , variant ) return None sequence_key = ReferenceSequenceKey . from_variant_and_transcript ( variant = variant , transcript = transcript , context_size = context_size ) if sequence_key is None : logger . info ( "No sequence key for variant %s on transcript %s" , variant , transcript . name ) return None return cls . from_variant_and_transcript_and_sequence_key ( variant = variant , transcript = transcript , sequence_key = sequence_key ) | Extracts the reference sequence around a variant locus on a particular transcript and determines the reading frame at the start of that sequence context . | 244 | 28 |
1,517 | def create_readme_with_long_description ( ) : this_dir = os . path . abspath ( os . path . dirname ( __file__ ) ) readme_md = os . path . join ( this_dir , 'README.md' ) readme = os . path . join ( this_dir , 'README' ) if os . path . exists ( readme_md ) : # this is the case when running `python setup.py sdist` if os . path . exists ( readme ) : os . remove ( readme ) try : import pypandoc long_description = pypandoc . convert ( readme_md , 'rst' , format = 'md' ) except ( ImportError ) : with open ( readme_md , encoding = 'utf-8' ) as in_ : long_description = in_ . read ( ) with open ( readme , 'w' ) as out : out . write ( long_description ) else : # this is in case of `pip install fabsetup-x.y.z.tar.gz` with open ( readme , encoding = 'utf-8' ) as in_ : long_description = in_ . read ( ) return long_description | Try to convert content of README . md into rst format using pypandoc write it into README and return it . | 271 | 27 |
1,518 | def stat ( package , graph ) : client = requests . Session ( ) for name_or_url in package : package = get_package ( name_or_url , client ) if not package : secho ( u'Invalid name or URL: "{name}"' . format ( name = name_or_url ) , fg = 'red' , file = sys . stderr ) continue try : version_downloads = package . version_downloads except NotFoundError : secho ( u'No versions found for "{0}". ' u'Skipping. . .' . format ( package . name ) , fg = 'red' , file = sys . stderr ) continue echo ( u"Fetching statistics for '{url}'. . ." . format ( url = package . package_url ) ) min_ver , min_downloads = package . min_version max_ver , max_downloads = package . max_version if min_ver is None or max_ver is None : raise click . ClickException ( 'Package has no releases' ) avg_downloads = package . average_downloads total = package . downloads echo ( ) header = u'Download statistics for {name}' . format ( name = package . name ) echo_header ( header ) if graph : echo ( ) echo ( 'Downloads by version' ) echo ( package . chart ( ) ) echo ( ) echo ( "Min downloads: {min_downloads:12,} ({min_ver})" . format ( * * locals ( ) ) ) echo ( "Max downloads: {max_downloads:12,} ({max_ver})" . format ( * * locals ( ) ) ) echo ( "Avg downloads: {avg_downloads:12,}" . format ( * * locals ( ) ) ) echo ( "Total downloads: {total:12,}" . format ( * * locals ( ) ) ) echo ( ) echo_download_summary ( package ) echo ( ) | Print download statistics for a package . | 426 | 7 |
1,519 | def browse ( package , homepage ) : p = Package ( package ) try : if homepage : secho ( u'Opening homepage for "{0}"...' . format ( package ) , bold = True ) url = p . home_page else : secho ( u'Opening PyPI page for "{0}"...' . format ( package ) , bold = True ) url = p . package_url except NotFoundError : abort_not_found ( package ) click . launch ( url ) | Browse to a package s PyPI or project homepage . | 102 | 12 |
1,520 | def search ( query , n_results , web ) : if web : secho ( u'Opening search page for "{0}"...' . format ( query ) , bold = True ) url = SEARCH_URL . format ( query = urlquote ( query ) ) click . launch ( url ) else : searcher = Searcher ( ) results = searcher . search ( query , n = n_results ) first_line = style ( u'Search results for "{0}"\n' . format ( query ) , bold = True ) echo_via_pager ( first_line + '\n' . join ( [ format_result ( result ) for result in results ] ) ) | Search for a pypi package . | 145 | 8 |
1,521 | def info ( package , long_description , classifiers , license ) : client = requests . Session ( ) for name_or_url in package : package = get_package ( name_or_url , client ) if not package : secho ( u'Invalid name or URL: "{name}"' . format ( name = name_or_url ) , fg = 'red' , file = sys . stderr ) continue # Name and summary try : info = package . data [ 'info' ] except NotFoundError : secho ( u'No versions found for "{0}". ' u'Skipping. . .' . format ( package . name ) , fg = 'red' , file = sys . stderr ) continue echo_header ( name_or_url ) if package . summary : echo ( package . summary ) # Version info echo ( ) echo ( 'Latest release: {version:12}' . format ( version = info [ 'version' ] ) ) # Long description if long_description : echo ( ) echo ( package . description ) # Download info echo ( ) echo_download_summary ( package ) # Author info echo ( ) author , author_email = package . author , package . author_email if author : echo ( u'Author: {author:12}' . format ( * * locals ( ) ) ) if author_email : echo ( u'Author email: {author_email:12}' . format ( * * locals ( ) ) ) # Maintainer info maintainer , maintainer_email = ( package . maintainer , package . maintainer_email ) if maintainer or maintainer_email : echo ( ) if maintainer : echo ( u'Maintainer: {maintainer:12}' . format ( * * locals ( ) ) ) if maintainer_email : echo ( u'Maintainer email: {maintainer_email:12}' . format ( * * locals ( ) ) ) # URLS echo ( ) echo ( u'PyPI URL: {pypi_url:12}' . format ( pypi_url = package . package_url ) ) if package . home_page : echo ( u'Home Page: {home_page:12}' . format ( home_page = package . home_page ) ) if package . docs_url : echo ( u'Documentation: {docs_url:12}' . format ( docs_url = package . docs_url ) ) # Classifiers if classifiers : echo ( ) echo ( u'Classifiers: ' ) for each in info . get ( 'classifiers' , [ ] ) : echo ( '\t' + each ) if license and package . license : echo ( ) echo ( u'License: ' , nl = False ) # license may be just a name, e.g. 'BSD' or the full license text # If a new line is found in the text, print a new line if package . license . find ( '\n' ) >= 0 or len ( package . license ) > 80 : echo ( ) echo ( package . license ) echo ( ) | Get info about a package or packages . | 671 | 8 |
1,522 | def bargraph ( data , max_key_width = 30 ) : lines = [ ] max_length = min ( max ( len ( key ) for key in data . keys ( ) ) , max_key_width ) max_val = max ( data . values ( ) ) max_val_length = max ( len ( _style_value ( val ) ) for val in data . values ( ) ) term_width = get_terminal_size ( ) [ 0 ] max_bar_width = term_width - MARGIN - ( max_length + 3 + max_val_length + 3 ) template = u"{key:{key_width}} [ {value:{val_width}} ] {bar}" for key , value in data . items ( ) : try : bar = int ( math . ceil ( max_bar_width * value / max_val ) ) * TICK except ZeroDivisionError : bar = '' line = template . format ( key = key [ : max_length ] , value = _style_value ( value ) , bar = bar , key_width = max_length , val_width = max_val_length ) lines . append ( line ) return '\n' . join ( lines ) | Return a bar graph as a string given a dictionary of data . | 264 | 13 |
1,523 | def max_version ( self ) : data = self . version_downloads if not data : return None , 0 return max ( data . items ( ) , key = lambda item : item [ 1 ] ) | Version with the most downloads . | 43 | 6 |
1,524 | def min_version ( self ) : data = self . version_downloads if not data : return ( None , 0 ) return min ( data . items ( ) , key = lambda item : item [ 1 ] ) | Version with the fewest downloads . | 45 | 7 |
1,525 | def ripping_of_cds ( ) : # install and configure ripit install_package ( 'ripit' ) install_file_legacy ( path = '~/.ripit/config' , username = env . user ) # install burnit run ( 'mkdir -p ~/bin' ) install_file_legacy ( '~/bin/burnit' ) run ( 'chmod 755 ~/bin/burnit' ) | Install the tools ripit and burnit in order to rip and burn audio cds . | 94 | 18 |
1,526 | def i3 ( ) : install_package ( 'i3' ) install_file_legacy ( path = '~/.i3/config' , username = env . user , repos_dir = 'repos' ) # setup: hide the mouse if not in use # in ~/.i3/config: 'exec /home/<USERNAME>/repos/hhpc/hhpc -i 10 &' install_packages ( [ 'make' , 'pkg-config' , 'gcc' , 'libc6-dev' , 'libx11-dev' ] ) checkup_git_repo_legacy ( url = 'https://github.com/aktau/hhpc.git' ) run ( 'cd ~/repos/hhpc && make' ) | Install and customize the tiling window manager i3 . | 171 | 11 |
1,527 | def solarized ( ) : install_packages ( [ 'rxvt-unicode' , 'tmux' , 'vim' ] ) install_file_legacy ( '~/.Xresources' ) if env . host_string == 'localhost' : run ( 'xrdb ~/.Xresources' ) # install and call term_colors run ( 'mkdir -p ~/bin' ) install_file_legacy ( '~/bin/term_colors' ) run ( 'chmod 755 ~/bin/term_colors' ) run ( '~/bin/term_colors' ) | Set solarized colors in urxvt tmux and vim . | 131 | 14 |
1,528 | def vim ( ) : install_package ( 'vim' ) print_msg ( '## install ~/.vimrc\n' ) install_file_legacy ( '~/.vimrc' ) print_msg ( '\n## set up pathogen\n' ) run ( 'mkdir -p ~/.vim/autoload ~/.vim/bundle' ) checkup_git_repo_legacy ( url = 'https://github.com/tpope/vim-pathogen.git' ) run ( 'ln -snf ~/repos/vim-pathogen/autoload/pathogen.vim ' '~/.vim/autoload/pathogen.vim' ) print_msg ( '\n## install vim packages\n' ) install_package ( 'ctags' ) # required by package tagbar repos = [ { 'name' : 'vim-colors-solarized' , 'url' : 'git://github.com/altercation/vim-colors-solarized.git' , } , { 'name' : 'nerdtree' , 'url' : 'https://github.com/scrooloose/nerdtree.git' , } , { 'name' : 'vim-nerdtree-tabs' , 'url' : 'https://github.com/jistr/vim-nerdtree-tabs.git' , } , { 'name' : 'tagbar' , 'url' : 'https://github.com/majutsushi/tagbar.git' , } , ] checkup_git_repos_legacy ( repos , base_dir = '~/.vim/bundle' ) | Customize vim install package manager pathogen and some vim - packages . | 369 | 14 |
1,529 | def pyenv ( ) : install_packages ( [ 'make' , 'build-essential' , 'libssl-dev' , 'zlib1g-dev' , 'libbz2-dev' , 'libreadline-dev' , 'libsqlite3-dev' , 'wget' , 'curl' , 'llvm' , 'libncurses5-dev' , 'libncursesw5-dev' , ] ) if exists ( '~/.pyenv' ) : run ( 'cd ~/.pyenv && git pull' ) run ( '~/.pyenv/bin/pyenv update' ) else : run ( 'curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/' 'master/bin/pyenv-installer | bash' ) # add pyenv to $PATH and set up pyenv init bash_snippet = '~/.bashrc_pyenv' install_file_legacy ( path = bash_snippet ) prefix = flo ( 'if [ -f {bash_snippet} ]; ' ) enabler = flo ( 'if [ -f {bash_snippet} ]; then source {bash_snippet}; fi' ) if env . host == 'localhost' : # FIXME: next function currently only works for localhost uncomment_or_update_or_append_line ( filename = '~/.bashrc' , prefix = prefix , new_line = enabler ) else : print ( cyan ( '\nappend to ~/.bashrc:\n\n ' ) + enabler ) | Install or update the pyenv python environment . | 352 | 9 |
1,530 | def virtualbox_host ( ) : if query_yes_no ( question = 'Uninstall virtualbox-dkms?' , default = 'yes' ) : run ( 'sudo apt-get remove virtualbox-dkms' ) install_packages ( [ 'virtualbox' , 'virtualbox-qt' , 'virtualbox-dkms' , 'virtualbox-guest-dkms' , 'virtualbox-guest-additions-iso' , ] ) users = [ env . user ] for username in users : run ( flo ( 'sudo adduser {username} vboxusers' ) ) | Install a VirtualBox host system . | 129 | 7 |
1,531 | def pencil2 ( ) : repo_name = 'pencil2' repo_dir = flo ( '~/repos/{repo_name}' ) print_msg ( '## fetch latest pencil\n' ) checkup_git_repo_legacy ( url = 'https://github.com/prikhi/pencil.git' , name = repo_name ) print_msg ( '\n## build properties\n' ) update_or_append_line ( flo ( '{repo_dir}/build/properties.sh' ) , prefix = 'export MAX_VERSION=' , new_line = "export MAX_VERSION='100.*'" ) run ( flo ( 'cat {repo_dir}/build/properties.sh' ) ) run ( flo ( 'cd {repo_dir}/build && ./build.sh linux' ) , msg = '\n## build pencil\n' ) install_user_command_legacy ( 'pencil2' , pencil2_repodir = repo_dir ) print_msg ( '\nNow You can start pencil version 2 with this command:\n\n' ' pencil2' ) | Install or update latest Pencil version 2 a GUI prototyping tool . | 256 | 14 |
1,532 | def pencil3 ( ) : repo_name = 'pencil3' repo_dir = flo ( '~/repos/{repo_name}' ) print_msg ( '## fetch latest pencil\n' ) checkup_git_repo_legacy ( url = 'https://github.com/evolus/pencil.git' , name = repo_name ) run ( flo ( 'cd {repo_dir} && npm install' ) , msg = '\n## install npms\n' ) install_user_command_legacy ( 'pencil3' , pencil3_repodir = repo_dir ) print_msg ( '\nNow You can start pencil version 3 with this command:\n\n' ' pencil3' ) | Install or update latest Pencil version 3 a GUI prototyping tool . | 168 | 14 |
1,533 | def powerline_shell ( ) : assert env . host == 'localhost' , 'This task cannot run on a remote host' # set up fonts for powerline checkup_git_repo_legacy ( 'https://github.com/powerline/fonts.git' , name = 'powerline-fonts' ) run ( 'cd ~/repos/powerline-fonts && ./install.sh' ) # run('fc-cache -vf ~/.local/share/fonts') prefix = 'URxvt*font: ' from config import fontlist line = prefix + fontlist update_or_append_line ( filename = '~/.Xresources' , prefix = prefix , new_line = line ) if env . host_string == 'localhost' : run ( 'xrdb ~/.Xresources' ) # set up powerline-shell checkup_git_repo_legacy ( 'https://github.com/banga/powerline-shell.git' ) # checkup_git_repo_legacy('https://github.com/ohnonot/powerline-shell.git') install_file_legacy ( path = '~/repos/powerline-shell/config.py' ) run ( 'cd ~/repos/powerline-shell && ./install.py' ) question = 'Use normal question mark (u003F) for untracked files instead ' 'of fancy "black question mark ornament" (u2753, which may not work)?' if query_yes_no ( question , default = 'yes' ) : filename = '~/repos/powerline-shell/powerline-shell.py' update_or_append_line ( filename , keep_backup = False , prefix = " 'untracked': u'\u2753'," , new_line = " 'untracked': u'\u003F'," ) run ( flo ( 'chmod u+x {filename}' ) ) bash_snippet = '~/.bashrc_powerline_shell' install_file_legacy ( path = bash_snippet ) prefix = flo ( 'if [ -f {bash_snippet} ]; ' ) enabler = flo ( 'if [ -f {bash_snippet} ]; then source {bash_snippet}; fi' ) uncomment_or_update_or_append_line ( filename = '~/.bashrc' , prefix = prefix , new_line = enabler ) | Install and set up powerline - shell prompt . | 545 | 10 |
1,534 | def _init_boto3_clients ( self , profile , region ) : try : session = None if profile and region : session = boto3 . session . Session ( profile_name = profile , region_name = region ) elif profile : session = boto3 . session . Session ( profile_name = profile ) elif region : session = boto3 . session . Session ( region_name = region ) else : session = boto3 . session . Session ( ) self . _cloud_formation = session . client ( 'cloudformation' ) return True except Exception as wtf : logging . error ( wtf , exc_info = True ) return False | The utililty requires boto3 clients to CloudFormation . | 142 | 13 |
1,535 | def determine_drift ( self ) : try : response = self . _cloud_formation . detect_stack_drift ( StackName = self . _stack_name ) drift_request_id = response . get ( 'StackDriftDetectionId' , None ) if drift_request_id : logging . info ( 'drift_request_id: %s - polling' , drift_request_id ) drift_calc_done = False while not drift_calc_done : time . sleep ( self . nap_time ) response = self . _cloud_formation . describe_stack_drift_detection_status ( StackDriftDetectionId = drift_request_id ) current_state = response . get ( 'DetectionStatus' , None ) logging . info ( 'describe_stack_drift_detection_status(): {}' . format ( current_state ) ) drift_calc_done = current_state in CALC_DONE_STATES drift_answer = response . get ( 'StackDriftStatus' , 'UNKNOWN' ) logging . info ( 'drift of {}: {}' . format ( self . _stack_name , drift_answer ) ) if drift_answer == 'DRIFTED' : if self . _verbose : self . _print_drift_report ( ) return False else : return True else : logging . warning ( 'drift_request_id is None' ) return False except Exception as wtf : logging . error ( wtf , exc_info = True ) return False | Determine the drift of the stack . | 335 | 9 |
1,536 | def _print_drift_report ( self ) : try : response = self . _cloud_formation . describe_stack_resources ( StackName = self . _stack_name ) rows = [ ] for resource in response . get ( 'StackResources' , [ ] ) : row = [ ] row . append ( resource . get ( 'LogicalResourceId' , 'unknown' ) ) row . append ( resource . get ( 'PhysicalResourceId' , 'unknown' ) ) row . append ( resource . get ( 'ResourceStatus' , 'unknown' ) ) row . append ( resource . get ( 'DriftInformation' , { } ) . get ( 'StackResourceDriftStatus' , 'unknown' ) ) rows . append ( row ) print ( 'Drift Report:' ) print ( tabulate ( rows , headers = [ 'Logical ID' , 'Physical ID' , 'Resource Status' , 'Drift Info' ] ) ) except Exception as wtf : logging . error ( wtf , exc_info = True ) return False return True | Report the drift of the stack . | 224 | 7 |
1,537 | def set_data ( self , data ) : if data is None : self . data_size = 0 self . data = None return self . data_size = len ( data ) # create a string buffer so that null bytes aren't interpreted # as the end of the string self . data = ctypes . cast ( ctypes . create_string_buffer ( data ) , ctypes . c_void_p ) | Use this method to set the data for this blob | 87 | 10 |
1,538 | def get_data ( self ) : array = ctypes . POINTER ( ctypes . c_char * len ( self ) ) return ctypes . cast ( self . data , array ) . contents . raw | Get the data for this blob | 44 | 6 |
1,539 | def printMetaDataFor ( archive , location ) : desc = archive . getMetadataForLocation ( location ) if desc . isEmpty ( ) : print ( " no metadata for '{0}'" . format ( location ) ) return None print ( " metadata for '{0}':" . format ( location ) ) print ( " Created : {0}" . format ( desc . getCreated ( ) . getDateAsString ( ) ) ) for i in range ( desc . getNumModified ( ) ) : print ( " Modified : {0}" . format ( desc . getModified ( i ) . getDateAsString ( ) ) ) print ( " # Creators: {0}" . format ( desc . getNumCreators ( ) ) ) for i in range ( desc . getNumCreators ( ) ) : creator = desc . getCreator ( i ) print ( " {0} {1}" . format ( creator . getGivenName ( ) , creator . getFamilyName ( ) ) ) | Prints metadata for given location . | 213 | 7 |
1,540 | def printArchive ( fileName ) : archive = CombineArchive ( ) if archive . initializeFromArchive ( fileName ) is None : print ( "Invalid Combine Archive" ) return None print ( '*' * 80 ) print ( 'Print archive:' , fileName ) print ( '*' * 80 ) printMetaDataFor ( archive , "." ) print ( "Num Entries: {0}" . format ( archive . getNumEntries ( ) ) ) for i in range ( archive . getNumEntries ( ) ) : entry = archive . getEntry ( i ) print ( " {0}: location: {1} format: {2}" . format ( i , entry . getLocation ( ) , entry . getFormat ( ) ) ) printMetaDataFor ( archive , entry . getLocation ( ) ) for j in range ( entry . getNumCrossRefs ( ) ) : print ( " {0}: crossRef location {1}" . format ( j , entry . getCrossRef ( j ) . getLocation ( ) ) ) # the entry could now be extracted via # archive.extractEntry(entry.getLocation(), <filename or folder>) # or used as string # content = archive.extractEntryToString(entry.getLocation()); archive . cleanUp ( ) | Prints content of combine archive | 273 | 6 |
1,541 | def mklink ( ) : from optparse import OptionParser parser = OptionParser ( usage = "usage: %prog [options] link target" ) parser . add_option ( '-d' , '--directory' , help = "Target is a directory (only necessary if not present)" , action = "store_true" ) options , args = parser . parse_args ( ) try : link , target = args except ValueError : parser . error ( "incorrect number of arguments" ) symlink ( target , link , options . directory ) sys . stdout . write ( "Symbolic link created: %(link)s --> %(target)s\n" % vars ( ) ) | Like cmd . exe s mklink except it will infer directory status of the target . | 151 | 18 |
1,542 | def is_reparse_point ( path ) : res = api . GetFileAttributes ( path ) return ( res != api . INVALID_FILE_ATTRIBUTES and bool ( res & api . FILE_ATTRIBUTE_REPARSE_POINT ) ) | Determine if the given path is a reparse point . Return False if the file does not exist or the file attributes cannot be determined . | 60 | 29 |
1,543 | def is_symlink ( path ) : path = _patch_path ( path ) try : return _is_symlink ( next ( find_files ( path ) ) ) # comment below workaround for PyCQA/pyflakes#376 except WindowsError as orig_error : # noqa: F841 tmpl = "Error accessing {path}: {orig_error.message}" raise builtins . WindowsError ( tmpl . format ( * * locals ( ) ) ) | Assuming path is a reparse point determine if it s a symlink . | 106 | 16 |
1,544 | def get_final_path ( path ) : desired_access = api . NULL share_mode = ( api . FILE_SHARE_READ | api . FILE_SHARE_WRITE | api . FILE_SHARE_DELETE ) security_attributes = api . LPSECURITY_ATTRIBUTES ( ) # NULL pointer hFile = api . CreateFile ( path , desired_access , share_mode , security_attributes , api . OPEN_EXISTING , api . FILE_FLAG_BACKUP_SEMANTICS , api . NULL , ) if hFile == api . INVALID_HANDLE_VALUE : raise WindowsError ( ) buf_size = api . GetFinalPathNameByHandle ( hFile , LPWSTR ( ) , 0 , api . VOLUME_NAME_DOS ) handle_nonzero_success ( buf_size ) buf = create_unicode_buffer ( buf_size ) result_length = api . GetFinalPathNameByHandle ( hFile , buf , len ( buf ) , api . VOLUME_NAME_DOS ) assert result_length < len ( buf ) handle_nonzero_success ( result_length ) handle_nonzero_success ( api . CloseHandle ( hFile ) ) return buf [ : result_length ] | r For a given path determine the ultimate location of that path . Useful for resolving symlink targets . This functions wraps the GetFinalPathNameByHandle from the Windows SDK . | 276 | 36 |
1,545 | def join ( * paths ) : paths_with_drives = map ( os . path . splitdrive , paths ) drives , paths = zip ( * paths_with_drives ) # the drive we care about is the last one in the list drive = next ( filter ( None , reversed ( drives ) ) , '' ) return os . path . join ( drive , os . path . join ( * paths ) ) | r Wrapper around os . path . join that works with Windows drive letters . | 87 | 16 |
1,546 | def resolve_path ( target , start = os . path . curdir ) : return os . path . normpath ( join ( start , target ) ) | r Find a path from start to target where target is relative to start . | 32 | 15 |
1,547 | def trace_symlink_target ( link ) : if not is_symlink ( link ) : raise ValueError ( "link must point to a symlink on the system" ) while is_symlink ( link ) : orig = os . path . dirname ( link ) link = readlink ( link ) link = resolve_path ( link , orig ) return link | Given a file that is known to be a symlink trace it to its ultimate target . | 81 | 19 |
1,548 | def patch_os_module ( ) : if not hasattr ( os , 'symlink' ) : os . symlink = symlink os . path . islink = islink if not hasattr ( os , 'readlink' ) : os . readlink = readlink | jaraco . windows provides the os . symlink and os . readlink functions . Monkey - patch the os module to include them if not present . | 61 | 31 |
1,549 | def task ( func , * args , * * kwargs ) : prefix = '\n# ' tail = '\n' return fabric . api . task ( print_full_name ( color = magenta , prefix = prefix , tail = tail ) ( print_doc1 ( func ) ) , * args , * * kwargs ) | Composition of decorator functions for inherent self - documentation on task execution . | 73 | 15 |
1,550 | def subtask ( * args , * * kwargs ) : depth = kwargs . get ( 'depth' , 2 ) prefix = kwargs . get ( 'prefix' , '\n' + '#' * depth + ' ' ) tail = kwargs . get ( 'tail' , '\n' ) doc1 = kwargs . get ( 'doc1' , False ) color = kwargs . get ( 'color' , cyan ) def real_decorator ( func ) : if doc1 : return print_full_name ( color = color , prefix = prefix , tail = tail ) ( print_doc1 ( func ) ) return print_full_name ( color = color , prefix = prefix , tail = tail ) ( func ) invoked = bool ( not args or kwargs ) if not invoked : # invoke decorator function which returns the wrapper function return real_decorator ( func = args [ 0 ] ) return real_decorator | Decorator which prints out the name of the decorated function on execution . | 210 | 15 |
1,551 | def _is_sudoer ( what_for = '' ) : if env . get ( 'nosudo' , None ) is None : if what_for : print ( yellow ( what_for ) ) with quiet ( ) : # possible outputs: # en: "Sorry, user winhost-tester may not run sudo on <hostname>" # en: "sudo: a password is required" (=> is sudoer) # de: "sudo: Ein Passwort ist notwendig" (=> is sudoer) output = run ( 'sudo -nv' , capture = True ) env . nosudo = not ( output . startswith ( 'sudo: ' ) or output == '' ) if env . nosudo : print ( 'Cannot execute sudo-commands' ) return not env . nosudo | Return True if current user is a sudoer else False . | 175 | 12 |
1,552 | def install_packages ( packages , what_for = 'for a complete setup to work properly' ) : res = True non_installed_packages = _non_installed ( packages ) packages_str = ' ' . join ( non_installed_packages ) if non_installed_packages : with quiet ( ) : dpkg = _has_dpkg ( ) hint = ' (You may have to install them manually)' do_install = False go_on = True if dpkg : if _is_sudoer ( 'Want to install dpkg packages' ) : do_install = True else : do_install is False # cannot install anything info = yellow ( ' ' . join ( [ 'This deb packages are missing to be installed' , flo ( "{what_for}: " ) , ', ' . join ( non_installed_packages ) , ] ) ) question = ' Continue anyway?' go_on = query_yes_no ( info + hint + question , default = 'no' ) else : # dpkg == False, unable to determine if packages are installed do_install = False # cannot install anything info = yellow ( ' ' . join ( [ flo ( 'Required {what_for}: ' ) , ', ' . join ( non_installed_packages ) , ] ) ) go_on = query_yes_no ( info + hint + ' Continue?' , default = 'yes' ) if not go_on : sys . exit ( 'Abort' ) if do_install : command = flo ( 'sudo apt-get install {packages_str}' ) res = run ( command ) . return_code == 0 return res | Try to install . deb packages given by list . | 344 | 10 |
1,553 | def checkup_git_repos_legacy ( repos , base_dir = '~/repos' , verbose = False , prefix = '' , postfix = '' ) : run ( flo ( 'mkdir -p {base_dir}' ) ) for repo in repos : cur_base_dir = repo . get ( 'base_dir' , base_dir ) checkup_git_repo_legacy ( url = repo [ 'url' ] , name = repo . get ( 'name' , None ) , base_dir = cur_base_dir , verbose = verbose , prefix = prefix , postfix = postfix ) | Checkout or update git repos . | 143 | 8 |
1,554 | def checkup_git_repo_legacy ( url , name = None , base_dir = '~/repos' , verbose = False , prefix = '' , postfix = '' ) : if not name : match = re . match ( r'.*/(.+)\.git' , url ) assert match , flo ( "Unable to extract repo name from '{url}'" ) name = match . group ( 1 ) assert name is not None , flo ( 'Cannot extract repo name from repo: {url}' ) assert name != '' , flo ( 'Cannot extract repo name from repo: {url} (empty)' ) if verbose : name_blue = blue ( name ) print_msg ( flo ( '{prefix}Checkout or update {name_blue}{postfix}' ) ) if not exists ( base_dir ) : run ( flo ( 'mkdir -p {base_dir}' ) ) if not exists ( flo ( '{base_dir}/{name}/.git' ) ) : run ( flo ( ' && ' . join ( [ 'cd {base_dir}' , 'git clone {url} {name}' ] ) ) , msg = 'clone repo' ) else : if verbose : print_msg ( 'update: pull from origin' ) run ( flo ( 'cd {base_dir}/{name} && git pull' ) ) return name | Checkout or update a git repo . | 304 | 8 |
1,555 | def install_file_legacy ( path , sudo = False , from_path = None , * * substitutions ) : # source paths 'from_custom' and 'from_common' from_path = from_path or path # remove beginning '/' (if any), eg '/foo/bar' -> 'foo/bar' from_tail = join ( 'files' , from_path . lstrip ( os . sep ) ) if from_path . startswith ( '~/' ) : from_tail = join ( 'files' , 'home' , 'USERNAME' , from_path [ 2 : ] ) # without beginning '~/' from_common = join ( FABFILE_DATA_DIR , from_tail ) from_custom = join ( FABSETUP_CUSTOM_DIR , from_tail ) # target path 'to_' (path or tempfile) for subst in [ 'SITENAME' , 'USER' , 'ADDON' , 'TASK' ] : sitename = substitutions . get ( subst , False ) if sitename : path = path . replace ( subst , sitename ) to_ = path if sudo : to_ = join ( os . sep , 'tmp' , 'fabsetup_' + os . path . basename ( path ) ) path_dir = dirname ( path ) # copy file if isfile ( from_custom ) : run ( flo ( 'mkdir -p {path_dir}' ) ) put ( from_custom , to_ ) elif isfile ( from_custom + '.template' ) : _install_file_from_template_legacy ( from_custom + '.template' , to_ = to_ , * * substitutions ) elif isfile ( from_common ) : run ( flo ( 'mkdir -p {path_dir}' ) ) put ( from_common , to_ ) else : _install_file_from_template_legacy ( from_common + '.template' , to_ = to_ , * * substitutions ) if sudo : run ( flo ( 'sudo mv --force {to_} {path}' ) ) | Install file with path on the host target . | 467 | 9 |
1,556 | def install_user_command_legacy ( command , * * substitutions ) : path = flo ( '~/bin/{command}' ) install_file_legacy ( path , * * substitutions ) run ( flo ( 'chmod 755 {path}' ) ) | Install command executable file into users bin dir . | 61 | 9 |
1,557 | def _line_2_pair ( line ) : key , val = line . split ( '=' ) return key . lower ( ) , val . strip ( '"' ) | Return bash variable declaration as name - value pair . | 36 | 10 |
1,558 | def extract_minors_from_setup_py ( filename_setup_py ) : # eg: minors_str = '2.6\n2.7\n3.3\n3.4\n3.5\n3.6' minors_str = fabric . api . local ( flo ( 'grep --perl-regexp --only-matching ' '"(?<=Programming Language :: Python :: )\\d+\\.\\d+" ' '{filename_setup_py}' ) , capture = True ) # eg: minors = ['2.6', '2.7', '3.3', '3.4', '3.5', '3.6'] minors = minors_str . split ( ) return minors | Extract supported python minor versions from setup . py and return them as a list of str . | 167 | 19 |
1,559 | def vim_janus ( uninstall = None ) : if uninstall is not None : uninstall_janus ( ) else : if not exists ( '~/.vim/janus' ) : print_msg ( 'not installed => install' ) install_janus ( ) else : print_msg ( 'already installed => update' ) update_janus ( ) customize_janus ( ) show_files_used_by_vim_and_janus ( ) | Install or update Janus a distribution of addons and mappings for vim . | 98 | 16 |
1,560 | def scan ( host , port = 80 , url = None , https = False , timeout = 1 , max_size = 65535 ) : starts = OrderedDict ( ) ends = OrderedDict ( ) port = int ( port ) result = dict ( host = host , port = port , state = 'closed' , durations = OrderedDict ( ) ) if url : timeout = 1 result [ 'code' ] = None starts [ 'all' ] = starts [ 'dns' ] = datetime . datetime . now ( ) # DNS Lookup try : hostip = socket . gethostbyname ( host ) result [ 'ip' ] = hostip ends [ 'dns' ] = datetime . datetime . now ( ) except socket . gaierror : raise ScanFailed ( 'DNS Lookup failed' , result = result ) # TCP Connect starts [ 'connect' ] = datetime . datetime . now ( ) network_socket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) network_socket . settimeout ( timeout ) result_connection = network_socket . connect_ex ( ( hostip , port ) ) ends [ 'connect' ] = datetime . datetime . now ( ) # SSL if https : starts [ 'ssl' ] = datetime . datetime . now ( ) try : network_socket = ssl . wrap_socket ( network_socket ) except socket . timeout : raise ScanFailed ( 'SSL socket timeout' , result = result ) ends [ 'ssl' ] = datetime . datetime . now ( ) # Get request if result_connection == 0 and url : starts [ 'request' ] = datetime . datetime . now ( ) network_socket . send ( "GET {0} HTTP/1.0\r\nHost: {1}\r\n\r\n" . format ( url , host ) . encode ( 'ascii' ) ) if max_size : data = network_socket . recv ( max_size ) else : data = network_socket . recv ( ) result [ 'length' ] = len ( data ) data = data . decode ( 'ascii' , errors = 'ignore' ) result [ 'response' ] = ( data ) try : result [ 'code' ] = int ( data . split ( '\n' ) [ 0 ] . split ( ) [ 1 ] ) except IndexError : pass ends [ 'request' ] = datetime . datetime . now ( ) network_socket . close ( ) # Calculate durations ends [ 'all' ] = datetime . datetime . now ( ) for duration in starts . keys ( ) : if duration in ends . keys ( ) : result [ 'durations' ] [ duration ] = ends [ duration ] - starts [ duration ] if result_connection == 0 : result [ 'state' ] = 'open' return result | Scan a network port | 629 | 4 |
1,561 | def ping ( host , port = 80 , url = None , https = False , timeout = 1 , max_size = 65535 , sequence = 0 ) : try : result = scan ( host = host , port = port , url = url , https = https , timeout = timeout , max_size = max_size ) except ScanFailed as failure : result = failure . result result [ 'error' ] = True result [ 'error_message' ] = str ( failure ) result_obj = PingResponse ( host = host , port = port , ip = result . get ( 'ip' , None ) , sequence = sequence , durations = result . get ( 'durations' , None ) , code = result . get ( 'code' , None ) , state = result . get ( 'state' , 'unknown' ) , length = result . get ( 'length' , 0 ) , response = result . get ( 'response' , None ) , error = result . get ( 'error' , False ) , error_message = result . get ( 'error_message' , None ) , responding = True if result . get ( 'state' , 'unknown' ) in [ 'open' ] else False , start = datetime . datetime . now ( ) , end = datetime . datetime . now ( ) + result [ 'durations' ] . get ( 'all' , datetime . timedelta ( 0 ) ) if result . get ( 'durations' , None ) else None ) return result_obj | Ping a host | 322 | 3 |
1,562 | def delete ( stack , region , profile ) : ini_data = { } environment = { } environment [ 'stack_name' ] = stack if region : environment [ 'region' ] = region else : environment [ 'region' ] = find_myself ( ) if profile : environment [ 'profile' ] = profile ini_data [ 'environment' ] = environment if start_smash ( ini_data ) : sys . exit ( 0 ) else : sys . exit ( 1 ) | Delete the given CloudFormation stack . | 105 | 8 |
1,563 | def list ( region , profile ) : ini_data = { } environment = { } if region : environment [ 'region' ] = region else : environment [ 'region' ] = find_myself ( ) if profile : environment [ 'profile' ] = profile ini_data [ 'environment' ] = environment if start_list ( ini_data ) : sys . exit ( 0 ) else : sys . exit ( 1 ) | List all the CloudFormation stacks in the given region . | 92 | 12 |
1,564 | def drift ( stack , region , profile ) : logging . debug ( 'finding drift - stack: {}' . format ( stack ) ) logging . debug ( 'region: {}' . format ( region ) ) logging . debug ( 'profile: {}' . format ( profile ) ) tool = DriftTool ( Stack = stack , Region = region , Profile = profile , Verbose = True ) if tool . determine_drift ( ) : sys . exit ( 0 ) else : sys . exit ( 1 ) | Produce a CloudFormation drift report for the given stack . | 104 | 13 |
1,565 | def start_upsert ( ini_data ) : stack_driver = CloudStackUtility ( ini_data ) poll_stack = not ini_data . get ( 'no_poll' , False ) if stack_driver . upsert ( ) : logging . info ( 'stack create/update was started successfully.' ) if poll_stack : stack_tool = None try : profile = ini_data . get ( 'environment' , { } ) . get ( 'profile' ) if profile : boto3_session = boto3 . session . Session ( profile_name = profile ) else : boto3_session = boto3 . session . Session ( ) region = ini_data [ 'environment' ] [ 'region' ] stack_name = ini_data [ 'environment' ] [ 'stack_name' ] cf_client = stack_driver . get_cloud_formation_client ( ) if not cf_client : cf_client = boto3_session . client ( 'cloudformation' , region_name = region ) stack_tool = stack_tool = StackTool ( stack_name , region , cf_client ) except Exception as wtf : logging . warning ( 'there was a problems creating stack tool: {}' . format ( wtf ) ) if stack_driver . poll_stack ( ) : try : logging . info ( 'stack create/update was finished successfully.' ) stack_tool . print_stack_info ( ) except Exception as wtf : logging . warning ( 'there was a problems printing stack info: {}' . format ( wtf ) ) sys . exit ( 0 ) else : try : logging . error ( 'stack create/update was did not go well.' ) stack_tool . print_stack_events ( ) except Exception as wtf : logging . warning ( 'there was a problems printing stack events: {}' . format ( wtf ) ) sys . exit ( 1 ) else : logging . error ( 'start of stack create/update did not go well.' ) sys . exit ( 1 ) | Helper function to facilitate upsert . | 438 | 7 |
1,566 | def read_config_info ( ini_file ) : try : config = RawConfigParser ( ) config . optionxform = lambda option : option config . read ( ini_file ) the_stuff = { } for section in config . sections ( ) : the_stuff [ section ] = { } for option in config . options ( section ) : the_stuff [ section ] [ option ] = config . get ( section , option ) return the_stuff except Exception as wtf : logging . error ( 'Exception caught in read_config_info(): {}' . format ( wtf ) ) traceback . print_exc ( file = sys . stdout ) return sys . exit ( 1 ) | Read the INI file | 147 | 5 |
1,567 | def print_stack_info ( self ) : try : rest_api_id = None deployment_found = False response = self . _cf_client . describe_stack_resources ( StackName = self . _stack_name ) print ( '\nThe following resources were created:' ) rows = [ ] for resource in response [ 'StackResources' ] : if resource [ 'ResourceType' ] == 'AWS::ApiGateway::RestApi' : rest_api_id = resource [ 'PhysicalResourceId' ] elif resource [ 'ResourceType' ] == 'AWS::ApiGateway::Deployment' : deployment_found = True row = [ ] row . append ( resource [ 'ResourceType' ] ) row . append ( resource [ 'LogicalResourceId' ] ) row . append ( resource [ 'PhysicalResourceId' ] ) rows . append ( row ) ''' print('\t{}\t{}\t{}'.format( resource['ResourceType'], resource['LogicalResourceId'], resource['PhysicalResourceId'] ) ) ''' print ( tabulate ( rows , headers = [ 'Resource Type' , 'Logical ID' , 'Physical ID' ] ) ) if rest_api_id and deployment_found : url = 'https://{}.execute-api.{}.amazonaws.com/{}' . format ( rest_api_id , self . _region , '<stage>' ) print ( '\nThe deployed service can be found at this URL:' ) print ( '\t{}\n' . format ( url ) ) return response except Exception as wtf : print ( wtf ) return None | List resources from the given stack | 357 | 6 |
1,568 | def print_stack_events ( self ) : first_token = '7be7981bd6287dd8112305e8f3822a6f' keep_going = True next_token = first_token current_request_token = None rows = [ ] try : while keep_going and next_token : if next_token == first_token : response = self . _cf_client . describe_stack_events ( StackName = self . _stack_name ) else : response = self . _cf_client . describe_stack_events ( StackName = self . _stack_name , NextToken = next_token ) next_token = response . get ( 'NextToken' , None ) for event in response [ 'StackEvents' ] : row = [ ] event_time = event . get ( 'Timestamp' ) request_token = event . get ( 'ClientRequestToken' , 'unknown' ) if current_request_token is None : current_request_token = request_token elif current_request_token != request_token : keep_going = False break row . append ( event_time . strftime ( '%x %X' ) ) row . append ( event . get ( 'LogicalResourceId' ) ) row . append ( event . get ( 'ResourceStatus' ) ) row . append ( event . get ( 'ResourceStatusReason' , '' ) ) rows . append ( row ) if len ( rows ) > 0 : print ( '\nEvents for the current upsert:' ) print ( tabulate ( rows , headers = [ 'Time' , 'Logical ID' , 'Status' , 'Message' ] ) ) return True else : print ( '\nNo stack events found\n' ) except Exception as wtf : print ( wtf ) return False | List events from the given stack | 387 | 6 |
1,569 | def trac ( ) : hostname = re . sub ( r'^[^@]+@' , '' , env . host ) # without username if any sitename = query_input ( question = '\nEnter site-name of Your trac web service' , default = flo ( 'trac.{hostname}' ) ) username = env . user site_dir = flo ( '/home/{username}/sites/{sitename}' ) bin_dir = flo ( '{site_dir}/virtualenv/bin' ) # provisioning steps install_or_upgrade_virtualenv_pip_package ( ) create_directory_structure ( site_dir ) update_virtualenv ( site_dir , sitename ) set_up_trac_plugins ( sitename , site_dir , bin_dir ) set_up_gunicorn ( site_dir , sitename ) configure_nginx ( username , sitename , hostname ) if query_yes_no ( '\nRestore trac environment from backup tarball?' , default = 'no' ) : restore_tracenv_from_backup_tarball ( site_dir , bin_dir ) elif not tracenv_exists ( site_dir ) : init_tracenv ( site_dir , bin_dir , username ) upgrade_tracenv ( site_dir , bin_dir ) set_up_upstart_for_gunicorn ( sitename , username , site_dir ) | Set up or update a trac project . | 326 | 9 |
1,570 | async def query ( self , path , method = 'get' , * * params ) : if method in ( 'get' , 'post' , 'patch' , 'delete' , 'put' ) : full_path = self . host + path if method == 'get' : resp = await self . aio_sess . get ( full_path , params = params ) elif method == 'post' : resp = await self . aio_sess . post ( full_path , data = params ) elif method == 'patch' : resp = await self . aio_sess . patch ( full_path , data = params ) elif method == 'delete' : resp = await self . aio_sess . delete ( full_path , params = params , headers = params ) elif method == 'put' : resp = await self . aio_sess . put ( full_path , data = params ) async with resp : # return the content if its a binary one if resp . content_type . startswith ( 'application/pdf' ) or resp . content_type . startswith ( 'application/epub' ) : return await resp . read ( ) return await self . handle_json_response ( resp ) else : raise ValueError ( 'method expected: get, post, patch, delete, put' ) | Do a query to the System API | 291 | 7 |
1,571 | def revealjs ( basedir = None , title = None , subtitle = None , description = None , github_user = None , github_repo = None ) : basedir = basedir or query_input ( 'Base dir of the presentation?' , default = '~/repos/my_presi' ) revealjs_repo_name = 'reveal.js' revealjs_dir = flo ( '{basedir}/{revealjs_repo_name}' ) _lazy_dict [ 'presi_title' ] = title _lazy_dict [ 'presi_subtitle' ] = subtitle _lazy_dict [ 'presi_description' ] = description _lazy_dict [ 'github_user' ] = github_user _lazy_dict [ 'github_repo' ] = github_repo question = flo ( "Base dir already contains a sub dir '{revealjs_repo_name}'." ' Reset (and re-download) reveal.js codebase?' ) if not exists ( revealjs_dir ) or query_yes_no ( question , default = 'no' ) : run ( flo ( 'mkdir -p {basedir}' ) ) set_up_revealjs_codebase ( basedir , revealjs_repo_name ) install_plugins ( revealjs_dir ) apply_customizations ( repo_dir = revealjs_dir ) if exists ( revealjs_dir ) : install_files_in_basedir ( basedir , repo_dir = revealjs_dir ) init_git_repo ( basedir ) create_github_remote_repo ( basedir ) setup_npm ( revealjs_dir ) else : print ( 'abort' ) | Set up or update a reveals . js presentation with slides written in markdown . | 382 | 16 |
1,572 | def tweak_css ( repo_dir ) : print_msg ( "* don't capitalize titles (no uppercase headings)" ) files = [ 'beige.css' , 'black.css' , 'blood.css' , 'league.css' , 'moon.css' , 'night.css' , 'serif.css' , 'simple.css' , 'sky.css' , 'solarized.css' , 'white.css' , ] line = ' text-transform: uppercase;' for file_ in files : update_or_append_line ( filename = flo ( '{repo_dir}/css/theme/{file_}' ) , prefix = line , new_line = flo ( '/*{line}*/' ) ) print_msg ( '* images without border' ) data = [ { 'file' : 'beige.css' , 'line' : ' border: 4px solid #333;' } , { 'file' : 'black.css' , 'line' : ' border: 4px solid #fff;' } , { 'file' : 'blood.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'league.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'moon.css' , 'line' : ' border: 4px solid #93a1a1;' } , { 'file' : 'night.css' , 'line' : ' border: 4px solid #eee;' } , { 'file' : 'serif.css' , 'line' : ' border: 4px solid #000;' } , { 'file' : 'simple.css' , 'line' : ' border: 4px solid #000;' } , { 'file' : 'sky.css' , 'line' : ' border: 4px solid #333;' } , { 'file' : 'solarized.css' , 'line' : ' border: 4px solid #657b83;' } , { 'file' : 'white.css' , 'line' : ' border: 4px solid #222;' } , ] for item in data : file_ = item [ 'file' ] lines = [ item [ 'line' ] , ] lines . extend ( [ ' box-shadow: 0 0 10px rgba(0, 0, 0, 0.15); }' , ' box-shadow: 0 0 20px rgba(0, 0, 0, 0.55); }' ] ) for line in lines : update_or_append_line ( filename = flo ( '{repo_dir}/css/theme/{file_}' ) , prefix = line , new_line = flo ( '/*{line}*/' ) ) | Comment out some css settings . | 632 | 7 |
1,573 | def decktape ( ) : run ( 'mkdir -p ~/bin/decktape' ) if not exists ( '~/bin/decktape/decktape-1.0.0' ) : print_msg ( '\n## download decktape 1.0.0\n' ) run ( 'cd ~/bin/decktape && ' 'curl -L https://github.com/astefanutti/decktape/archive/' 'v1.0.0.tar.gz | tar -xz --exclude phantomjs' ) run ( 'cd ~/bin/decktape/decktape-1.0.0 && ' 'curl -L https://github.com/astefanutti/decktape/releases/' 'download/v1.0.0/phantomjs-linux-x86-64 -o phantomjs' ) run ( 'cd ~/bin/decktape/decktape-1.0.0 && ' 'chmod +x phantomjs' ) run ( 'ln -snf ~/bin/decktape/decktape-1.0.0 ~/bin/decktape/active' , msg = '\n## link installed decktape version as active' ) print_msg ( '\nCreate PDF from reveal.js presentation:\n\n ' '# serve presentation:\n ' 'cd ~/repos/my_presi/reveal.js/ && npm start\n\n ' '# create pdf in another shell:\n ' 'cd ~/bin/decktape/active && \\\n ' './phantomjs decktape.js --size 1280x800 localhost:8000 ' '~/repos/my_presi/my_presi.pdf' ) | Install DeckTape . | 394 | 5 |
1,574 | def revealjs_template ( ) : from config import basedir , github_user , github_repo run ( flo ( 'rm -f {basedir}/index.html' ) ) run ( flo ( 'rm -f {basedir}/slides.md' ) ) run ( flo ( 'rm -f {basedir}/README.md' ) ) run ( flo ( 'rm -rf {basedir}/img/' ) ) title = 'reveal.js template' subtitle = '[reveal.js][3] presentation written ' 'in [markdown][4] set up with [fabric][5] & [fabsetup][6]' description = '''\ This presentation shows how to create a reveal.js presentation which will be set up with the fabric task `setup.revealjs` of fabsetup. Also, you can use this presentation source as a reveal.js template: * Checkout this repo * Then set the title in the `index.html` and edit the `slides.md`.''' execute ( revealjs , basedir , title , subtitle , description , github_user , github_repo ) # (README.md was removed, but not the github remote repo) print_msg ( '\n## Re-add github repo infos into README.md' ) basename = os . path . basename ( basedir ) _insert_repo_infos_into_readme ( basedir , github_user = _lazy ( 'github_user' ) , github_repo = _lazy ( 'github_repo' , default = basename ) ) print_msg ( '\n## Assure symbolic link not tracked by git exists\n' ) run ( flo ( 'ln -snf ../reveal.js {basedir}/reveal.js/reveal.js' ) ) | Create or update the template presentation demo using task revealjs . | 405 | 12 |
1,575 | def spatialDomainNoGrid ( self ) : self . w = np . zeros ( self . xw . shape ) if self . Debug : print ( "w = " ) print ( self . w . shape ) for i in range ( len ( self . q ) ) : # More efficient if we have created some 0-load points # (e.g., for where we want output) if self . q [ i ] != 0 : dist = np . abs ( self . xw - self . x [ i ] ) self . w -= self . q [ i ] * self . coeff * np . exp ( - dist / self . alpha ) * ( np . cos ( dist / self . alpha ) + np . sin ( dist / self . alpha ) ) | Superposition of analytical solutions without a gridded domain | 161 | 11 |
1,576 | def build_diagonals ( self ) : ########################################################## # INCORPORATE BOUNDARY CONDITIONS INTO COEFFICIENT ARRAY # ########################################################## # Roll to keep the proper coefficients at the proper places in the # arrays: Python will naturally just do vertical shifts instead of # diagonal shifts, so this takes into account the horizontal compoent # to ensure that boundary values are at the right place. self . l2 = np . roll ( self . l2 , - 2 ) self . l1 = np . roll ( self . l1 , - 1 ) self . r1 = np . roll ( self . r1 , 1 ) self . r2 = np . roll ( self . r2 , 2 ) # Then assemble these rows: this is where the periodic boundary condition # can matter. if self . coeff_matrix is not None : pass elif self . BC_E == 'Periodic' and self . BC_W == 'Periodic' : # In this case, the boundary-condition-related stacking has already # happened inside b.c.-handling function. This is because periodic # boundary conditions require extra diagonals to exist on the edges of # the solution array pass else : self . diags = np . vstack ( ( self . l2 , self . l1 , self . c0 , self . r1 , self . r2 ) ) self . offsets = np . array ( [ - 2 , - 1 , 0 , 1 , 2 ] ) # Everybody now (including periodic b.c. cases) self . coeff_matrix = spdiags ( self . diags , self . offsets , self . nx , self . nx , format = 'csr' ) | Builds the diagonals for the coefficient array | 371 | 10 |
1,577 | def createArchiveExample ( fileName ) : print ( '*' * 80 ) print ( 'Create archive' ) print ( '*' * 80 ) archive = CombineArchive ( ) archive . addFile ( fileName , # filename "./models/model.xml" , # target file name KnownFormats . lookupFormat ( "sbml" ) , # look up identifier for SBML models True # mark file as master ) # add metadata to the archive itself description = OmexDescription ( ) description . setAbout ( "." ) description . setDescription ( "Simple test archive including one SBML model" ) description . setCreated ( OmexDescription . getCurrentDateAndTime ( ) ) creator = VCard ( ) creator . setFamilyName ( "Bergmann" ) creator . setGivenName ( "Frank" ) creator . setEmail ( "fbergman@caltech.edu" ) creator . setOrganization ( "Caltech" ) description . addCreator ( creator ) archive . addMetadata ( "." , description ) # add metadata to the added file location = "./models/model.xml" description = OmexDescription ( ) description . setAbout ( location ) description . setDescription ( "SBML model" ) description . setCreated ( OmexDescription . getCurrentDateAndTime ( ) ) archive . addMetadata ( location , description ) # write the archive out_file = "out.omex" archive . writeToFile ( out_file ) print ( 'Archive created:' , out_file ) | Creates Combine Archive containing the given file . | 325 | 9 |
1,578 | def calc_deviation ( values , average ) : size = len ( values ) if size < 2 : return 0 calc_sum = 0.0 for number in range ( 0 , size ) : calc_sum += math . sqrt ( ( values [ number ] - average ) ** 2 ) return math . sqrt ( ( 1.0 / ( size - 1 ) ) * ( calc_sum / size ) ) | Calculate the standard deviation of a list of values | 87 | 11 |
1,579 | def append ( self , value ) : self . count += 1 if self . count == 1 : self . old_m = self . new_m = value self . old_s = 0 else : self . new_m = self . old_m + ( value - self . old_m ) / self . count self . new_s = self . old_s + ( value - self . old_m ) * ( value - self . new_m ) self . old_m = self . new_m self . old_s = self . new_s | Append a value to the stats list | 120 | 8 |
1,580 | def pipeline ( steps , initial = None ) : def apply ( result , step ) : return step ( result ) return reduce ( apply , steps , initial ) | Chain results from a list of functions . Inverted reduce . | 32 | 12 |
1,581 | def add ( class_ , name , value , sep = ';' ) : values = class_ . get_values_list ( name , sep ) if value in values : return new_value = sep . join ( values + [ value ] ) winreg . SetValueEx ( class_ . key , name , 0 , winreg . REG_EXPAND_SZ , new_value ) class_ . notify ( ) | Add a value to a delimited variable but only when the value isn t already present . | 89 | 18 |
1,582 | def current ( class_ ) : tzi = class_ ( ) kernel32 = ctypes . windll . kernel32 getter = kernel32 . GetTimeZoneInformation getter = getattr ( kernel32 , 'GetDynamicTimeZoneInformation' , getter ) code = getter ( ctypes . byref ( tzi ) ) return code , tzi | Windows Platform SDK GetTimeZoneInformation | 75 | 7 |
1,583 | def dynamic_info ( self ) : if self . key_name : dyn_key = self . get_key ( ) . subkey ( 'Dynamic DST' ) del dyn_key [ 'FirstEntry' ] del dyn_key [ 'LastEntry' ] years = map ( int , dyn_key . keys ( ) ) values = map ( Info , dyn_key . values ( ) ) # create a range mapping that searches by descending year and matches # if the target year is greater or equal. return RangeMap ( zip ( years , values ) , RangeMap . descending , operator . ge ) else : return AnyDict ( self ) | Return a map that for a given year will return the correct Info | 135 | 13 |
1,584 | def _locate_day ( year , cutoff ) : # MS stores Sunday as 0, Python datetime stores Monday as zero target_weekday = ( cutoff . day_of_week + 6 ) % 7 # For SYSTEMTIMEs relating to time zone inforamtion, cutoff.day # is the week of the month week_of_month = cutoff . day # so the following is the first day of that week day = ( week_of_month - 1 ) * 7 + 1 result = datetime . datetime ( year , cutoff . month , day , cutoff . hour , cutoff . minute , cutoff . second , cutoff . millisecond ) # now the result is the correct week, but not necessarily # the correct day of the week days_to_go = ( target_weekday - result . weekday ( ) ) % 7 result += datetime . timedelta ( days_to_go ) # if we selected a day in the month following the target month, # move back a week or two. # This is necessary because Microsoft defines the fifth week in a month # to be the last week in a month and adding the time delta might have # pushed the result into the next month. while result . month == cutoff . month + 1 : result -= datetime . timedelta ( weeks = 1 ) return result | Takes a SYSTEMTIME object such as retrieved from a TIME_ZONE_INFORMATION structure or call to GetTimeZoneInformation and interprets it based on the given year to identify the actual day . | 274 | 41 |
1,585 | def redirect ( pattern , to , permanent = True , locale_prefix = True , anchor = None , name = None , query = None , vary = None , cache_timeout = 12 , decorators = None , re_flags = None , to_args = None , to_kwargs = None , prepend_locale = True , merge_query = False ) : if permanent : redirect_class = HttpResponsePermanentRedirect else : redirect_class = HttpResponseRedirect if locale_prefix : pattern = pattern . lstrip ( '^/' ) pattern = LOCALE_RE + pattern if re_flags : pattern = '(?{})' . format ( re_flags ) + pattern view_decorators = [ ] if cache_timeout is not None : view_decorators . append ( cache_control_expires ( cache_timeout ) ) if vary : if isinstance ( vary , basestring ) : vary = [ vary ] view_decorators . append ( vary_on_headers ( * vary ) ) if decorators : if callable ( decorators ) : view_decorators . append ( decorators ) else : view_decorators . extend ( decorators ) def _view ( request , * args , * * kwargs ) : # don't want to have 'None' in substitutions kwargs = { k : v or '' for k , v in kwargs . items ( ) } args = [ x or '' for x in args ] # If it's a callable, call it and get the url out. if callable ( to ) : to_value = to ( request , * args , * * kwargs ) else : to_value = to if to_value . startswith ( '/' ) or HTTP_RE . match ( to_value ) : redirect_url = to_value else : try : redirect_url = reverse ( to_value , args = to_args , kwargs = to_kwargs ) except NoReverseMatch : # Assume it's a URL redirect_url = to_value if prepend_locale and redirect_url . startswith ( '/' ) and kwargs . get ( 'locale' ) : redirect_url = '/{locale}' + redirect_url . lstrip ( '/' ) # use info from url captures. if args or kwargs : redirect_url = strip_tags ( force_text ( redirect_url ) . format ( * args , * * kwargs ) ) if query : if merge_query : req_query = parse_qs ( request . META . get ( 'QUERY_STRING' ) ) req_query . update ( query ) querystring = urlencode ( req_query , doseq = True ) else : querystring = urlencode ( query , doseq = True ) elif query is None : querystring = request . META . get ( 'QUERY_STRING' ) else : querystring = '' if querystring : redirect_url = '?' . join ( [ redirect_url , querystring ] ) if anchor : redirect_url = '#' . join ( [ redirect_url , anchor ] ) if PROTOCOL_RELATIVE_RE . match ( redirect_url ) : redirect_url = '/' + redirect_url . lstrip ( '/' ) return redirect_class ( redirect_url ) # Apply decorators try : # Decorators should be applied in reverse order so that input # can be sent in the order your would write nested decorators # e.g. dec1(dec2(_view)) -> [dec1, dec2] for decorator in reversed ( view_decorators ) : _view = decorator ( _view ) except TypeError : log . exception ( 'decorators not iterable or does not contain ' 'callable items' ) return url ( pattern , _view , name = name ) | Return a url matcher suited for urlpatterns . | 844 | 11 |
1,586 | def __get_table_size ( self ) : length = ctypes . wintypes . DWORD ( ) res = self . method ( None , length , False ) if res != errors . ERROR_INSUFFICIENT_BUFFER : raise RuntimeError ( "Error getting table length (%d)" % res ) return length . value | Retrieve the size of the buffer needed by calling the method with a null pointer and length of zero . This should trigger an insufficient buffer error and return the size needed for the buffer . | 71 | 37 |
1,587 | def get_table ( self ) : buffer_length = self . __get_table_size ( ) returned_buffer_length = ctypes . wintypes . DWORD ( buffer_length ) buffer = ctypes . create_string_buffer ( buffer_length ) pointer_type = ctypes . POINTER ( self . structure ) table_p = ctypes . cast ( buffer , pointer_type ) res = self . method ( table_p , returned_buffer_length , False ) if res != errors . NO_ERROR : raise RuntimeError ( "Error retrieving table (%d)" % res ) return table_p . contents | Get the table | 133 | 3 |
1,588 | def entries ( self ) : table = self . get_table ( ) entries_array = self . row_structure * table . num_entries pointer_type = ctypes . POINTER ( entries_array ) return ctypes . cast ( table . entries , pointer_type ) . contents | Using the table structure return the array of entries based on the table size . | 62 | 15 |
1,589 | def owncloud ( ) : hostname = re . sub ( r'^[^@]+@' , '' , env . host ) # without username if any sitename = query_input ( question = '\nEnter site-name of Your Owncloud web service' , default = flo ( 'owncloud.{hostname}' ) , color = cyan ) username = env . user fabfile_data_dir = FABFILE_DATA_DIR print ( magenta ( ' install owncloud' ) ) repository = '' . join ( [ 'http://download.opensuse.org/repositories/' , 'isv:/ownCloud:/community/Debian_7.0/' , ] ) with hide ( 'output' ) : sudo ( flo ( 'wget -O - {repository}Release.key | apt-key add -' ) ) filename = '/etc/apt/sources.list.d/owncloud.list' sudo ( flo ( "echo 'deb {repository} /' > {filename}" ) ) sudo ( 'apt-get update' ) install_packages ( [ 'owncloud' , 'php5-fpm' , 'php-apc' , 'memcached' , 'php5-memcache' , ] ) # This server uses nginx. owncloud pulls apache2 => Disable apache2 print ( magenta ( ' disable apache' ) ) with hide ( 'output' ) : sudo ( 'service apache2 stop' ) sudo ( 'update-rc.d apache2 disable' ) print ( magenta ( ' nginx setup for owncloud' ) ) filename = 'owncloud_site_config.template' path = flo ( '{fabfile_data_dir}/files/etc/nginx/sites-available/{filename}' ) from_str = filled_out_template ( path , username = username , sitename = sitename , hostname = hostname ) with tempfile . NamedTemporaryFile ( prefix = filename ) as tmp_file : with open ( tmp_file . name , 'w' ) as fp : fp . write ( from_str ) put ( tmp_file . name , flo ( '/tmp/{filename}' ) ) to = flo ( '/etc/nginx/sites-available/{sitename}' ) sudo ( flo ( 'mv /tmp/{filename} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) sudo ( flo ( ' ' . join ( [ 'ln -snf ../sites-available/{sitename}' , '/etc/nginx/sites-enabled/{sitename}' , ] ) ) ) # php5 fpm fast-cgi config template = 'www.conf' to = flo ( '/etc/php5/fpm/pool.d/{template}' ) from_ = flo ( '{fabfile_data_dir}/files{to}' ) put ( from_ , '/tmp/' ) sudo ( flo ( 'mv /tmp/{template} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) template = 'php.ini' to = flo ( '/etc/php5/fpm/{template}' ) from_ = flo ( '{fabfile_data_dir}/files{to}' ) put ( from_ , '/tmp/' ) sudo ( flo ( 'mv /tmp/{template} {to}' ) ) sudo ( flo ( 'chown root.root {to}' ) ) sudo ( flo ( 'chmod 644 {to}' ) ) sudo ( 'service php5-fpm restart' ) sudo ( 'service nginx reload' ) | Set up owncloud . | 850 | 5 |
1,590 | def doctree_read_handler ( app , doctree ) : # noinspection PyProtectedMember docname = sys . _getframe ( 2 ) . f_locals [ 'docname' ] if docname . startswith ( '_partial' ) : app . env . metadata [ docname ] [ 'orphan' ] = True | Add orphan to metadata for partials | 75 | 7 |
1,591 | def autodoc_skip_member_handler ( app , what , name , obj , skip , options ) : if 'YAMLTokens' in name : return True return False | Skip un parseable functions . | 38 | 6 |
1,592 | def surfplot ( self , z , titletext ) : if self . latlon : plt . imshow ( z , extent = ( 0 , self . dx * z . shape [ 0 ] , self . dy * z . shape [ 1 ] , 0 ) ) #,interpolation='nearest' plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( z , extent = ( 0 , self . dx / 1000. * z . shape [ 0 ] , self . dy / 1000. * z . shape [ 1 ] , 0 ) ) #,interpolation='nearest' plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( ) plt . title ( titletext , fontsize = 16 ) | Plot if you want to - for troubleshooting - 1 figure | 240 | 12 |
1,593 | def twoSurfplots ( self ) : # Could more elegantly just call surfplot twice # And also could include xyzinterp as an option inside surfplot. # Noted here in case anyone wants to take that on in the future... plt . subplot ( 211 ) plt . title ( 'Load thickness, mantle equivalent [m]' , fontsize = 16 ) if self . latlon : plt . imshow ( self . qs / ( self . rho_m * self . g ) , extent = ( 0 , self . dx * self . qs . shape [ 0 ] , self . dy * self . qs . shape [ 1 ] , 0 ) ) plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( self . qs / ( self . rho_m * self . g ) , extent = ( 0 , self . dx / 1000. * self . qs . shape [ 0 ] , self . dy / 1000. * self . qs . shape [ 1 ] , 0 ) ) plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( ) plt . subplot ( 212 ) plt . title ( 'Deflection [m]' ) if self . latlon : plt . imshow ( self . w , extent = ( 0 , self . dx * self . w . shape [ 0 ] , self . dy * self . w . shape [ 1 ] , 0 ) ) plt . xlabel ( 'longitude [deg E]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'latitude [deg N]' , fontsize = 12 , fontweight = 'bold' ) else : plt . imshow ( self . w , extent = ( 0 , self . dx / 1000. * self . w . shape [ 0 ] , self . dy / 1000. * self . w . shape [ 1 ] , 0 ) ) plt . xlabel ( 'x [km]' , fontsize = 12 , fontweight = 'bold' ) plt . ylabel ( 'y [km]' , fontsize = 12 , fontweight = 'bold' ) plt . colorbar ( ) | Plot multiple subplot figure for 2D array | 550 | 9 |
1,594 | def outputDeflections ( self ) : try : # If wOutFile exists, has already been set by a setter self . wOutFile if self . Verbose : print ( "Output filename provided." ) # Otherwise, it needs to be set by an configuration file except : try : self . wOutFile = self . configGet ( "string" , "output" , "DeflectionOut" , optional = True ) except : # if there is no parsable output string, do not generate output; # this allows the user to leave the line blank and produce no output if self . Debug : print ( "No output filename provided:" ) print ( " not writing any deflection output to file" ) if self . wOutFile : if self . wOutFile [ - 4 : ] == '.npy' : from numpy import save save ( self . wOutFile , self . w ) else : from numpy import savetxt # Shouldn't need more than mm precision, at very most savetxt ( self . wOutFile , self . w , fmt = '%.3f' ) if self . Verbose : print ( "Saving deflections --> " + self . wOutFile ) | Outputs a grid of deflections if an output directory is defined in the configuration file If the filename given in the configuration file ends in . npy then a binary numpy grid will be exported . Otherwise an ASCII grid will be exported . | 252 | 48 |
1,595 | def TeArraySizeCheck ( self ) : # Only if they are both defined and are arrays # Both being arrays is a possible bug in this check routine that I have # intentionally introduced if type ( self . Te ) == np . ndarray and type ( self . qs ) == np . ndarray : # Doesn't touch non-arrays or 1D arrays if type ( self . Te ) is np . ndarray : if ( np . array ( self . Te . shape ) != np . array ( self . qs . shape ) ) . any ( ) : sys . exit ( "q0 and Te arrays have incompatible shapes. Exiting." ) else : if self . Debug : print ( "Te and qs array sizes pass consistency check" ) | Checks that Te and q0 array sizes are compatible For finite difference solution . | 160 | 16 |
1,596 | def FD ( self ) : if self . Verbose : print ( "Finite Difference Solution Technique" ) # Used to check for coeff_matrix here, but now doing so in self.bc_check() # called by f1d and f2d at the start # # Define a stress-based qs = q0 # But only if the latter has not already been defined # (e.g., by the getters and setters) try : self . qs except : self . qs = self . q0 . copy ( ) # Remove self.q0 to avoid issues with multiply-defined inputs # q0 is the parsable input to either a qs grid or contains (x,(y),q) del self . q0 # Give it x and y dimensions for help with plotting tools # (not implemented internally, but a help with external methods) self . x = np . arange ( self . dx / 2. , self . dx * self . qs . shape [ 0 ] , self . dx ) if self . dimension == 2 : self . y = np . arange ( self . dy / 2. , self . dy * self . qs . shape [ 1 ] , self . dy ) # Is there a solver defined try : self . Solver # See if it exists already except : # Well, will fail if it doesn't see this, maybe not the most reasonable # error message. if self . filename : self . Solver = self . configGet ( "string" , "numerical" , "Solver" ) else : sys . exit ( "No solver defined!" ) # Check consistency of size if coeff array was loaded if self . filename : # In the case that it is iterative, find the convergence criterion self . iterative_ConvergenceTolerance = self . configGet ( "float" , "numerical" , "ConvergenceTolerance" ) # Try to import Te grid or scalar for the finite difference solution try : self . Te = self . configGet ( "float" , "input" , "ElasticThickness" , optional = False ) if self . Te is None : Tepath = self . configGet ( "string" , "input" , "ElasticThickness" , optional = False ) self . Te = Tepath else : Tepath = None except : Tepath = self . configGet ( "string" , "input" , "ElasticThickness" , optional = False ) self . Te = Tepath if self . Te is None : if self . coeff_matrix is not None : pass else : # Have to bring this out here in case it was discovered in the # try statement that there is no value given sys . exit ( "No input elastic thickness or coefficient matrix supplied." ) # or if getter/setter if type ( self . Te ) == str : # Try to import Te grid or scalar for the finite difference solution Tepath = self . Te else : Tepath = None # in case no self.filename present (like for GRASS GIS) # If there is a Tepath, import Te # Assume that even if a coeff_matrix is defined # That the user wants Te if they gave the path if Tepath : self . Te = self . loadFile ( self . Te , close_on_fail = False ) if self . Te is None : print ( "Requested Te file is provided but cannot be located." ) print ( "No scalar elastic thickness is provided in configuration file" ) print ( "(Typo in path to input Te grid?)" ) if self . coeff_matrix is not None : print ( "But a coefficient matrix has been found." ) print ( "Calculations will be carried forward using it." ) else : print ( "Exiting." ) sys . exit ( ) # Check that Te is the proper size if it was loaded # Will be array if it was loaded if self . Te . any ( ) : self . TeArraySizeCheck ( ) | Set - up for the finite difference solution method | 866 | 9 |
1,597 | def SAS ( self ) : if self . x is None : self . x = np . arange ( self . dx / 2. , self . dx * self . qs . shape [ 0 ] , self . dx ) if self . filename : # Define the (scalar) elastic thickness self . Te = self . configGet ( "float" , "input" , "ElasticThickness" ) # Define a stress-based qs = q0 self . qs = self . q0 . copy ( ) # Remove self.q0 to avoid issues with multiply-defined inputs # q0 is the parsable input to either a qs grid or contains (x,(y),q) del self . q0 if self . dimension == 2 : if self . y is None : self . y = np . arange ( self . dy / 2. , self . dy * self . qs . shape [ 0 ] , self . dy ) # Define a stress-based qs = q0 # But only if the latter has not already been defined # (e.g., by the getters and setters) try : self . qs except : self . qs = self . q0 . copy ( ) # Remove self.q0 to avoid issues with multiply-defined inputs # q0 is the parsable input to either a qs grid or contains (x,(y),q) del self . q0 from scipy . special import kei | Set - up for the rectangularly - gridded superposition of analytical solutions method for solving flexure | 314 | 21 |
1,598 | def _c3_mro ( cls , abcs = None ) : for i , base in enumerate ( reversed ( cls . __bases__ ) ) : if hasattr ( base , '__abstractmethods__' ) : boundary = len ( cls . __bases__ ) - i break # Bases up to the last explicit ABC are considered first. else : boundary = 0 abcs = list ( abcs ) if abcs else [ ] explicit_bases = list ( cls . __bases__ [ : boundary ] ) abstract_bases = [ ] other_bases = list ( cls . __bases__ [ boundary : ] ) for base in abcs : if issubclass ( cls , base ) and not any ( issubclass ( b , base ) for b in cls . __bases__ ) : # If *cls* is the class that introduces behaviour described by # an ABC *base*, insert said ABC to its MRO. abstract_bases . append ( base ) for base in abstract_bases : abcs . remove ( base ) explicit_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in explicit_bases ] abstract_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in abstract_bases ] other_c3_mros = [ _c3_mro ( base , abcs = abcs ) for base in other_bases ] return _c3_merge ( [ [ cls ] ] + explicit_c3_mros + abstract_c3_mros + other_c3_mros + [ explicit_bases ] + [ abstract_bases ] + [ other_bases ] ) | Computes the method resolution order using extended C3 linearization . | 393 | 13 |
1,599 | def singledispatch ( function ) : # noqa registry = { } dispatch_cache = WeakKeyDictionary ( ) def ns ( ) : pass ns . cache_token = None # noinspection PyIncorrectDocstring def dispatch ( cls ) : """generic_func.dispatch(cls) -> <function implementation> Runs the dispatch algorithm to return the best available implementation for the given *cls* registered on *generic_func*. """ if ns . cache_token is not None : current_token = get_cache_token ( ) if ns . cache_token != current_token : dispatch_cache . clear ( ) ns . cache_token = current_token try : impl = dispatch_cache [ cls ] except KeyError : try : impl = registry [ cls ] except KeyError : impl = _find_impl ( cls , registry ) dispatch_cache [ cls ] = impl return impl # noinspection PyIncorrectDocstring def register ( cls , func = None ) : """generic_func.register(cls, func) -> func Registers a new implementation for the given *cls* on a *generic_func*. """ if func is None : return lambda f : register ( cls , f ) registry [ cls ] = func if ns . cache_token is None and hasattr ( cls , '__abstractmethods__' ) : ns . cache_token = get_cache_token ( ) dispatch_cache . clear ( ) return func def wrapper ( * args , * * kw ) : return dispatch ( args [ 0 ] . __class__ ) ( * args , * * kw ) registry [ object ] = function wrapper . register = register wrapper . dispatch = dispatch wrapper . registry = MappingProxyType ( registry ) wrapper . _clear_cache = dispatch_cache . clear update_wrapper ( wrapper , function ) return wrapper | Single - dispatch generic function decorator . | 400 | 8 |