idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
39,600 | def decode_cpu_id ( self , cpuid ) : ret = ( ) for i in cpuid . split ( ':' ) : ret += ( eval ( '0x' + i ) , ) return ret | Decode the CPU id into a string |
39,601 | def set_header ( self , port , channel ) : self . _port = port self . channel = channel self . _update_header ( ) | Set the port and channel for this packet . |
39,602 | def _set_data ( self , data ) : if type ( data ) == bytearray : self . _data = data elif type ( data ) == str : if sys . version_info < ( 3 , ) : self . _data = bytearray ( data ) else : self . _data = bytearray ( data . encode ( 'ISO-8859-1' ) ) elif type ( data ) == list or type ( data ) == tuple : self . _data = bytearray ( data ) elif sys . version_info >= ( 3 , ) and type ( data ) == bytes : self . _data = bytearray ( data ) else : raise Exception ( 'Data must be bytearray, string, list or tuple,' ' not {}' . format ( type ( data ) ) ) | Set the packet data |
39,603 | def take_off ( self , height = None , velocity = VELOCITY ) : if self . _is_flying : raise Exception ( 'Already flying' ) if not self . _cf . is_connected ( ) : raise Exception ( 'Crazyflie is not connected' ) self . _is_flying = True self . _reset_position_estimator ( ) self . _thread = _SetPointThread ( self . _cf ) self . _thread . start ( ) if height is None : height = self . default_height self . up ( height , velocity ) | Takes off that is starts the motors goes straigt up and hovers . Do not call this function if you use the with keyword . Take off is done automatically when the context is created . |
39,604 | def turn_left ( self , angle_degrees , rate = RATE ) : flight_time = angle_degrees / rate self . start_turn_left ( rate ) time . sleep ( flight_time ) self . stop ( ) | Turn to the left staying on the spot |
39,605 | def turn_right ( self , angle_degrees , rate = RATE ) : flight_time = angle_degrees / rate self . start_turn_right ( rate ) time . sleep ( flight_time ) self . stop ( ) | Turn to the right staying on the spot |
39,606 | def circle_left ( self , radius_m , velocity = VELOCITY , angle_degrees = 360.0 ) : distance = 2 * radius_m * math . pi * angle_degrees / 360.0 flight_time = distance / velocity self . start_circle_left ( radius_m , velocity ) time . sleep ( flight_time ) self . stop ( ) | Go in circle counter clock wise |
39,607 | def circle_right ( self , radius_m , velocity = VELOCITY , angle_degrees = 360.0 ) : distance = 2 * radius_m * math . pi * angle_degrees / 360.0 flight_time = distance / velocity self . start_circle_right ( radius_m , velocity ) time . sleep ( flight_time ) self . stop ( ) | Go in circle clock wise |
39,608 | def start_circle_left ( self , radius_m , velocity = VELOCITY ) : circumference = 2 * radius_m * math . pi rate = 360.0 * velocity / circumference self . _set_vel_setpoint ( velocity , 0.0 , 0.0 , - rate ) | Start a circular motion to the left . This function returns immediately . |
39,609 | def start_linear_motion ( self , velocity_x_m , velocity_y_m , velocity_z_m ) : self . _set_vel_setpoint ( velocity_x_m , velocity_y_m , velocity_z_m , 0.0 ) | Start a linear motion . This function returns immediately . |
39,610 | def set_vel_setpoint ( self , velocity_x , velocity_y , velocity_z , rate_yaw ) : self . _queue . put ( ( velocity_x , velocity_y , velocity_z , rate_yaw ) ) | Set the velocity setpoint to use for the future motion |
39,611 | def _param_callback ( self , name , value ) : print ( '{0}: {1}' . format ( name , value ) ) self . _param_check_list . remove ( name ) if len ( self . _param_check_list ) == 0 : print ( 'Have fetched all parameter values.' ) for g in self . _param_groups : self . _cf . param . remove_update_callback ( group = g , cb = self . _param_callback ) pkd = random . random ( ) print ( '' ) print ( 'Write: pid_attitude.pitch_kd={:.2f}' . format ( pkd ) ) self . _cf . param . add_update_callback ( group = 'pid_attitude' , name = 'pitch_kd' , cb = self . _a_pitch_kd_callback ) self . _cf . param . set_value ( 'pid_attitude.pitch_kd' , '{:.2f}' . format ( pkd ) ) | Generic callback registered for all the groups |
39,612 | def _a_pitch_kd_callback ( self , name , value ) : print ( 'Readback: {0}={1}' . format ( name , value ) ) self . _cf . close_link ( ) | Callback for pid_attitude . pitch_kd |
39,613 | def _scan_radio_channels ( self , cradio , start = 0 , stop = 125 ) : return list ( cradio . scan_channels ( start , stop , ( 0xff , ) ) ) | Scan for Crazyflies between the supplied channels . |
39,614 | def main ( client_secrets , scope , save , credentials , headless ) : flow = google_auth_oauthlib . flow . InstalledAppFlow . from_client_secrets_file ( client_secrets , scopes = scope ) if not headless : creds = flow . run_local_server ( ) else : creds = flow . run_console ( ) creds_data = { 'token' : creds . token , 'refresh_token' : creds . refresh_token , 'token_uri' : creds . token_uri , 'client_id' : creds . client_id , 'client_secret' : creds . client_secret , 'scopes' : creds . scopes } if save : del creds_data [ 'token' ] config_path = os . path . dirname ( credentials ) if config_path and not os . path . isdir ( config_path ) : os . makedirs ( config_path ) with open ( credentials , 'w' ) as outfile : json . dump ( creds_data , outfile ) click . echo ( 'credentials saved: %s' % credentials ) else : click . echo ( json . dumps ( creds_data ) ) | Command - line tool for obtaining authorization and credentials from a user . |
39,615 | def authorization_url ( self , ** kwargs ) : kwargs . setdefault ( 'access_type' , 'offline' ) url , state = self . oauth2session . authorization_url ( self . client_config [ 'auth_uri' ] , ** kwargs ) return url , state | Generates an authorization URL . |
39,616 | def fetch_token ( self , ** kwargs ) : kwargs . setdefault ( 'client_secret' , self . client_config [ 'client_secret' ] ) return self . oauth2session . fetch_token ( self . client_config [ 'token_uri' ] , ** kwargs ) | Completes the Authorization Flow and obtains an access token . |
39,617 | def run_console ( self , authorization_prompt_message = _DEFAULT_AUTH_PROMPT_MESSAGE , authorization_code_message = _DEFAULT_AUTH_CODE_MESSAGE , ** kwargs ) : kwargs . setdefault ( 'prompt' , 'consent' ) self . redirect_uri = self . _OOB_REDIRECT_URI auth_url , _ = self . authorization_url ( ** kwargs ) print ( authorization_prompt_message . format ( url = auth_url ) ) code = input ( authorization_code_message ) self . fetch_token ( code = code ) return self . credentials | Run the flow using the console strategy . |
39,618 | def run_local_server ( self , host = 'localhost' , port = 8080 , authorization_prompt_message = _DEFAULT_AUTH_PROMPT_MESSAGE , success_message = _DEFAULT_WEB_SUCCESS_MESSAGE , open_browser = True , ** kwargs ) : self . redirect_uri = 'http://{}:{}/' . format ( host , port ) auth_url , _ = self . authorization_url ( ** kwargs ) wsgi_app = _RedirectWSGIApp ( success_message ) local_server = wsgiref . simple_server . make_server ( host , port , wsgi_app , handler_class = _WSGIRequestHandler ) if open_browser : webbrowser . open ( auth_url , new = 1 , autoraise = True ) print ( authorization_prompt_message . format ( url = auth_url ) ) local_server . handle_request ( ) authorization_response = wsgi_app . last_request_uri . replace ( 'http' , 'https' ) self . fetch_token ( authorization_response = authorization_response ) return self . credentials | Run the flow using the server strategy . |
39,619 | def install ( self , opener ) : _opener = opener if isinstance ( opener , Opener ) else opener ( ) assert isinstance ( _opener , Opener ) , "Opener instance required" assert _opener . protocols , "must list one or more protocols" for protocol in _opener . protocols : self . _protocols [ protocol ] = _opener return opener | Install an opener . |
39,620 | def get_opener ( self , protocol ) : protocol = protocol or self . default_opener if self . load_extern : entry_point = next ( pkg_resources . iter_entry_points ( "fs.opener" , protocol ) , None ) else : entry_point = None if entry_point is None : if protocol in self . _protocols : opener_instance = self . _protocols [ protocol ] else : raise UnsupportedProtocol ( "protocol '{}' is not supported" . format ( protocol ) ) else : try : opener = entry_point . load ( ) except Exception as exception : raise EntryPointError ( "could not load entry point; {}" . format ( exception ) ) if not issubclass ( opener , Opener ) : raise EntryPointError ( "entry point did not return an opener" ) try : opener_instance = opener ( ) except Exception as exception : raise EntryPointError ( "could not instantiate opener; {}" . format ( exception ) ) return opener_instance | Get the opener class associated to a given protocol . |
39,621 | def open ( self , fs_url , writeable = True , create = False , cwd = "." , default_protocol = "osfs" , ) : if "://" not in fs_url : fs_url = "{}://{}" . format ( default_protocol , fs_url ) parse_result = parse_fs_url ( fs_url ) protocol = parse_result . protocol open_path = parse_result . path opener = self . get_opener ( protocol ) open_fs = opener . open_fs ( fs_url , parse_result , writeable , create , cwd ) return open_fs , open_path | Open a filesystem from a FS URL . |
39,622 | def manage_fs ( self , fs_url , create = False , writeable = False , cwd = "." , ) : from . . base import FS if isinstance ( fs_url , FS ) : yield fs_url else : _fs = self . open_fs ( fs_url , create = create , writeable = writeable , cwd = cwd ) try : yield _fs except : raise finally : _fs . close ( ) | Get a context manager to open and close a filesystem . |
39,623 | def copy_fs ( src_fs , dst_fs , walker = None , on_copy = None , workers = 0 , ) : return copy_dir ( src_fs , "/" , dst_fs , "/" , walker = walker , on_copy = on_copy , workers = workers ) | Copy the contents of one filesystem to another . |
39,624 | def copy_fs_if_newer ( src_fs , dst_fs , walker = None , on_copy = None , workers = 0 , ) : return copy_dir_if_newer ( src_fs , "/" , dst_fs , "/" , walker = walker , on_copy = on_copy , workers = workers ) | Copy the contents of one filesystem to another checking times . |
39,625 | def _source_is_newer ( src_fs , src_path , dst_fs , dst_path ) : try : if dst_fs . exists ( dst_path ) : namespace = ( "details" , "modified" ) src_modified = src_fs . getinfo ( src_path , namespace ) . modified if src_modified is not None : dst_modified = dst_fs . getinfo ( dst_path , namespace ) . modified return dst_modified is None or src_modified > dst_modified return True except FSError : return True | Determine if source file is newer than destination file . |
39,626 | def copy_file ( src_fs , src_path , dst_fs , dst_path , ) : with manage_fs ( src_fs , writeable = False ) as _src_fs : with manage_fs ( dst_fs , create = True ) as _dst_fs : if _src_fs is _dst_fs : _src_fs . copy ( src_path , dst_path , overwrite = True ) else : with _src_fs . lock ( ) , _dst_fs . lock ( ) : if _dst_fs . hassyspath ( dst_path ) : with _dst_fs . openbin ( dst_path , "w" ) as write_file : _src_fs . download ( src_path , write_file ) else : with _src_fs . openbin ( src_path ) as read_file : _dst_fs . upload ( dst_path , read_file ) | Copy a file from one filesystem to another . |
39,627 | def copy_file_internal ( src_fs , src_path , dst_fs , dst_path , ) : if src_fs is dst_fs : src_fs . copy ( src_path , dst_path , overwrite = True ) elif dst_fs . hassyspath ( dst_path ) : with dst_fs . openbin ( dst_path , "w" ) as write_file : src_fs . download ( src_path , write_file ) else : with src_fs . openbin ( src_path ) as read_file : dst_fs . upload ( dst_path , read_file ) | Low level copy that doesn t call manage_fs or lock . |
39,628 | def copy_file_if_newer ( src_fs , src_path , dst_fs , dst_path , ) : with manage_fs ( src_fs , writeable = False ) as _src_fs : with manage_fs ( dst_fs , create = True ) as _dst_fs : if _src_fs is _dst_fs : if _source_is_newer ( _src_fs , src_path , _dst_fs , dst_path ) : _src_fs . copy ( src_path , dst_path , overwrite = True ) return True else : return False else : with _src_fs . lock ( ) , _dst_fs . lock ( ) : if _source_is_newer ( _src_fs , src_path , _dst_fs , dst_path ) : copy_file_internal ( _src_fs , src_path , _dst_fs , dst_path ) return True else : return False | Copy a file from one filesystem to another checking times . |
39,629 | def copy_dir ( src_fs , src_path , dst_fs , dst_path , walker = None , on_copy = None , workers = 0 , ) : on_copy = on_copy or ( lambda * args : None ) walker = walker or Walker ( ) _src_path = abspath ( normpath ( src_path ) ) _dst_path = abspath ( normpath ( dst_path ) ) def src ( ) : return manage_fs ( src_fs , writeable = False ) def dst ( ) : return manage_fs ( dst_fs , create = True ) from . _bulk import Copier with src ( ) as _src_fs , dst ( ) as _dst_fs : with _src_fs . lock ( ) , _dst_fs . lock ( ) : _thread_safe = is_thread_safe ( _src_fs , _dst_fs ) with Copier ( num_workers = workers if _thread_safe else 0 ) as copier : _dst_fs . makedir ( _dst_path , recreate = True ) for dir_path , dirs , files in walker . walk ( _src_fs , _src_path ) : copy_path = combine ( _dst_path , frombase ( _src_path , dir_path ) ) for info in dirs : _dst_fs . makedir ( info . make_path ( copy_path ) , recreate = True ) for info in files : src_path = info . make_path ( dir_path ) dst_path = info . make_path ( copy_path ) copier . copy ( _src_fs , src_path , _dst_fs , dst_path ) on_copy ( _src_fs , src_path , _dst_fs , dst_path ) | Copy a directory from one filesystem to another . |
39,630 | def copy_dir_if_newer ( src_fs , src_path , dst_fs , dst_path , walker = None , on_copy = None , workers = 0 , ) : on_copy = on_copy or ( lambda * args : None ) walker = walker or Walker ( ) _src_path = abspath ( normpath ( src_path ) ) _dst_path = abspath ( normpath ( dst_path ) ) def src ( ) : return manage_fs ( src_fs , writeable = False ) def dst ( ) : return manage_fs ( dst_fs , create = True ) from . _bulk import Copier with src ( ) as _src_fs , dst ( ) as _dst_fs : with _src_fs . lock ( ) , _dst_fs . lock ( ) : _thread_safe = is_thread_safe ( _src_fs , _dst_fs ) with Copier ( num_workers = workers if _thread_safe else 0 ) as copier : _dst_fs . makedir ( _dst_path , recreate = True ) namespace = ( "details" , "modified" ) dst_state = { path : info for path , info in walker . info ( _dst_fs , _dst_path , namespace ) if info . is_file } src_state = [ ( path , info ) for path , info in walker . info ( _src_fs , _src_path , namespace ) ] for dir_path , copy_info in src_state : copy_path = combine ( _dst_path , frombase ( _src_path , dir_path ) ) if copy_info . is_dir : _dst_fs . makedir ( copy_path , recreate = True ) elif copy_info . is_file : try : src_modified = copy_info . modified dst_modified = dst_state [ dir_path ] . modified except KeyError : do_copy = True else : do_copy = ( src_modified is None or dst_modified is None or src_modified > dst_modified ) if do_copy : copier . copy ( _src_fs , dir_path , _dst_fs , copy_path ) on_copy ( _src_fs , dir_path , _dst_fs , copy_path ) | Copy a directory from one filesystem to another checking times . |
39,631 | def _parse_ftp_error ( error ) : code , _ , message = text_type ( error ) . partition ( " " ) return code , message | Extract code and message from ftp error . |
39,632 | def _open_ftp ( self ) : ftp = self . fs . _open_ftp ( ) ftp . voidcmd ( str ( "TYPE I" ) ) return ftp | Open an ftp object for the file . |
39,633 | def _parse_features ( cls , feat_response ) : features = { } if feat_response . split ( "-" ) [ 0 ] == "211" : for line in feat_response . splitlines ( ) : if line . startswith ( " " ) : key , _ , value = line [ 1 : ] . partition ( " " ) features [ key ] = value return features | Parse a dict of features from FTP feat response . |
39,634 | def _open_ftp ( self ) : _ftp = FTP ( ) _ftp . set_debuglevel ( 0 ) with ftp_errors ( self ) : _ftp . connect ( self . host , self . port , self . timeout ) _ftp . login ( self . user , self . passwd , self . acct ) self . _features = { } try : feat_response = _decode ( _ftp . sendcmd ( "FEAT" ) , "latin-1" ) except error_perm : self . encoding = "latin-1" else : self . _features = self . _parse_features ( feat_response ) self . encoding = "utf-8" if "UTF8" in self . _features else "latin-1" if not PY2 : _ftp . file = _ftp . sock . makefile ( "r" , encoding = self . encoding ) _ftp . encoding = self . encoding self . _welcome = _ftp . welcome return _ftp | Open a new ftp object . |
39,635 | def ftp_url ( self ) : url = ( "ftp://{}" . format ( self . host ) if self . port == 21 else "ftp://{}:{}" . format ( self . host , self . port ) ) return url | Get the FTP url this filesystem will open . |
39,636 | def _parse_ftp_time ( cls , time_text ) : try : tm_year = int ( time_text [ 0 : 4 ] ) tm_month = int ( time_text [ 4 : 6 ] ) tm_day = int ( time_text [ 6 : 8 ] ) tm_hour = int ( time_text [ 8 : 10 ] ) tm_min = int ( time_text [ 10 : 12 ] ) tm_sec = int ( time_text [ 12 : 14 ] ) except ValueError : return None epoch_time = calendar . timegm ( ( tm_year , tm_month , tm_day , tm_hour , tm_min , tm_sec ) ) return epoch_time | Parse a time from an ftp directory listing . |
39,637 | def write_zip ( src_fs , file , compression = zipfile . ZIP_DEFLATED , encoding = "utf-8" , walker = None , ) : _zip = zipfile . ZipFile ( file , mode = "w" , compression = compression , allowZip64 = True ) walker = walker or Walker ( ) with _zip : gen_walk = walker . info ( src_fs , namespaces = [ "details" , "stat" , "access" ] ) for path , info in gen_walk : zip_name = relpath ( path + "/" if info . is_dir else path ) if not six . PY3 : zip_name = zip_name . encode ( encoding , "replace" ) if info . has_namespace ( "stat" ) : st_mtime = info . get ( "stat" , "st_mtime" , None ) _mtime = time . localtime ( st_mtime ) zip_time = _mtime [ 0 : 6 ] else : mt = info . modified or datetime . utcnow ( ) zip_time = ( mt . year , mt . month , mt . day , mt . hour , mt . minute , mt . second ) zip_info = zipfile . ZipInfo ( zip_name , zip_time ) try : if info . permissions is not None : zip_info . external_attr = info . permissions . mode << 16 except MissingInfoNamespace : pass if info . is_dir : zip_info . external_attr |= 0x10 _zip . writestr ( zip_info , b"" ) else : try : sys_path = src_fs . getsyspath ( path ) except NoSysPath : _zip . writestr ( zip_info , src_fs . readbytes ( path ) ) else : _zip . write ( sys_path , zip_name ) | Write the contents of a filesystem to a zip file . |
39,638 | def write_tar ( src_fs , file , compression = None , encoding = "utf-8" , walker = None , ) : type_map = { ResourceType . block_special_file : tarfile . BLKTYPE , ResourceType . character : tarfile . CHRTYPE , ResourceType . directory : tarfile . DIRTYPE , ResourceType . fifo : tarfile . FIFOTYPE , ResourceType . file : tarfile . REGTYPE , ResourceType . socket : tarfile . AREGTYPE , ResourceType . symlink : tarfile . SYMTYPE , ResourceType . unknown : tarfile . AREGTYPE , } tar_attr = [ ( "uid" , "uid" ) , ( "gid" , "gid" ) , ( "uname" , "user" ) , ( "gname" , "group" ) ] mode = "w:{}" . format ( compression or "" ) if isinstance ( file , ( six . text_type , six . binary_type ) ) : _tar = tarfile . open ( file , mode = mode ) else : _tar = tarfile . open ( fileobj = file , mode = mode ) current_time = time . time ( ) walker = walker or Walker ( ) with _tar : gen_walk = walker . info ( src_fs , namespaces = [ "details" , "stat" , "access" ] ) for path , info in gen_walk : tar_name = relpath ( path ) if not six . PY3 : tar_name = tar_name . encode ( encoding , "replace" ) tar_info = tarfile . TarInfo ( tar_name ) if info . has_namespace ( "stat" ) : mtime = info . get ( "stat" , "st_mtime" , current_time ) else : mtime = info . modified or current_time if isinstance ( mtime , datetime ) : mtime = datetime_to_epoch ( mtime ) if isinstance ( mtime , float ) : mtime = int ( mtime ) tar_info . mtime = mtime for tarattr , infoattr in tar_attr : if getattr ( info , infoattr , None ) is not None : setattr ( tar_info , tarattr , getattr ( info , infoattr , None ) ) if info . has_namespace ( "access" ) : tar_info . mode = getattr ( info . permissions , "mode" , 0o420 ) if info . is_dir : tar_info . type = tarfile . DIRTYPE _tar . addfile ( tar_info ) else : tar_info . type = type_map . get ( info . type , tarfile . REGTYPE ) tar_info . size = info . size with src_fs . openbin ( path ) as bin_file : _tar . 
addfile ( tar_info , bin_file ) | Write the contents of a filesystem to a tar file . |
39,639 | def count_lines ( self ) : lines = 0 non_blank = 0 for path , info in self . _make_iter ( ) : if info . is_file : for line in self . fs . open ( path , "rb" ) : lines += 1 if line . rstrip ( ) : non_blank += 1 return LineCounts ( lines = lines , non_blank = non_blank ) | Count the lines in the matched files . |
39,640 | def remove ( self ) : removes = 0 for path , info in self . _make_iter ( search = "depth" ) : if info . is_dir : self . fs . removetree ( path ) else : self . fs . remove ( path ) removes += 1 return removes | Removed all matched paths . |
39,641 | def move_file ( src_fs , src_path , dst_fs , dst_path , ) : with manage_fs ( src_fs ) as _src_fs : with manage_fs ( dst_fs , create = True ) as _dst_fs : if _src_fs is _dst_fs : _src_fs . move ( src_path , dst_path , overwrite = True ) else : with _src_fs . lock ( ) , _dst_fs . lock ( ) : copy_file ( _src_fs , src_path , _dst_fs , dst_path ) _src_fs . remove ( src_path ) | Move a file from one filesystem to another . |
39,642 | def move_dir ( src_fs , src_path , dst_fs , dst_path , workers = 0 , ) : def src ( ) : return manage_fs ( src_fs , writeable = False ) def dst ( ) : return manage_fs ( dst_fs , create = True ) with src ( ) as _src_fs , dst ( ) as _dst_fs : with _src_fs . lock ( ) , _dst_fs . lock ( ) : _dst_fs . makedir ( dst_path , recreate = True ) copy_dir ( src_fs , src_path , dst_fs , dst_path , workers = workers ) _src_fs . removetree ( src_path ) | Move a directory from one filesystem to another . |
39,643 | def recursepath ( path , reverse = False ) : if path in "/" : return [ "/" ] path = abspath ( normpath ( path ) ) + "/" paths = [ "/" ] find = path . find append = paths . append pos = 1 len_path = len ( path ) while pos < len_path : pos = find ( "/" , pos ) append ( path [ : pos ] ) pos += 1 if reverse : return paths [ : : - 1 ] return paths | Get intermediate paths from the root to the given path . |
39,644 | def join ( * paths ) : absolute = False relpaths = [ ] for p in paths : if p : if p [ 0 ] == "/" : del relpaths [ : ] absolute = True relpaths . append ( p ) path = normpath ( "/" . join ( relpaths ) ) if absolute : path = abspath ( path ) return path | Join any number of paths together . |
39,645 | def combine ( path1 , path2 ) : if not path1 : return path2 . lstrip ( ) return "{}/{}" . format ( path1 . rstrip ( "/" ) , path2 . lstrip ( "/" ) ) | Join two paths together . |
39,646 | def parts ( path ) : _path = normpath ( path ) components = _path . strip ( "/" ) _parts = [ "/" if _path . startswith ( "/" ) else "./" ] if components : _parts += components . split ( "/" ) return _parts | Split a path in to its component parts . |
39,647 | def splitext ( path ) : parent_path , pathname = split ( path ) if pathname . startswith ( "." ) and pathname . count ( "." ) == 1 : return path , "" if "." not in pathname : return path , "" pathname , ext = pathname . rsplit ( "." , 1 ) path = join ( parent_path , pathname ) return path , "." + ext | Split the extension from the path . |
39,648 | def isbase ( path1 , path2 ) : _path1 = forcedir ( abspath ( path1 ) ) _path2 = forcedir ( abspath ( path2 ) ) return _path2 . startswith ( _path1 ) | Check if path1 is a base of path2 . |
39,649 | def isparent ( path1 , path2 ) : bits1 = path1 . split ( "/" ) bits2 = path2 . split ( "/" ) while bits1 and bits1 [ - 1 ] == "" : bits1 . pop ( ) if len ( bits1 ) > len ( bits2 ) : return False for ( bit1 , bit2 ) in zip ( bits1 , bits2 ) : if bit1 != bit2 : return False return True | Check if path1 is a parent directory of path2 . |
39,650 | def frombase ( path1 , path2 ) : if not isparent ( path1 , path2 ) : raise ValueError ( "path1 must be a prefix of path2" ) return path2 [ len ( path1 ) : ] | Get the final path of path2 that isn t in path1 . |
39,651 | def relativefrom ( base , path ) : base_parts = list ( iteratepath ( base ) ) path_parts = list ( iteratepath ( path ) ) common = 0 for component_a , component_b in zip ( base_parts , path_parts ) : if component_a != component_b : break common += 1 return "/" . join ( [ ".." ] * ( len ( base_parts ) - common ) + path_parts [ common : ] ) | Return a path relative from a given base path . |
39,652 | def unwrap_errors ( path_replace ) : try : yield except errors . ResourceError as e : if hasattr ( e , "path" ) : if isinstance ( path_replace , Mapping ) : e . path = path_replace . get ( e . path , e . path ) else : e . path = path_replace reraise ( type ( e ) , e ) | Get a context to map OS errors to their fs . errors counterpart . |
39,653 | def match ( pattern , name ) : try : re_pat = _PATTERN_CACHE [ ( pattern , True ) ] except KeyError : res = "(?ms)" + _translate ( pattern ) + r'\Z' _PATTERN_CACHE [ ( pattern , True ) ] = re_pat = re . compile ( res ) return re_pat . match ( name ) is not None | Test whether a name matches a wildcard pattern . |
39,654 | def match_any ( patterns , name ) : if not patterns : return True return any ( match ( pattern , name ) for pattern in patterns ) | Test if a name matches any of a list of patterns . |
39,655 | def get_matcher ( patterns , case_sensitive ) : if not patterns : return lambda name : True if case_sensitive : return partial ( match_any , patterns ) else : return partial ( imatch_any , patterns ) | Get a callable that matches names against the given patterns . |
39,656 | def _translate ( pattern , case_sensitive = True ) : if not case_sensitive : pattern = pattern . lower ( ) i , n = 0 , len ( pattern ) res = "" while i < n : c = pattern [ i ] i = i + 1 if c == "*" : res = res + "[^/]*" elif c == "?" : res = res + "." elif c == "[" : j = i if j < n and pattern [ j ] == "!" : j = j + 1 if j < n and pattern [ j ] == "]" : j = j + 1 while j < n and pattern [ j ] != "]" : j = j + 1 if j >= n : res = res + "\\[" else : stuff = pattern [ i : j ] . replace ( "\\" , "\\\\" ) i = j + 1 if stuff [ 0 ] == "!" : stuff = "^" + stuff [ 1 : ] elif stuff [ 0 ] == "^" : stuff = "\\" + stuff res = "%s[%s]" % ( res , stuff ) else : res = res + re . escape ( c ) return res | Translate a wildcard pattern to a regular expression . |
39,657 | def _delegate ( self , path ) : _path = forcedir ( abspath ( normpath ( path ) ) ) is_mounted = _path . startswith for mount_path , fs in self . mounts : if is_mounted ( mount_path ) : return fs , _path [ len ( mount_path ) : ] . rstrip ( "/" ) return self . default_fs , path | Get the delegate FS for a given path . |
39,658 | def mount ( self , path , fs ) : if isinstance ( fs , text_type ) : from . opener import open_fs fs = open_fs ( fs ) if not isinstance ( fs , FS ) : raise TypeError ( "fs argument must be an FS object or a FS URL" ) if fs is self : raise ValueError ( "Unable to mount self" ) _path = forcedir ( abspath ( normpath ( path ) ) ) for mount_path , _ in self . mounts : if _path . startswith ( mount_path ) : raise MountError ( "mount point overlaps existing mount" ) self . mounts . append ( ( _path , fs ) ) self . default_fs . makedirs ( _path , recreate = True ) | Mounts a host FS object on a given path . |
39,659 | def start ( self ) : if self . num_workers : self . queue = Queue ( maxsize = self . num_workers ) self . workers = [ _Worker ( self ) for _ in range ( self . num_workers ) ] for worker in self . workers : worker . start ( ) self . running = True | Start the workers . |
39,660 | def copy ( self , src_fs , src_path , dst_fs , dst_path ) : if self . queue is None : copy_file_internal ( src_fs , src_path , dst_fs , dst_path ) else : src_file = src_fs . openbin ( src_path , "r" ) try : dst_file = dst_fs . openbin ( dst_path , "w" ) except Exception : src_file . close ( ) raise task = _CopyTask ( src_file , dst_file ) self . queue . put ( task ) | Copy a file from one fs to another . |
39,661 | def add_fs ( self , name , fs , write = False , priority = 0 ) : if isinstance ( fs , text_type ) : fs = open_fs ( fs ) if not isinstance ( fs , FS ) : raise TypeError ( "fs argument should be an FS object or FS URL" ) self . _filesystems [ name ] = _PrioritizedFS ( priority = ( priority , self . _sort_index ) , fs = fs ) self . _sort_index += 1 self . _resort ( ) if write : self . write_fs = fs self . _write_fs_name = name | Add a filesystem to the MultiFS . |
39,662 | def _delegate ( self , path ) : for _name , fs in self . iterate_fs ( ) : if fs . exists ( path ) : return fs return None | Get a filesystem which has a given path . |
39,663 | def _delegate_required ( self , path ) : fs = self . _delegate ( path ) if fs is None : raise errors . ResourceNotFound ( path ) return fs | Check that there is a filesystem with the given path . |
39,664 | def _writable_required ( self , path ) : if self . write_fs is None : raise errors . ResourceReadOnly ( path ) return self . write_fs | Check that path is writeable . |
39,665 | def make_stream ( name , bin_file , mode = "r" , buffering = - 1 , encoding = None , errors = None , newline = "" , line_buffering = False , ** kwargs ) : reading = "r" in mode writing = "w" in mode appending = "a" in mode binary = "b" in mode if "+" in mode : reading = True writing = True encoding = None if binary else ( encoding or "utf-8" ) io_object = RawWrapper ( bin_file , mode = mode , name = name ) if buffering >= 0 : if reading and writing : io_object = io . BufferedRandom ( typing . cast ( io . RawIOBase , io_object ) , buffering or io . DEFAULT_BUFFER_SIZE , ) elif reading : io_object = io . BufferedReader ( typing . cast ( io . RawIOBase , io_object ) , buffering or io . DEFAULT_BUFFER_SIZE , ) elif writing or appending : io_object = io . BufferedWriter ( typing . cast ( io . RawIOBase , io_object ) , buffering or io . DEFAULT_BUFFER_SIZE , ) if not binary : io_object = io . TextIOWrapper ( io_object , encoding = encoding , errors = errors , newline = newline , line_buffering = line_buffering , ) return io_object | Take a Python 2 . x binary file and return an IO Stream . |
39,666 | def line_iterator ( readable_file , size = None ) : read = readable_file . read line = [ ] byte = b"1" if size is None or size < 0 : while byte : byte = read ( 1 ) line . append ( byte ) if byte in b"\n" : yield b"" . join ( line ) del line [ : ] else : while byte and size : byte = read ( 1 ) size -= len ( byte ) line . append ( byte ) if byte in b"\n" or not size : yield b"" . join ( line ) del line [ : ] | Iterate over the lines of a file . |
39,667 | def validate_openbin_mode ( mode , _valid_chars = frozenset ( "rwxab+" ) ) : if "t" in mode : raise ValueError ( "text mode not valid in openbin" ) if not mode : raise ValueError ( "mode must not be empty" ) if mode [ 0 ] not in "rwxa" : raise ValueError ( "mode must start with 'r', 'w', 'a' or 'x'" ) if not _valid_chars . issuperset ( mode ) : raise ValueError ( "mode '{}' contains invalid characters" . format ( mode ) ) | Check mode parameter of ~fs . base . FS . openbin is valid . |
39,668 | def _compare ( info1 , info2 ) : if info1 . size != info2 . size : return True date1 = info1 . modified date2 = info2 . modified return date1 is None or date2 is None or date1 > date2 | Compare two Info objects to see if they should be copied . |
39,669 | def parse_fs_url ( fs_url ) : match = _RE_FS_URL . match ( fs_url ) if match is None : raise ParseError ( "{!r} is not a fs2 url" . format ( fs_url ) ) fs_name , credentials , url1 , url2 , path = match . groups ( ) if not credentials : username = None password = None url = url2 else : username , _ , password = credentials . partition ( ":" ) username = unquote ( username ) password = unquote ( password ) url = url1 url , has_qs , qs = url . partition ( "?" ) resource = unquote ( url ) if has_qs : _params = parse_qs ( qs , keep_blank_values = True ) params = { k : unquote ( v [ 0 ] ) for k , v in six . iteritems ( _params ) } else : params = { } return ParseResult ( fs_name , username , password , resource , params , path ) | Parse a Filesystem URL and return a ParseResult . |
39,670 | def seek ( self , offset , whence = Seek . set ) : _whence = int ( whence ) if _whence == Seek . current : offset += self . _pos if _whence == Seek . current or _whence == Seek . set : if offset < 0 : raise ValueError ( "Negative seek position {}" . format ( offset ) ) elif _whence == Seek . end : if offset > 0 : raise ValueError ( "Positive seek position {}" . format ( offset ) ) offset += self . _end else : raise ValueError ( "Invalid whence ({}, should be {}, {} or {})" . format ( _whence , Seek . set , Seek . current , Seek . end ) ) if offset < self . _pos : self . _f = self . _zip . open ( self . name ) self . _pos = 0 self . read ( offset - self . _pos ) return self . _pos | Change stream position . |
39,671 | def _iter_walk ( self , fs , path , namespaces = None , ) : if self . search == "breadth" : return self . _walk_breadth ( fs , path , namespaces = namespaces ) else : return self . _walk_depth ( fs , path , namespaces = namespaces ) | Get the walk generator . |
39,672 | def _check_open_dir ( self , fs , path , info ) : if self . exclude_dirs is not None and fs . match ( self . exclude_dirs , info . name ) : return False if self . filter_dirs is not None and not fs . match ( self . filter_dirs , info . name ) : return False return self . check_open_dir ( fs , path , info ) | Check if a directory should be considered in the walk . |
39,673 | def _check_scan_dir ( self , fs , path , info , depth ) : if self . max_depth is not None and depth >= self . max_depth : return False return self . check_scan_dir ( fs , path , info ) | Check if a directory contents should be scanned . |
39,674 | def check_file ( self , fs , info ) : if self . exclude is not None and fs . match ( self . exclude , info . name ) : return False return fs . match ( self . filter , info . name ) | Check if a filename should be included . |
39,675 | def _scan ( self , fs , dir_path , namespaces = None , ) : try : for info in fs . scandir ( dir_path , namespaces = namespaces ) : yield info except FSError as error : if not self . on_error ( dir_path , error ) : six . reraise ( type ( error ) , error ) | Get an iterator of Info objects for a directory path . |
39,676 | def _make_walker ( self , * args , ** kwargs ) : walker = self . walker_class ( * args , ** kwargs ) return walker | Create a walker instance . |
39,677 | def dirs ( self , path = "/" , ** kwargs ) : walker = self . _make_walker ( ** kwargs ) return walker . dirs ( self . fs , path = path ) | Walk a filesystem yielding absolute paths to directories . |
39,678 | def info ( self , path = "/" , namespaces = None , ** kwargs ) : walker = self . _make_walker ( ** kwargs ) return walker . info ( self . fs , path = path , namespaces = namespaces ) | Walk a filesystem yielding path and Info of resources . |
39,679 | def remove_empty ( fs , path ) : path = abspath ( normpath ( path ) ) try : while path not in ( "" , "/" ) : fs . removedir ( path ) path = dirname ( path ) except DirectoryNotEmpty : pass | Remove all empty parents . |
39,680 | def copy_file_data ( src_file , dst_file , chunk_size = None ) : _chunk_size = 1024 * 1024 if chunk_size is None else chunk_size read = src_file . read write = dst_file . write for chunk in iter ( lambda : read ( _chunk_size ) or None , None ) : write ( chunk ) | Copy data from one file object to another . |
39,681 | def get_intermediate_dirs ( fs , dir_path ) : intermediates = [ ] with fs . lock ( ) : for path in recursepath ( abspath ( dir_path ) , reverse = True ) : try : resource = fs . getinfo ( path ) except ResourceNotFound : intermediates . append ( abspath ( path ) ) else : if resource . is_dir : break raise errors . DirectoryExpected ( dir_path ) return intermediates [ : : - 1 ] [ : - 1 ] | Get a list of non - existing intermediate directories . |
39,682 | def prettify_json ( json_string ) : try : data = json . loads ( json_string ) html = '<pre>' + json . dumps ( data , sort_keys = True , indent = 4 ) + '</pre>' except : html = json_string return mark_safe ( html ) | Given a JSON string it returns it as a safe formatted HTML |
39,683 | def purge_objects ( self , request ) : def truncate_table ( model ) : if settings . TRUNCATE_TABLE_SQL_STATEMENT : from django . db import connection sql = settings . TRUNCATE_TABLE_SQL_STATEMENT . format ( db_table = model . _meta . db_table ) cursor = connection . cursor ( ) cursor . execute ( sql ) else : model . objects . all ( ) . delete ( ) modeladmin = self opts = modeladmin . model . _meta if not request . user . is_superuser : raise PermissionDenied if not modeladmin . has_delete_permission ( request ) : raise PermissionDenied if request . method == 'POST' : if 'btn-confirm' in request . POST : try : n = modeladmin . model . objects . count ( ) truncate_table ( modeladmin . model ) modeladmin . message_user ( request , _ ( "Successfully removed %d rows" % n ) , messages . SUCCESS ) except Exception as e : modeladmin . message_user ( request , _ ( u'ERROR' ) + ': %r' % e , messages . ERROR ) else : modeladmin . message_user ( request , _ ( "Action cancelled by user" ) , messages . SUCCESS ) return HttpResponseRedirect ( reverse ( 'admin:%s_%s_changelist' % ( opts . app_label , opts . model_name ) ) ) context = { "title" : _ ( "Purge all %s ... are you sure?" ) % opts . verbose_name_plural , "opts" : opts , "app_label" : opts . app_label , } return render ( request , 'admin/easyaudit/purge_confirmation.html' , context ) | Removes all objects in this table . This action first displays a confirmation page ; next it deletes all objects and redirects back to the change list . |
39,684 | def get_model_list ( class_list ) : for idx , item in enumerate ( class_list ) : if isinstance ( item , six . string_types ) : model_class = apps . get_model ( item ) class_list [ idx ] = model_class | Receives a list of strings with app_name . model_name format and turns them into classes . If an item is already a class it ignores it . |
39,685 | def should_audit ( instance ) : for unregistered_class in UNREGISTERED_CLASSES : if isinstance ( instance , unregistered_class ) : return False if len ( REGISTERED_CLASSES ) > 0 : for registered_class in REGISTERED_CLASSES : if isinstance ( instance , registered_class ) : break else : return False return True | Returns True or False to indicate whether the instance should be audited or not depending on the project settings . |
39,686 | def _m2m_rev_field_name ( model1 , model2 ) : m2m_field_names = [ rel . get_accessor_name ( ) for rel in model1 . _meta . get_fields ( ) if rel . many_to_many and rel . auto_created and rel . related_model == model2 ] return m2m_field_names [ 0 ] | Gets the name of the reverse m2m accessor from model1 to model2 |
39,687 | def new_query ( ) : N . nvmlInit ( ) def _decode ( b ) : if isinstance ( b , bytes ) : return b . decode ( ) return b def get_gpu_info ( handle ) : def get_process_info ( nv_process ) : process = { } ps_process = psutil . Process ( pid = nv_process . pid ) process [ 'username' ] = ps_process . username ( ) _cmdline = ps_process . cmdline ( ) if not _cmdline : process [ 'command' ] = '?' else : process [ 'command' ] = os . path . basename ( _cmdline [ 0 ] ) process [ 'gpu_memory_usage' ] = nv_process . usedGpuMemory // MB process [ 'pid' ] = nv_process . pid return process name = _decode ( N . nvmlDeviceGetName ( handle ) ) uuid = _decode ( N . nvmlDeviceGetUUID ( handle ) ) try : temperature = N . nvmlDeviceGetTemperature ( handle , N . NVML_TEMPERATURE_GPU ) except N . NVMLError : temperature = None try : memory = N . nvmlDeviceGetMemoryInfo ( handle ) except N . NVMLError : memory = None try : utilization = N . nvmlDeviceGetUtilizationRates ( handle ) except N . NVMLError : utilization = None try : power = N . nvmlDeviceGetPowerUsage ( handle ) except N . NVMLError : power = None try : power_limit = N . nvmlDeviceGetEnforcedPowerLimit ( handle ) except N . NVMLError : power_limit = None try : nv_comp_processes = N . nvmlDeviceGetComputeRunningProcesses ( handle ) except N . NVMLError : nv_comp_processes = None try : nv_graphics_processes = N . nvmlDeviceGetGraphicsRunningProcesses ( handle ) except N . NVMLError : nv_graphics_processes = None if nv_comp_processes is None and nv_graphics_processes is None : processes = None else : processes = [ ] nv_comp_processes = nv_comp_processes or [ ] nv_graphics_processes = nv_graphics_processes or [ ] for nv_process in nv_comp_processes + nv_graphics_processes : try : process = get_process_info ( nv_process ) processes . append ( process ) except psutil . NoSuchProcess : pass index = N . 
nvmlDeviceGetIndex ( handle ) gpu_info = { 'index' : index , 'uuid' : uuid , 'name' : name , 'temperature.gpu' : temperature , 'utilization.gpu' : utilization . gpu if utilization else None , 'power.draw' : power // 1000 if power is not None else None , 'enforced.power.limit' : power_limit // 1000 if power_limit is not None else None , 'memory.used' : memory . used // MB if memory else None , 'memory.total' : memory . total // MB if memory else None , 'processes' : processes , } return gpu_info gpu_list = [ ] device_count = N . nvmlDeviceGetCount ( ) for index in range ( device_count ) : handle = N . nvmlDeviceGetHandleByIndex ( index ) gpu_info = get_gpu_info ( handle ) gpu_stat = GPUStat ( gpu_info ) gpu_list . append ( gpu_stat ) try : driver_version = _decode ( N . nvmlSystemGetDriverVersion ( ) ) except N . NVMLError : driver_version = None N . nvmlShutdown ( ) return GPUStatCollection ( gpu_list , driver_version = driver_version ) | Query the information of all the GPUs on local machine |
39,688 | def print_gpustat ( json = False , debug = False , ** kwargs ) : try : gpu_stats = GPUStatCollection . new_query ( ) except Exception as e : sys . stderr . write ( 'Error on querying NVIDIA devices.' ' Use --debug flag for details\n' ) if debug : try : import traceback traceback . print_exc ( file = sys . stderr ) except Exception : raise e sys . exit ( 1 ) if json : gpu_stats . print_json ( sys . stdout ) else : gpu_stats . print_formatted ( sys . stdout , ** kwargs ) | Display the GPU query results into standard output . |
39,689 | def fetch_list ( cls , client , ids ) : results = [ ] request_url = "https://api.robinhood.com/options/instruments/" for _ids in chunked_list ( ids , 50 ) : params = { "ids" : "," . join ( _ids ) } data = client . get ( request_url , params = params ) partial_results = data [ "results" ] while data [ "next" ] : data = client . get ( data [ "next" ] ) partial_results . extend ( data [ "results" ] ) results . extend ( partial_results ) return results | fetch instruments by ids |
39,690 | def in_chain ( cls , client , chain_id , expiration_dates = [ ] ) : request_url = "https://api.robinhood.com/options/instruments/" params = { "chain_id" : chain_id , "expiration_dates" : "," . join ( expiration_dates ) } data = client . get ( request_url , params = params ) results = data [ 'results' ] while data [ 'next' ] : data = client . get ( data [ 'next' ] ) results . extend ( data [ 'results' ] ) return results | fetch all option instruments in an options chain - expiration_dates = optionally scope |
39,691 | def generate_by_deltas ( cls , options , width , put_inner_lte_delta , call_inner_lte_delta ) : raise Exception ( "Not Implemented starting at the 0.3.0 release" ) put_options_unsorted = list ( filter ( lambda x : x [ 'type' ] == 'put' , options ) ) put_options = cls . sort_by_strike_price ( put_options_unsorted ) deltas_as_strings = [ x [ 'delta' ] for x in put_options ] deltas = cls . strings_to_np_array ( deltas_as_strings ) put_inner_index = np . argmin ( deltas >= put_inner_lte_delta ) - 1 put_outer_index = put_inner_index - width put_inner_leg = cls . gen_leg ( put_options [ put_inner_index ] [ "instrument" ] , "sell" ) put_outer_leg = cls . gen_leg ( put_options [ put_outer_index ] [ "instrument" ] , "buy" ) call_options_unsorted = list ( filter ( lambda x : x [ 'type' ] == 'call' , options ) ) call_options = cls . sort_by_strike_price ( call_options_unsorted ) deltas_as_strings = [ x [ 'delta' ] for x in call_options ] x = np . array ( deltas_as_strings ) deltas = x . astype ( np . float ) where_are_NaNs = np . isnan ( deltas ) deltas [ where_are_NaNs ] = 1.0 call_inner_index = np . argmax ( deltas <= call_inner_lte_delta ) call_outer_index = call_inner_index + width call_inner_leg = cls . gen_leg ( call_options [ call_inner_index ] [ "instrument" ] , "sell" ) call_outer_leg = cls . gen_leg ( call_options [ call_outer_index ] [ "instrument" ] , "buy" ) legs = [ put_outer_leg , put_inner_leg , call_inner_leg , call_outer_leg ] price = ( - Decimal ( put_options [ put_outer_index ] [ 'adjusted_mark_price' ] ) + Decimal ( put_options [ put_inner_index ] [ 'adjusted_mark_price' ] ) + Decimal ( call_options [ call_inner_index ] [ 'adjusted_mark_price' ] ) - Decimal ( call_options [ call_outer_index ] [ 'adjusted_mark_price' ] ) ) ic_options = [ put_options [ put_outer_index ] , put_options [ put_inner_index ] , call_options [ call_inner_index ] , call_options [ call_outer_index ] ] max_bid_ask_spread = cls . 
max_bid_ask_spread ( ic_options ) return { "legs" : legs , "price" : price , "max_bid_ask_spread" : max_bid_ask_spread } | totally just playing around ideas for the API . |
39,692 | def fetch ( cls , client , _id , symbol ) : url = "https://api.robinhood.com/options/chains/" params = { "equity_instrument_ids" : _id , "state" : "active" , "tradability" : "tradable" } data = client . get ( url , params = params ) def filter_func ( x ) : return x [ "symbol" ] == symbol results = list ( filter ( filter_func , data [ "results" ] ) ) return results [ 0 ] | fetch option chain for instrument |
39,693 | def authenticate ( self ) : if "username" in self . options and "password" in self . options : self . login_oauth2 ( self . options [ "username" ] , self . options [ "password" ] , self . options . get ( 'mfa_code' ) ) elif "access_token" in self . options : if "refresh_token" in self . options : self . access_token = self . options [ "access_token" ] self . refresh_token = self . options [ "refresh_token" ] self . __set_account_info ( ) else : self . authenticated = False return self . authenticated | Authenticate using data in options |
39,694 | def get ( self , url = None , params = None , retry = True ) : headers = self . _gen_headers ( self . access_token , url ) attempts = 1 while attempts <= HTTP_ATTEMPTS_MAX : try : res = requests . get ( url , headers = headers , params = params , timeout = 15 , verify = self . certs ) res . raise_for_status ( ) return res . json ( ) except requests . exceptions . RequestException as e : attempts += 1 if res . status_code in [ 400 ] : raise e elif retry and res . status_code in [ 403 ] : self . relogin_oauth2 ( ) | Execute HTTP GET |
39,695 | def _gen_headers ( self , bearer , url ) : headers = { "Accept" : "*/*" , "Accept-Encoding" : "gzip, deflate" , "Accept-Language" : ( "en;q=1, fr;q=0.9, de;q=0.8, ja;q=0.7, " + "nl;q=0.6, it;q=0.5" ) , "User-Agent" : ( "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) " + "AppleWebKit/537.36 (KHTML, like Gecko) " + "Chrome/68.0.3440.106 Safari/537.36" ) , } if bearer : headers [ "Authorization" ] = "Bearer {0}" . format ( bearer ) if url == "https://api.robinhood.com/options/orders/" : headers [ "Content-Type" ] = "application/json; charset=utf-8" return headers | Generate headers , adding in OAuth2 bearer token if present
39,696 | def logout_oauth2 ( self ) : url = "https://api.robinhood.com/oauth2/revoke_token/" data = { "client_id" : CLIENT_ID , "token" : self . refresh_token , } res = self . post ( url , payload = data ) if res is None : self . account_id = None self . account_url = None self . access_token = None self . refresh_token = None self . mfa_code = None self . scope = None self . authenticated = False return True else : raise AuthenticationError ( "fast_arrow could not log out." ) | Logout for given Oauth2 bearer token |
39,697 | def fetch ( cls , client , symbol ) : assert ( type ( symbol ) is str ) url = ( "https://api.robinhood.com/instruments/?symbol={0}" . format ( symbol ) ) data = client . get ( url ) return data [ "results" ] [ 0 ] | fetch data for stock |
39,698 | def gen_df ( cls , options , width , spread_type = "call" , spread_kind = "buy" ) : assert type ( width ) is int assert spread_type in [ "call" , "put" ] assert spread_kind in [ "buy" , "sell" ] options = list ( filter ( lambda x : x [ "type" ] == spread_type , options ) ) coef = ( 1 if spread_type == "put" else - 1 ) shift = width * coef df = pd . DataFrame . from_dict ( options ) df [ 'expiration_date' ] = pd . to_datetime ( df [ 'expiration_date' ] , format = "%Y-%m-%d" ) df [ 'adjusted_mark_price' ] = pd . to_numeric ( df [ 'adjusted_mark_price' ] ) df [ 'strike_price' ] = pd . to_numeric ( df [ 'strike_price' ] ) df . sort_values ( [ "expiration_date" , "strike_price" ] , inplace = True ) for k , v in df . groupby ( "expiration_date" ) : sdf = v . shift ( shift ) df . loc [ v . index , "strike_price_shifted" ] = sdf [ "strike_price" ] df . loc [ v . index , "delta_shifted" ] = sdf [ "delta" ] df . loc [ v . index , "volume_shifted" ] = sdf [ "volume" ] df . loc [ v . index , "open_interest_shifted" ] = sdf [ "open_interest" ] df . loc [ v . index , "instrument_shifted" ] = sdf [ "instrument" ] df . loc [ v . index , "adjusted_mark_price_shift" ] = sdf [ "adjusted_mark_price" ] if spread_kind == "sell" : df . loc [ v . index , "margin" ] = abs ( sdf [ "strike_price" ] - v [ "strike_price" ] ) else : df . loc [ v . index , "margin" ] = 0.0 if spread_kind == "buy" : df . loc [ v . index , "premium_adjusted_mark_price" ] = ( v [ "adjusted_mark_price" ] - sdf [ "adjusted_mark_price" ] ) elif spread_kind == "sell" : df . loc [ v . index , "premium_adjusted_mark_price" ] = ( sdf [ "adjusted_mark_price" ] - v [ "adjusted_mark_price" ] ) return df | Generate Pandas Dataframe of Vertical |
39,699 | def quote_by_instruments ( cls , client , ids ) : base_url = "https://api.robinhood.com/instruments" id_urls = [ "{}/{}/" . format ( base_url , _id ) for _id in ids ] return cls . quotes_by_instrument_urls ( client , id_urls ) | create instrument urls fetch return results |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.