idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
247,600
def getSkeletalTrackingLevel ( self , action ) : fn = self . function_table . getSkeletalTrackingLevel pSkeletalTrackingLevel = EVRSkeletalTrackingLevel ( ) result = fn ( action , byref ( pSkeletalTrackingLevel ) ) return result , pSkeletalTrackingLevel
Reads the level of accuracy to which the controller is able to track the user to recreate a skeletal pose
74
21
247,601
def getSkeletalBoneData ( self , action , eTransformSpace , eMotionRange , unTransformArrayCount ) : fn = self . function_table . getSkeletalBoneData pTransformArray = VRBoneTransform_t ( ) result = fn ( action , eTransformSpace , eMotionRange , byref ( pTransformArray ) , unTransformArrayCount ) return result , pTransformArray
Reads the state of the skeletal bone data associated with this action and copies it into the given buffer .
84
21
247,602
def getSkeletalSummaryData ( self , action ) : fn = self . function_table . getSkeletalSummaryData pSkeletalSummaryData = VRSkeletalSummaryData_t ( ) result = fn ( action , byref ( pSkeletalSummaryData ) ) return result , pSkeletalSummaryData
Reads summary information about the current pose of the skeleton associated with the given action .
70
17
247,603
def decompressSkeletalBoneData ( self , pvCompressedBuffer , unCompressedBufferSize , eTransformSpace , unTransformArrayCount ) : fn = self . function_table . decompressSkeletalBoneData pTransformArray = VRBoneTransform_t ( ) result = fn ( pvCompressedBuffer , unCompressedBufferSize , eTransformSpace , byref ( pTransformArray ) , unTransformArrayCount ) return result , pTransformArray
Turns a compressed buffer from GetSkeletalBoneDataCompressed and turns it back into a bone transform array .
98
24
247,604
def triggerHapticVibrationAction ( self , action , fStartSecondsFromNow , fDurationSeconds , fFrequency , fAmplitude , ulRestrictToDevice ) : fn = self . function_table . triggerHapticVibrationAction result = fn ( action , fStartSecondsFromNow , fDurationSeconds , fFrequency , fAmplitude , ulRestrictToDevice ) return result
Triggers a haptic event as described by the specified action
91
13
247,605
def getActionOrigins ( self , actionSetHandle , digitalActionHandle , originOutCount ) : fn = self . function_table . getActionOrigins originsOut = VRInputValueHandle_t ( ) result = fn ( actionSetHandle , digitalActionHandle , byref ( originsOut ) , originOutCount ) return result , originsOut
Retrieve origin handles for an action
72
7
247,606
def getOriginLocalizedName ( self , origin , pchNameArray , unNameArraySize , unStringSectionsToInclude ) : fn = self . function_table . getOriginLocalizedName result = fn ( origin , pchNameArray , unNameArraySize , unStringSectionsToInclude ) return result
Retrieves the name of the origin in the current language . unStringSectionsToInclude is a bitfield of values in EVRInputStringBits that allows the application to specify which parts of the origin's information it wants a string for .
71
53
247,607
def getOriginTrackedDeviceInfo ( self , origin , unOriginInfoSize ) : fn = self . function_table . getOriginTrackedDeviceInfo pOriginInfo = InputOriginInfo_t ( ) result = fn ( origin , byref ( pOriginInfo ) , unOriginInfoSize ) return result , pOriginInfo
Retrieves useful information for the origin of this action
68
11
247,608
def showActionOrigins ( self , actionSetHandle , ulActionHandle ) : fn = self . function_table . showActionOrigins result = fn ( actionSetHandle , ulActionHandle ) return result
Shows the current binding for the action in - headset
43
11
247,609
def showBindingsForActionSet ( self , unSizeOfVRSelectedActionSet_t , unSetCount , originToHighlight ) : fn = self . function_table . showBindingsForActionSet pSets = VRActiveActionSet_t ( ) result = fn ( byref ( pSets ) , unSizeOfVRSelectedActionSet_t , unSetCount , originToHighlight ) return result , pSets
Shows the current binding all the actions in the specified action sets
96
13
247,610
def open ( self , pchPath , mode , unElementSize , unElements ) : fn = self . function_table . open pulBuffer = IOBufferHandle_t ( ) result = fn ( pchPath , mode , unElementSize , unElements , byref ( pulBuffer ) ) return result , pulBuffer
opens an existing or creates a new IOBuffer of unSize bytes
70
14
247,611
def close(self, ulBuffer):
    """Close a previously opened or created IO buffer."""
    return self.function_table.close(ulBuffer)
closes a previously opened or created buffer
27
8
247,612
def propertyContainer(self, ulBuffer):
    """Retrieve the property container of a buffer."""
    return self.function_table.propertyContainer(ulBuffer)
retrieves the property container of a buffer .
29
10
247,613
def hasReaders(self, ulBuffer):
    """Cheaply check for readers so writers can fast-fail expensive copies/writes."""
    return self.function_table.hasReaders(ulBuffer)
inexpensively checks for readers to allow writers to fast - fail potentially expensive copies and writes .
31
20
247,614
async def execute(self, sql, *params):
    """Execute *sql*, substituting any markers with *params*.

    Returns self (the cursor) so the call can be chained.
    """
    if self._echo:
        # Bug fix: the original logged the SQL statement twice and never
        # logged the parameters; log statement and params separately.
        logger.info(sql)
        logger.info("%r", params)
    await self._run_operation(self._impl.execute, sql, *params)
    return self
Executes the given operation substituting any markers with the given parameters .
57
14
247,615
def executemany(self, sql, *params):
    """Prepare *sql* and execute it against every parameter sequence in *params*."""
    return self._run_operation(self._impl.executemany, sql, *params)
Prepare a database query or command and then execute it against all parameter sequences found in the sequence seq_of_params .
38
25
247,616
def fetchmany(self, size):
    """Fetch at most *size* of the remaining rows; empty when exhausted."""
    return self._run_operation(self._impl.fetchmany, size)
Returns a list of remaining rows containing no more than size rows used to process results in chunks . The list will be empty when there are no more rows .
30
31
247,617
def tables ( self , * * kw ) : fut = self . _run_operation ( self . _impl . tables , * * kw ) return fut
Creates a result set of tables in the database that match the given criteria .
34
16
247,618
def columns ( self , * * kw ) : fut = self . _run_operation ( self . _impl . columns , * * kw ) return fut
Creates a results set of column names in specified tables by executing the ODBC SQLColumns function . Each row fetched has the following columns .
34
30
247,619
def statistics ( self , catalog = None , schema = None , unique = False , quick = True ) : fut = self . _run_operation ( self . _impl . statistics , catalog = catalog , schema = schema , unique = unique , quick = quick ) return fut
Creates a results set of statistics about a single table and the indexes associated with the table by executing SQLStatistics .
56
23
247,620
def rowIdColumns ( self , table , catalog = None , schema = None , # nopep8 nullable = True ) : fut = self . _run_operation ( self . _impl . rowIdColumns , table , catalog = catalog , schema = schema , nullable = nullable ) return fut
Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a result set of columns that uniquely identify a row
66
27
247,621
def primaryKeys ( self , table , catalog = None , schema = None ) : # nopep8 fut = self . _run_operation ( self . _impl . primaryKeys , table , catalog = catalog , schema = schema ) return fut
Creates a result set of column names that make up the primary key for a table by executing the SQLPrimaryKeys function .
51
25
247,622
def getTypeInfo ( self , sql_type ) : # nopep8 fut = self . _run_operation ( self . _impl . getTypeInfo , sql_type ) return fut
Executes SQLGetTypeInfo and creates a result set with information about the specified data type or all data types supported by the ODBC driver if not specified .
41
32
247,623
def procedures ( self , * a , * * kw ) : fut = self . _run_operation ( self . _impl . procedures , * a , * * kw ) return fut
Executes SQLProcedures and creates a result set of information about the procedures in the data source .
40
21
247,624
async def dataSources(loop=None, executor=None):
    """Return a dictionary mapping available DSNs to their descriptions.

    loop: event loop to use; defaults to the currently running loop.
    executor: executor for the blocking driver call, or None for the default.
    """
    if loop is None:
        # get_event_loop() is deprecated inside coroutines (Python 3.10+);
        # inside an async function it resolved to the running loop anyway.
        loop = asyncio.get_running_loop()
    return await loop.run_in_executor(executor, _dataSources)
Returns a dictionary mapping available DSNs to their descriptions .
51
12
247,625
def connect ( * , dsn , autocommit = False , ansi = False , timeout = 0 , loop = None , executor = None , echo = False , after_created = None , * * kwargs ) : return _ContextManager ( _connect ( dsn = dsn , autocommit = autocommit , ansi = ansi , timeout = timeout , loop = loop , executor = executor , echo = echo , after_created = after_created , * * kwargs ) )
Accepts an ODBC connection string and returns a new Connection object .
113
14
247,626
async def close ( self ) : if not self . _conn : return c = await self . _execute ( self . _conn . close ) self . _conn = None return c
Close pyodbc connection
39
5
247,627
async def execute ( self , sql , * args ) : _cursor = await self . _execute ( self . _conn . execute , sql , * args ) connection = self cursor = Cursor ( _cursor , connection , echo = self . _echo ) return cursor
Create a new Cursor object call its execute method and return it .
58
14
247,628
def getinfo ( self , type_ ) : fut = self . _execute ( self . _conn . getinfo , type_ ) return fut
Returns general information about the driver and data source associated with a connection by calling SQLGetInfo and returning its results . See Microsoft s SQLGetInfo documentation for the types of information available .
30
37
247,629
def add_output_converter ( self , sqltype , func ) : fut = self . _execute ( self . _conn . add_output_converter , sqltype , func ) return fut
Register an output converter function that will be called whenever a value with the given SQL type is read from the database .
44
23
247,630
def set_attr ( self , attr_id , value ) : fut = self . _execute ( self . _conn . set_attr , attr_id , value ) return fut
Calls SQLSetConnectAttr with the given values .
40
13
247,631
def _request_get(self, path, params=None, json=True, url=BASE_URL):
    """Perform an HTTP GET request.

    On a 5xx response, retries up to self._max_retries times with
    exponential backoff. Returns the parsed JSON body unless json=False,
    in which case the raw response is returned. Raises for HTTP errors.
    """
    url = urljoin(url, path)
    headers = self._get_request_headers()
    # Bug fix: the initial request previously had no timeout, unlike the
    # retry requests below and the sibling POST/DELETE helpers, so a hung
    # server could block forever.
    response = requests.get(url, params=params, headers=headers,
                            timeout=DEFAULT_TIMEOUT)
    if response.status_code >= 500:
        backoff = self._initial_backoff
        for _ in range(self._max_retries):
            time.sleep(backoff)
            backoff_response = requests.get(url, params=params,
                                            headers=headers,
                                            timeout=DEFAULT_TIMEOUT)
            if backoff_response.status_code < 500:
                response = backoff_response
                break
            backoff *= 2
    response.raise_for_status()
    if json:
        return response.json()
    else:
        return response
Perform an HTTP GET request .
175
7
247,632
def _request_post ( self , path , data = None , params = None , url = BASE_URL ) : url = urljoin ( url , path ) headers = self . _get_request_headers ( ) response = requests . post ( url , json = data , params = params , headers = headers , timeout = DEFAULT_TIMEOUT ) response . raise_for_status ( ) if response . status_code == 200 : return response . json ( )
Perform an HTTP POST request .
98
7
247,633
def _request_delete ( self , path , params = None , url = BASE_URL ) : url = urljoin ( url , path ) headers = self . _get_request_headers ( ) response = requests . delete ( url , params = params , headers = headers , timeout = DEFAULT_TIMEOUT ) response . raise_for_status ( ) if response . status_code == 200 : return response . json ( )
Perform an HTTP DELETE request .
90
9
247,634
def parse(cls, signed_request, application_secret_key):
    """Parse a signed request, returning a dictionary describing its payload.

    Raises SignedRequestError for corrupt payloads, unknown algorithms or
    signature mismatches.
    """
    def decode(encoded):
        # Bug fix: restore the base64 padding Facebook strips. The original
        # used len(encoded) % 4, which yields 3 '=' (instead of 1) when
        # len % 4 == 3 and makes decoding fail on valid input.
        padding = '=' * (-len(encoded) % 4)
        return base64.urlsafe_b64decode(encoded + padding)

    try:
        encoded_signature, encoded_payload = (
            str(string) for string in signed_request.split('.', 2))
        signature = decode(encoded_signature)
        signed_request_data = json.loads(decode(encoded_payload).decode('utf-8'))
    except (TypeError, ValueError):
        raise SignedRequestError("Signed request had a corrupt payload")

    if signed_request_data.get('algorithm', '').upper() != 'HMAC-SHA256':
        raise SignedRequestError("Signed request is using an unknown algorithm")

    expected_signature = hmac.new(
        application_secret_key.encode('utf-8'),
        msg=encoded_payload.encode('utf-8'),
        digestmod=hashlib.sha256).digest()
    # Constant-time comparison avoids leaking signature bytes via timing.
    if not hmac.compare_digest(signature, expected_signature):
        raise SignedRequestError("Signed request signature mismatch")

    return signed_request_data
Parse a signed request returning a dictionary describing its payload .
272
12
247,635
def generate ( self ) : payload = { 'algorithm' : 'HMAC-SHA256' } if self . data : payload [ 'app_data' ] = self . data if self . page : payload [ 'page' ] = { } if self . page . id : payload [ 'page' ] [ 'id' ] = self . page . id if self . page . is_liked : payload [ 'page' ] [ 'liked' ] = self . page . is_liked if self . page . is_admin : payload [ 'page' ] [ 'admin' ] = self . page . is_admin if self . user : payload [ 'user' ] = { } if self . user . country : payload [ 'user' ] [ 'country' ] = self . user . country if self . user . locale : payload [ 'user' ] [ 'locale' ] = self . user . locale if self . user . age : payload [ 'user' ] [ 'age' ] = { 'min' : self . user . age [ 0 ] , 'max' : self . user . age [ - 1 ] } if self . user . oauth_token : if self . user . oauth_token . token : payload [ 'oauth_token' ] = self . user . oauth_token . token if self . user . oauth_token . expires_at is None : payload [ 'expires_in' ] = 0 else : payload [ 'expires_in' ] = int ( time . mktime ( self . user . oauth_token . expires_at . timetuple ( ) ) ) if self . user . oauth_token . issued_at : payload [ 'issued_at' ] = int ( time . mktime ( self . user . oauth_token . issued_at . timetuple ( ) ) ) if self . user . id : payload [ 'user_id' ] = self . user . id encoded_payload = base64 . urlsafe_b64encode ( json . dumps ( payload , separators = ( ',' , ':' ) ) . encode ( 'utf-8' ) ) encoded_signature = base64 . urlsafe_b64encode ( hmac . new ( self . application_secret_key . encode ( 'utf-8' ) , encoded_payload , hashlib . sha256 ) . digest ( ) ) return '%(signature)s.%(payload)s' % { 'signature' : encoded_signature , 'payload' : encoded_payload }
Generate a signed request from this instance .
561
9
247,636
def for_application ( self , id , secret_key , api_version = None ) : from facepy . utils import get_application_access_token access_token = get_application_access_token ( id , secret_key , api_version = api_version ) return GraphAPI ( access_token , version = api_version )
Initialize GraphAPI with an OAuth access token for an application .
75
14
247,637
def get ( self , path = '' , page = False , retry = 3 , * * options ) : response = self . _query ( method = 'GET' , path = path , data = options , page = page , retry = retry ) if response is False : raise FacebookError ( 'Could not get "%s".' % path ) return response
Get an item from the Graph API .
76
8
247,638
def post ( self , path = '' , retry = 0 , * * data ) : response = self . _query ( method = 'POST' , path = path , data = data , retry = retry ) if response is False : raise FacebookError ( 'Could not post to "%s"' % path ) return response
Post an item to the Graph API .
68
8
247,639
def search ( self , term , type = 'place' , page = False , retry = 3 , * * options ) : if type != 'place' : raise ValueError ( 'Unsupported type "%s". The only supported type is "place" since Graph API 2.0.' % type ) options = dict ( { 'q' : term , 'type' : type , } , * * options ) response = self . _query ( 'GET' , 'search' , options , page , retry ) return response
Search for an item in the Graph API .
110
9
247,640
def batch(self, requests):
    """Make a batch request, yielding one parsed result per sub-request.

    Yields None for empty Graph API responses, and yields the FacepyError
    (with a `request` attribute attached) for failed sub-requests instead
    of raising.
    """
    for request in requests:
        if 'body' in request:
            request['body'] = urlencode(request['body'])

    def _grouper(complete_list, n=1):
        """
        Batch a list into constant-size chunks.

        :param complete_list: An input list (not a generator).
        :param n: The size of the chunk.

        Adapted from <http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python>
        """
        for i in range(0, len(complete_list), n):
            yield complete_list[i:i + n]

    # Facebook caps a batch at 50 requests, so issue the requests in chunks.
    # https://developers.facebook.com/docs/graph-api/making-multiple-requests/#limits
    responses = []
    for group in _grouper(requests, 50):
        responses += self.post(batch=json.dumps(group))

    for response, request in zip(responses, requests):
        # Facilitate for empty Graph API responses.
        #
        # https://github.com/jgorset/facepy/pull/30
        if not response:
            yield None
            continue
        try:
            yield self._parse(response['body'])
        except FacepyError as exception:
            exception.request = request
            yield exception
Make a batch request .
306
5
247,641
def _parse ( self , data ) : if type ( data ) == type ( bytes ( ) ) : try : data = data . decode ( 'utf-8' ) except UnicodeDecodeError : return data try : data = json . loads ( data , parse_float = Decimal ) except ValueError : return data # Facebook's Graph API sometimes responds with 'true' or 'false'. Facebook offers no documentation # as to the prerequisites for this type of response, though it seems that it responds with 'true' # when objects are successfully deleted and 'false' upon attempting to delete or access an item that # one does not have access to. # # For example, the API would respond with 'false' upon attempting to query a feed item without having # the 'read_stream' extended permission. If you were to query the entire feed, however, it would respond # with an empty list instead. # # Genius. # # We'll handle this discrepancy as gracefully as we can by implementing logic to deal with this behavior # in the high-level access functions (get, post, delete etc.). if type ( data ) is dict : if 'error' in data : error = data [ 'error' ] if error . get ( 'type' ) == "OAuthException" : exception = OAuthError else : exception = FacebookError raise exception ( * * self . _get_error_params ( data ) ) # Facebook occasionally reports errors in its legacy error format. if 'error_msg' in data : raise FacebookError ( * * self . _get_error_params ( data ) ) return data
Parse the response from Facebook's Graph API .
336
10
247,642
def get_extended_access_token ( access_token , application_id , application_secret_key , api_version = None ) : graph = GraphAPI ( version = api_version ) response = graph . get ( path = 'oauth/access_token' , client_id = application_id , client_secret = application_secret_key , grant_type = 'fb_exchange_token' , fb_exchange_token = access_token ) try : components = parse_qs ( response ) except AttributeError : # api_version >= 2.3 returns a dict return response [ 'access_token' ] , None token = components [ 'access_token' ] [ 0 ] try : expires_at = datetime . now ( ) + timedelta ( seconds = int ( components [ 'expires' ] [ 0 ] ) ) except KeyError : # there is no expiration expires_at = None return token , expires_at
Get an extended OAuth access token .
204
8
247,643
def get_application_access_token ( application_id , application_secret_key , api_version = None ) : graph = GraphAPI ( version = api_version ) response = graph . get ( path = 'oauth/access_token' , client_id = application_id , client_secret = application_secret_key , grant_type = 'client_credentials' ) try : data = parse_qs ( response ) try : return data [ 'access_token' ] [ 0 ] except KeyError : raise GraphAPI . FacebookError ( 'No access token given' ) except AttributeError : # api_version >= 2.3 returns a dict return response [ 'access_token' ] , None
Get an OAuth access token for the given application .
153
11
247,644
def locked_get_or_set(self, key, value_creator, version=None, expire=None,
                      id=None, lock_key=None, timeout=DEFAULT_TIMEOUT):
    """Fetch *key* from the cache, computing and storing it under a lock.

    value_creator is invoked only while holding the lock, and only when no
    other process stored the value first. Raises ValueError when
    value_creator returns None.
    """
    if lock_key is None:
        lock_key = 'get_or_set:' + key

    cached = self.get(key, version=version)
    if cached is not None:
        return cached

    with self.lock(lock_key, expire=expire, id=id):
        # Was the value set while we were trying to acquire the lock?
        cached = self.get(key, version=version)
        if cached is not None:
            return cached

        # Nope, create the value now.
        cached = value_creator()
        if cached is None:
            raise ValueError('`value_creator` must return a value')
        self.set(key, cached, timeout=timeout, version=version)
        return cached
Fetch a given key from the cache . If the key does not exist the key is added and set to the value returned when calling value_creator . The creator function is invoked inside of a lock .
189
41
247,645
def _eval_script(redis, script_id, *keys, **kwargs):
    """Run a registered script via EVALSHA, falling back to EVAL.

    SCRIPTS appears to hold, per script, the SHA at script_id, the source
    at script_id + 1 and a human-readable name at script_id + 2 — confirm
    against the SCRIPTS definition.
    """
    args = kwargs.pop('args', ())
    if kwargs:
        raise TypeError("Unexpected keyword arguments %s" % kwargs.keys())
    try:
        return redis.evalsha(SCRIPTS[script_id], len(keys), *keys + args)
    except NoScriptError:
        logger.info("%s not cached.", SCRIPTS[script_id + 2])
        return redis.eval(SCRIPTS[script_id + 1], len(keys), *keys + args)
Tries to call EVALSHA with the hash and then if it fails calls regular EVAL with the script .
146
23
247,646
def reset ( self ) : _eval_script ( self . _client , RESET , self . _name , self . _signal ) self . _delete_signal ( )
Forcibly deletes the lock . Use this with care .
39
13
247,647
def extend ( self , expire = None ) : if expire is None : if self . _expire is not None : expire = self . _expire else : raise TypeError ( "To extend a lock 'expire' must be provided as an " "argument to extend() method or at initialization time." ) error = _eval_script ( self . _client , EXTEND , self . _name , args = ( expire , self . _id ) ) if error == 1 : raise NotAcquired ( "Lock %s is not acquired or it already expired." % self . _name ) elif error == 2 : raise NotExpirable ( "Lock %s has no assigned expiration time" % self . _name ) elif error : raise RuntimeError ( "Unsupported error code %s from EXTEND script" % error )
Extends expiration time of the lock .
176
8
247,648
def _lock_renewer ( lockref , interval , stop ) : log = getLogger ( "%s.lock_refresher" % __name__ ) while not stop . wait ( timeout = interval ) : log . debug ( "Refreshing lock" ) lock = lockref ( ) if lock is None : log . debug ( "The lock no longer exists, " "stopping lock refreshing" ) break lock . extend ( expire = lock . _expire ) del lock log . debug ( "Exit requested, stopping lock refreshing" )
Renew the lock key in redis every interval seconds for as long as self . _lock_renewal_thread . should_exit is False .
116
32
247,649
def _start_lock_renewer(self):
    """Start the background thread that periodically extends the lock.

    Raises AlreadyStarted if a renewal thread is already running.
    """
    if self._lock_renewal_thread is not None:
        raise AlreadyStarted("Lock refresh thread already started")

    logger.debug("Starting thread to refresh lock every %s seconds",
                 self._lock_renewal_interval)
    self._lock_renewal_stop = threading.Event()
    self._lock_renewal_thread = threading.Thread(
        group=None,
        target=self._lock_renewer,
        kwargs={
            # Pass a weakref so the renewer does not keep the lock alive
            # and can stop itself once the lock is garbage-collected.
            'lockref': weakref.ref(self),
            'interval': self._lock_renewal_interval,
            'stop': self._lock_renewal_stop,
        },
    )
    # Modernization: Thread.setDaemon() is deprecated since Python 3.10;
    # assign the daemon attribute instead.
    self._lock_renewal_thread.daemon = True
    self._lock_renewal_thread.start()
Starts the lock refresher thread .
199
8
247,650
def _stop_lock_renewer ( self ) : if self . _lock_renewal_thread is None or not self . _lock_renewal_thread . is_alive ( ) : return logger . debug ( "Signalling the lock refresher to stop" ) self . _lock_renewal_stop . set ( ) self . _lock_renewal_thread . join ( ) self . _lock_renewal_thread = None logger . debug ( "Lock refresher has stopped" )
Stop the lock renewer .
114
6
247,651
def release(self):
    """Release the lock that was acquired with this object.

    Raises NotAcquired when the lock is not held or has already expired.
    """
    if self._lock_renewal_thread is not None:
        self._stop_lock_renewer()
    logger.debug("Releasing %r.", self._name)
    error = _eval_script(self._client, UNLOCK, self._name, self._signal,
                         args=(self._id,))
    if error == 1:
        raise NotAcquired("Lock %s is not acquired or it already expired." %
                          self._name)
    elif error:
        # Bug fix: the message previously blamed the EXTEND script even
        # though this path runs the UNLOCK script.
        raise RuntimeError("Unsupported error code %s from UNLOCK script." %
                           error)
    else:
        self._delete_signal()
Releases the lock that was acquired with the same object .
148
12
247,652
def get_detail ( self , course_id ) : # the request is done in behalf of the current logged in user resp = self . _requester . get ( urljoin ( self . _base_url , '/api/courses/v1/courses/{course_key}/' . format ( course_key = course_id ) ) ) resp . raise_for_status ( ) return CourseDetail ( resp . json ( ) )
Fetches course details .
97
6
247,653
def get_user_info ( self ) : # the request is done in behalf of the current logged in user resp = self . requester . get ( urljoin ( self . base_url , '/api/mobile/v0.5/my_user_info' ) ) resp . raise_for_status ( ) return Info ( resp . json ( ) )
Returns a UserInfo object for the logged in user .
77
11
247,654
def course_blocks ( self , course_id , username ) : resp = self . requester . get ( urljoin ( self . base_url , '/api/courses/v1/blocks/' ) , params = { "depth" : "all" , "username" : username , "course_id" : course_id , "requested_fields" : "children,display_name,id,type,visible_to_staff_only" , } ) resp . raise_for_status ( ) return Structure ( resp . json ( ) )
Fetches course blocks .
120
6
247,655
def get_student_current_grade ( self , username , course_id ) : # the request is done in behalf of the current logged in user resp = self . requester . get ( urljoin ( self . base_url , '/api/grades/v1/courses/{course_key}/?username={username}' . format ( username = username , course_key = course_id ) ) ) resp . raise_for_status ( ) return CurrentGrade ( resp . json ( ) [ 0 ] )
Returns a CurrentGrade object for the user in a course
110
11
247,656
def get_student_current_grades ( self , username , course_ids = None ) : # if no course ids are provided, let's get the user enrollments if course_ids is None : enrollments_client = CourseEnrollments ( self . requester , self . base_url ) enrollments = enrollments_client . get_student_enrollments ( ) course_ids = list ( enrollments . get_enrolled_course_ids ( ) ) all_current_grades = [ ] for course_id in course_ids : try : all_current_grades . append ( self . get_student_current_grade ( username , course_id ) ) except HTTPError as error : if error . response . status_code >= 500 : raise return CurrentGradesByUser ( all_current_grades )
Returns a CurrentGradesByUser object with the user current grades .
176
14
247,657
def get_course_current_grades ( self , course_id ) : resp = self . requester . get ( urljoin ( self . base_url , '/api/grades/v1/courses/{course_key}/' . format ( course_key = course_id ) ) ) resp . raise_for_status ( ) resp_json = resp . json ( ) if 'results' in resp_json : grade_entries = [ CurrentGrade ( entry ) for entry in resp_json [ "results" ] ] while resp_json [ 'next' ] is not None : resp = self . requester . get ( resp_json [ 'next' ] ) resp . raise_for_status ( ) resp_json = resp . json ( ) grade_entries . extend ( ( CurrentGrade ( entry ) for entry in resp_json [ "results" ] ) ) else : grade_entries = [ CurrentGrade ( entry ) for entry in resp_json ] return CurrentGradesByCourse ( grade_entries )
Returns a CurrentGradesByCourse object for all users in the specified course .
222
16
247,658
def get_requester ( self ) : # TODO(abrahms): Perhaps pull this out into a factory function for # generating an EdxApi instance with the proper requester & credentials. session = requests . session ( ) session . headers . update ( { 'Authorization' : 'Bearer {}' . format ( self . credentials [ 'access_token' ] ) } ) old_request = session . request def patched_request ( * args , * * kwargs ) : """ adds timeout param to session.request """ return old_request ( * args , timeout = self . timeout , * * kwargs ) session . request = patched_request return session
Returns an object to make authenticated requests . See python requests for the API .
142
15
247,659
def create(self, master_course_id, coach_email, max_students_allowed, title,
           modules=None):
    """Create a CCX and return its course id.

    modules: optional list of course module ids to include in the CCX.
    Raises on HTTP errors, after logging the response body.
    """
    payload = {
        'master_course_id': master_course_id,
        'coach_email': coach_email,
        'max_students_allowed': max_students_allowed,
        'display_name': title,
    }
    if modules is not None:
        payload['course_modules'] = modules
    resp = self.requester.post(
        parse.urljoin(self.base_url, '/api/ccx/v0/ccx/'),
        json=payload,
    )
    try:
        resp.raise_for_status()
    except Exception:
        # Bug fix: a bare `except:` also intercepted SystemExit and
        # KeyboardInterrupt; log the error body, then re-raise.
        log.error(resp.json())
        raise
    return resp.json()['ccx_course_id']
Creates a CCX
177
5
247,660
def _get_enrollments_list_page ( self , params = None ) : req_url = urljoin ( self . base_url , self . enrollment_list_url ) resp = self . requester . get ( req_url , params = params ) resp . raise_for_status ( ) resp_json = resp . json ( ) results = resp_json [ 'results' ] next_url_str = resp_json . get ( 'next' ) cursor = None qstr_cursor = None if next_url_str : next_url = urlparse ( next_url_str ) qstr = parse_qs ( next_url . query ) qstr_cursor = qstr . get ( 'cursor' ) if qstr_cursor and isinstance ( qstr_cursor , list ) : cursor = qstr_cursor [ 0 ] return results , cursor
Submit request to retrieve enrollments list .
193
8
247,661
def get_enrollments ( self , course_id = None , usernames = None ) : params = { } if course_id is not None : params [ 'course_id' ] = course_id if usernames is not None and isinstance ( usernames , list ) : params [ 'username' ] = ',' . join ( usernames ) done = False while not done : enrollments , next_cursor = self . _get_enrollments_list_page ( params ) for enrollment in enrollments : yield Enrollment ( enrollment ) if next_cursor : params [ 'cursor' ] = next_cursor else : done = True
List all course enrollments .
145
6
247,662
def get_student_enrollments ( self ) : # the request is done in behalf of the current logged in user resp = self . requester . get ( urljoin ( self . base_url , self . enrollment_url ) ) resp . raise_for_status ( ) return Enrollments ( resp . json ( ) )
Returns an Enrollments object with the user enrollments
70
11
247,663
def create_audit_student_enrollment ( self , course_id ) : audit_enrollment = { "mode" : "audit" , "course_details" : { "course_id" : course_id } } # the request is done in behalf of the current logged in user resp = self . requester . post ( urljoin ( self . base_url , self . enrollment_url ) , json = audit_enrollment ) resp . raise_for_status ( ) return Enrollment ( resp . json ( ) )
Creates an audit enrollment for the user in a given course
115
12
247,664
def get_student_certificate ( self , username , course_id ) : # the request is done in behalf of the current logged in user resp = self . requester . get ( urljoin ( self . base_url , '/api/certificates/v0/certificates/{username}/courses/{course_key}/' . format ( username = username , course_key = course_id ) ) ) resp . raise_for_status ( ) return Certificate ( resp . json ( ) )
Returns a Certificate object with the user's certificate
111
8
247,665
def get_student_certificates ( self , username , course_ids = None ) : # if no course ids are provided, let's get the user enrollments if course_ids is None : enrollments_client = CourseEnrollments ( self . requester , self . base_url ) enrollments = enrollments_client . get_student_enrollments ( ) course_ids = list ( enrollments . get_enrolled_course_ids ( ) ) all_certificates = [ ] for course_id in course_ids : try : all_certificates . append ( self . get_student_certificate ( username , course_id ) ) except HTTPError as error : if error . response . status_code >= 500 : raise return Certificates ( all_certificates )
Returns a Certificates object with the user's certificates
173
10
247,666
def get_colors(img):
    """Return every color present in *img* as a list of RGB triples."""
    width, height = img.size
    # getcolors() needs a maxcolors bound of at least one entry per pixel.
    counted = img.convert('RGB').getcolors(width * height)
    return [rgb[:3] for _, rgb in counted]
Returns a list of all the image's colors .
46
10
247,667
def clamp(color, min_v, max_v):
    """Clamp *color* so that its HSV value lies within [min_v, max_v]."""
    hue, saturation, value = rgb_to_hsv(*map(down_scale, color))
    lo, hi = map(down_scale, (min_v, max_v))
    value = min(max(lo, value), hi)
    return tuple(map(up_scale, hsv_to_rgb(hue, saturation, value)))
Clamps a color such that the value is between min_v and max_v .
103
18
247,668
def order_by_hue ( colors ) : hsvs = [ rgb_to_hsv ( * map ( down_scale , color ) ) for color in colors ] hsvs . sort ( key = lambda t : t [ 0 ] ) return [ tuple ( map ( up_scale , hsv_to_rgb ( * hsv ) ) ) for hsv in hsvs ]
Orders colors by hue .
86
6
247,669
def brighten ( color , brightness ) : h , s , v = rgb_to_hsv ( * map ( down_scale , color ) ) return tuple ( map ( up_scale , hsv_to_rgb ( h , s , v + down_scale ( brightness ) ) ) )
Adds or subtracts value to a color .
64
9
247,670
def colorz ( fd , n = DEFAULT_NUM_COLORS , min_v = DEFAULT_MINV , max_v = DEFAULT_MAXV , bold_add = DEFAULT_BOLD_ADD , order_colors = True ) : img = Image . open ( fd ) img . thumbnail ( THUMB_SIZE ) obs = get_colors ( img ) clamped = [ clamp ( color , min_v , max_v ) for color in obs ] clusters , _ = kmeans ( array ( clamped ) . astype ( float ) , n ) colors = order_by_hue ( clusters ) if order_colors else clusters return list ( zip ( colors , [ brighten ( c , bold_add ) for c in colors ] ) )
Get the n most dominant colors of an image . Clamps value to between min_v and max_v .
170
23
247,671
def long_description ( * paths ) : result = '' # attempt to import pandoc try : import pypandoc except ( ImportError , OSError ) as e : print ( "Unable to import pypandoc - %s" % e ) return result # attempt md -> rst conversion try : for path in paths : result += '\n' + pypandoc . convert ( path , 'rst' , format = 'markdown' ) except ( OSError , IOError ) as e : print ( "Failed to convert with pypandoc - %s" % e ) return result return result
Returns a RST formated string .
136
8
247,672
def memory_full ( ) : current_process = psutil . Process ( os . getpid ( ) ) return ( current_process . memory_percent ( ) > config . MAXIMUM_CACHE_MEMORY_PERCENTAGE )
Check if the memory is too full for further caching .
53
11
247,673
def cache ( cache = { } , maxmem = config . MAXIMUM_CACHE_MEMORY_PERCENTAGE , typed = False ) : # Constants shared by all lru cache instances: # Unique object used to signal cache misses. sentinel = object ( ) # Build a key from the function arguments. make_key = _make_key def decorating_function ( user_function , hits = 0 , misses = 0 ) : full = False # Bound method to look up a key or return None. cache_get = cache . get if not maxmem : def wrapper ( * args , * * kwds ) : # Simple caching without memory limit. nonlocal hits , misses key = make_key ( args , kwds , typed ) result = cache_get ( key , sentinel ) if result is not sentinel : hits += 1 return result result = user_function ( * args , * * kwds ) cache [ key ] = result misses += 1 return result else : def wrapper ( * args , * * kwds ) : # Memory-limited caching. nonlocal hits , misses , full key = make_key ( args , kwds , typed ) result = cache_get ( key ) if result is not None : hits += 1 return result result = user_function ( * args , * * kwds ) if not full : cache [ key ] = result # Cache is full if the total recursive usage is greater # than the maximum allowed percentage. current_process = psutil . Process ( os . getpid ( ) ) full = current_process . memory_percent ( ) > maxmem misses += 1 return result def cache_info ( ) : """Report cache statistics.""" return _CacheInfo ( hits , misses , len ( cache ) ) def cache_clear ( ) : """Clear the cache and cache statistics.""" nonlocal hits , misses , full cache . clear ( ) hits = misses = 0 full = False wrapper . cache_info = cache_info wrapper . cache_clear = cache_clear return update_wrapper ( wrapper , user_function ) return decorating_function
Memory - limited cache decorator .
447
7
247,674
def MICECache ( subsystem , parent_cache = None ) : if config . REDIS_CACHE : cls = RedisMICECache else : cls = DictMICECache return cls ( subsystem , parent_cache = parent_cache )
Construct a |MICE| cache .
56
8
247,675
def method ( cache_name , key_prefix = None ) : def decorator ( func ) : if ( func . __name__ in [ 'cause_repertoire' , 'effect_repertoire' ] and not config . CACHE_REPERTOIRES ) : return func @ wraps ( func ) def wrapper ( obj , * args , * * kwargs ) : cache = getattr ( obj , cache_name ) # Delegate key generation key = cache . key ( * args , _prefix = key_prefix , * * kwargs ) # Get cached value, or compute value = cache . get ( key ) if value is None : # miss value = func ( obj , * args , * * kwargs ) cache . set ( key , value ) return value return wrapper return decorator
Caching decorator for object - level method caches .
175
11
247,676
def get ( self , key ) : if key in self . cache : self . hits += 1 return self . cache [ key ] self . misses += 1 return None
Get a value out of the cache .
34
8
247,677
def key ( self , * args , _prefix = None , * * kwargs ) : if kwargs : raise NotImplementedError ( 'kwarg cache keys not implemented' ) return ( _prefix , ) + tuple ( args )
Get the cache key for the given function args .
52
10
247,678
def info ( self ) : info = redis_conn . info ( ) return _CacheInfo ( info [ 'keyspace_hits' ] , info [ 'keyspace_misses' ] , self . size ( ) )
Return cache information .
49
4
247,679
def set ( self , key , value ) : if not self . subsystem . is_cut : super ( ) . set ( key , value )
Only need to set if the subsystem is uncut .
30
11
247,680
def _build ( self , parent_cache ) : for key , mice in parent_cache . cache . items ( ) : if not mice . damaged_by_cut ( self . subsystem ) : self . cache [ key ] = mice
Build the initial cache from the parent .
49
8
247,681
def set ( self , key , value ) : if config . CACHE_POTENTIAL_PURVIEWS : self . cache [ key ] = value
Only set if purview caching is enabled
35
8
247,682
def apply_boundary_conditions_to_cm ( external_indices , cm ) : cm = cm . copy ( ) cm [ external_indices , : ] = 0 # Zero-out row cm [ : , external_indices ] = 0 # Zero-out columnt return cm
Remove connections to or from external nodes .
64
8
247,683
def get_inputs_from_cm ( index , cm ) : return tuple ( i for i in range ( cm . shape [ 0 ] ) if cm [ i ] [ index ] )
Return indices of inputs to the node with the given index .
40
12
247,684
def get_outputs_from_cm ( index , cm ) : return tuple ( i for i in range ( cm . shape [ 0 ] ) if cm [ index ] [ i ] )
Return indices of the outputs of node with the given index .
40
12
247,685
def causally_significant_nodes ( cm ) : inputs = cm . sum ( 0 ) outputs = cm . sum ( 1 ) nodes_with_inputs_and_outputs = np . logical_and ( inputs > 0 , outputs > 0 ) return tuple ( np . where ( nodes_with_inputs_and_outputs ) [ 0 ] )
Return indices of nodes that have both inputs and outputs .
77
11
247,686
def relevant_connections ( n , _from , to ) : cm = np . zeros ( ( n , n ) ) # Don't try and index with empty arrays. Older versions of NumPy # (at least up to 1.9.3) break with empty array indices. if not _from or not to : return cm cm [ np . ix_ ( _from , to ) ] = 1 return cm
Construct a connectivity matrix .
88
5
247,687
def block_cm ( cm ) : if np . any ( cm . sum ( 1 ) == 0 ) : return True if np . all ( cm . sum ( 1 ) == 1 ) : return True outputs = list ( range ( cm . shape [ 1 ] ) ) # CM helpers: def outputs_of ( nodes ) : """Return all nodes that `nodes` connect to (output to).""" return np . where ( cm [ nodes , : ] . sum ( 0 ) ) [ 0 ] def inputs_to ( nodes ) : """Return all nodes which connect to (input to) `nodes`.""" return np . where ( cm [ : , nodes ] . sum ( 1 ) ) [ 0 ] # Start: source node with most outputs sources = [ np . argmax ( cm . sum ( 1 ) ) ] sinks = outputs_of ( sources ) sink_inputs = inputs_to ( sinks ) while True : if np . array_equal ( sink_inputs , sources ) : # sources exclusively connect to sinks. # There are no other nodes which connect sink nodes, # hence set(sources) + set(sinks) form a component # which is not connected to the rest of the graph return True # Recompute sources, sinks, and sink_inputs sources = sink_inputs sinks = outputs_of ( sources ) sink_inputs = inputs_to ( sinks ) # Considering all output nodes? if np . array_equal ( sinks , outputs ) : return False
Return whether cm can be arranged as a block connectivity matrix .
315
12
247,688
def block_reducible ( cm , nodes1 , nodes2 ) : # Trivial case if not nodes1 or not nodes2 : return True cm = cm [ np . ix_ ( nodes1 , nodes2 ) ] # Validate the connectivity matrix. if not cm . sum ( 0 ) . all ( ) or not cm . sum ( 1 ) . all ( ) : return True if len ( nodes1 ) > 1 and len ( nodes2 ) > 1 : return block_cm ( cm ) return False
Return whether connections from nodes1 to nodes2 are reducible .
109
13
247,689
def _connected ( cm , nodes , connection ) : if nodes is not None : cm = cm [ np . ix_ ( nodes , nodes ) ] num_components , _ = connected_components ( cm , connection = connection ) return num_components < 2
Test connectivity for the connectivity matrix .
57
7
247,690
def is_full ( cm , nodes1 , nodes2 ) : if not nodes1 or not nodes2 : return True cm = cm [ np . ix_ ( nodes1 , nodes2 ) ] # Do all nodes have at least one connection? return cm . sum ( 0 ) . all ( ) and cm . sum ( 1 ) . all ( )
Test connectivity of one set of nodes to another .
74
10
247,691
def apply_cut ( self , cm ) : # Invert the cut matrix, creating a matrix of preserved connections inverse = np . logical_not ( self . cut_matrix ( cm . shape [ 0 ] ) ) . astype ( int ) return cm * inverse
Return a modified connectivity matrix with all connections that are severed by this cut removed .
56
16
247,692
def cuts_connections ( self , a , b ) : n = max ( self . indices ) + 1 return self . cut_matrix ( n ) [ np . ix_ ( a , b ) ] . any ( )
Check if this cut severs any connections from a to b .
49
13
247,693
def all_cut_mechanisms ( self ) : for mechanism in utils . powerset ( self . indices , nonempty = True ) : if self . splits_mechanism ( mechanism ) : yield mechanism
Return all mechanisms with elements on both sides of this cut .
45
12
247,694
def cut_matrix ( self , n ) : return connectivity . relevant_connections ( n , self . from_nodes , self . to_nodes )
Compute the cut matrix for this cut .
35
9
247,695
def cut_matrix ( self , n ) : cm = np . zeros ( ( n , n ) ) for part in self . partition : from_ , to = self . direction . order ( part . mechanism , part . purview ) # All indices external to this part external = tuple ( set ( self . indices ) - set ( to ) ) cm [ np . ix_ ( from_ , external ) ] = 1 return cm
The matrix of connections that are severed by this cut .
92
11
247,696
def concept_distance ( c1 , c2 ) : # Calculate the sum of the cause and effect EMDs, expanding the repertoires # to the combined purview of the two concepts, so that the EMD signatures # are the same size. cause_purview = tuple ( set ( c1 . cause . purview + c2 . cause . purview ) ) effect_purview = tuple ( set ( c1 . effect . purview + c2 . effect . purview ) ) # Take the sum return ( repertoire_distance ( c1 . expand_cause_repertoire ( cause_purview ) , c2 . expand_cause_repertoire ( cause_purview ) ) + repertoire_distance ( c1 . expand_effect_repertoire ( effect_purview ) , c2 . expand_effect_repertoire ( effect_purview ) ) )
Return the distance between two concepts in concept space .
193
10
247,697
def small_phi_ces_distance ( C1 , C2 ) : return sum ( c . phi for c in C1 ) - sum ( c . phi for c in C2 )
Return the difference in |small_phi| between |CauseEffectStructure| .
42
17
247,698
def generate_nodes ( tpm , cm , network_state , indices , node_labels = None ) : if node_labels is None : node_labels = NodeLabels ( None , indices ) node_state = utils . state_of ( indices , network_state ) return tuple ( Node ( tpm , cm , index , state , node_labels ) for index , state in zip ( indices , node_state ) )
Generate |Node| objects for a subsystem .
96
10
247,699
def expand_node_tpm ( tpm ) : uc = np . ones ( [ 2 for node in tpm . shape ] ) return uc * tpm
Broadcast a node TPM over the full network .
36
11