idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
36,700 | def new_binary_container ( self , name ) : self . _message_stack . append ( BinaryContainerTemplate ( name , self . _current_container ) ) | Defines a new binary container to template . |
36,701 | def end_binary_container ( self ) : binary_container = self . _message_stack . pop ( ) binary_container . verify ( ) self . _add_field ( binary_container ) | End binary container . See New Binary Container . |
36,702 | def bin ( self , size , name , value = None ) : self . _add_field ( Binary ( size , name , value ) ) | Add new binary field to template . |
36,703 | def new_union ( self , type , name ) : self . _message_stack . append ( UnionTemplate ( type , name , self . _current_container ) ) | Defines a new union to template of type and name . |
36,704 | def start_bag ( self , name ) : self . _message_stack . append ( BagTemplate ( name , self . _current_container ) ) | Bags are sets of optional elements with an optional count . |
36,705 | def value ( self , name , value ) : if isinstance ( value , _StructuredElement ) : self . _struct_fields_as_values ( name , value ) elif name . startswith ( 'header:' ) : self . _header_values [ name . partition ( ':' ) [ - 1 ] ] = value else : self . _field_values [ name ] = value | Defines a default value for a template field identified by name . |
36,706 | def conditional ( self , condition , name ) : self . _message_stack . append ( ConditionalTemplate ( condition , name , self . _current_container ) ) | Defines a condition when conditional element of name exists if condition is true . |
36,707 | def get_client_unread_messages_count ( self , client_name = None ) : client = self . _clients . get_with_name ( client_name ) [ 0 ] return client . get_messages_count_in_buffer ( ) | Gets count of unread messages from client |
36,708 | def get_server_unread_messages_count ( self , server_name = None ) : server = self . _servers . get ( server_name ) return server . get_messages_count_in_buffer ( ) | Gets count of unread messages from server |
36,709 | def to_twos_comp ( val , bits ) : if not val . startswith ( '-' ) : return to_int ( val ) value = _invert ( to_bin_str_from_int_string ( bits , bin ( to_int ( val [ 1 : ] ) ) ) ) return int ( value , 2 ) + 1 | compute the 2 s compliment of int value val |
36,710 | def u8 ( self , name , value = None , align = None ) : self . uint ( 1 , name , value , align ) | Add an unsigned 1 byte integer field to template . |
36,711 | def u16 ( self , name , value = None , align = None ) : self . uint ( 2 , name , value , align ) | Add an unsigned 2 byte integer field to template . |
36,712 | def u24 ( self , name , value = None , align = None ) : self . uint ( 3 , name , value , align ) | Add an unsigned 3 byte integer field to template . |
36,713 | def u32 ( self , name , value = None , align = None ) : self . uint ( 4 , name , value , align ) | Add an unsigned 4 byte integer field to template . |
36,714 | def u40 ( self , name , value = None , align = None ) : self . uint ( 5 , name , value , align ) | Add an unsigned 5 byte integer field to template . |
36,715 | def u64 ( self , name , value = None , align = None ) : self . uint ( 8 , name , value , align ) | Add an unsigned 8 byte integer field to template . |
36,716 | def u128 ( self , name , value = None , align = None ) : self . uint ( 16 , name , value , align ) | Add an unsigned 16 byte integer field to template . |
36,717 | def i8 ( self , name , value = None , align = None ) : self . int ( 1 , name , value , align ) | Add an 1 byte integer field to template . |
36,718 | def i32 ( self , name , value = None , align = None ) : self . int ( 4 , name , value , align ) | Add an 32 byte integer field to template . |
36,719 | def array ( self , size , type , name , * parameters ) : self . _new_list ( size , name ) BuiltIn ( ) . run_keyword ( type , '' , * parameters ) self . _end_list ( ) | Define a new array of given size and containing fields of type type . |
36,720 | def container ( self , name , length , type , * parameters ) : self . new_struct ( 'Container' , name , 'length=%s' % length ) BuiltIn ( ) . run_keyword ( type , * parameters ) self . end_struct ( ) | Define a container with given length . |
36,721 | def case ( self , size , kw , * parameters ) : self . _start_bag_case ( size ) BuiltIn ( ) . run_keyword ( kw , * parameters ) self . _end_bag_case ( ) | An element inside a bag started with Start Bag . |
36,722 | def embed_seqdiag_sequence ( self ) : test_name = BuiltIn ( ) . replace_variables ( '${TEST NAME}' ) outputdir = BuiltIn ( ) . replace_variables ( '${OUTPUTDIR}' ) path = os . path . join ( outputdir , test_name + '.seqdiag' ) SeqdiagGenerator ( ) . compile ( path , self . _message_sequence ) | Create a message sequence diagram png file to output folder and embed the image to log file . |
36,723 | def _check_pypi_version ( self ) : try : check_version ( ) except VersionException as err : print ( str ( err ) , file = sys . stderr ) time . sleep ( TWO_SECONDS ) | When the version is out of date or we have trouble retrieving it print a error to stderr and pause . |
36,724 | def get_ca_certs ( environ = os . environ ) : cert_paths = environ . get ( "TXAWS_CERTS_PATH" , DEFAULT_CERTS_PATH ) . split ( ":" ) certificate_authority_map = { } for path in cert_paths : if not path : continue for cert_file_name in glob ( os . path . join ( path , "*.pem" ) ) : if not os . path . exists ( cert_file_name ) : continue cert_file = open ( cert_file_name ) data = cert_file . read ( ) cert_file . close ( ) x509 = load_certificate ( FILETYPE_PEM , data ) digest = x509 . digest ( "sha1" ) certificate_authority_map [ digest ] = x509 values = certificate_authority_map . values ( ) if len ( values ) == 0 : raise exception . CertsNotFoundError ( "Could not find any .pem files." ) return values | Retrieve a list of CAs at either the DEFAULT_CERTS_PATH or the env override TXAWS_CERTS_PATH . |
36,725 | def iso8601time ( time_tuple ) : if time_tuple : return time . strftime ( "%Y-%m-%dT%H:%M:%SZ" , time_tuple ) else : return time . strftime ( "%Y-%m-%dT%H:%M:%SZ" , time . gmtime ( ) ) | Format time_tuple as a ISO8601 time string . |
36,726 | def parse ( url , defaultPort = True ) : url = url . strip ( ) parsed = urlparse ( url ) scheme = parsed [ 0 ] path = urlunparse ( ( "" , "" ) + parsed [ 2 : ] ) host = parsed [ 1 ] if ":" in host : host , port = host . split ( ":" ) try : port = int ( port ) except ValueError : port = None else : port = None if port is None and defaultPort : if scheme == "https" : port = 443 else : port = 80 if path == "" : path = "/" return ( str ( scheme ) , str ( host ) , port , str ( path ) ) | Split the given URL into the scheme host port and path . |
36,727 | def claim_new_token ( self ) : headers = { 'Content-Type' : ContentType . json , 'User-Agent' : self . user_agent_str , } data = { "agent_key" : self . config . agent_key , "user_key" : self . config . user_key , } url_suffix = "/software_agents/api_token" url = self . config . url + url_suffix response = requests . post ( url , headers = headers , data = json . dumps ( data ) ) if response . status_code == 404 : if not self . config . agent_key : raise MissingInitialSetupError ( ) else : raise SoftwareAgentNotFoundError ( ) elif response . status_code == 503 : raise DataServiceError ( response , url_suffix , data ) elif response . status_code != 201 : raise AuthTokenCreationError ( response ) resp_json = response . json ( ) self . _auth = resp_json [ 'api_token' ] self . _expires = resp_json [ 'expires_on' ] | Update internal state to have a new token using a no authorization data service . |
36,728 | def handle ( self , request ) : request . id = str ( uuid4 ( ) ) deferred = maybeDeferred ( self . _validate , request ) deferred . addCallback ( self . execute ) def write_response ( response ) : request . setHeader ( "Content-Length" , str ( len ( response ) ) ) request . setHeader ( "Content-Type" , self . content_type ) request . setHeader ( "X-Content-Type-Options" , "nosniff" ) request . write ( response ) request . finish ( ) return response def write_error ( failure ) : if failure . check ( APIError ) : status = failure . value . status if status < 400 or status >= 500 : log . err ( failure ) else : log . msg ( "status: %s message: %s" % ( status , safe_str ( failure . value ) ) ) body = failure . value . response if body is None : body = self . dump_error ( failure . value , request ) else : log . err ( failure ) body = "Server error" status = 500 request . setResponseCode ( status ) write_response ( body ) deferred . addCallback ( write_response ) deferred . addErrback ( write_error ) return deferred | Handle an HTTP request for executing an API call . |
36,729 | def get_call_arguments ( self , request ) : params = dict ( ( k , v [ - 1 ] ) for k , v in request . args . iteritems ( ) ) args , rest = self . schema . extract ( params ) params . pop ( "Signature" ) result = { "transport_args" : { "action" : args . Action , "access_key_id" : args . AWSAccessKeyId , "timestamp" : args . Timestamp , "expires" : args . Expires , "version" : args . Version , "signature_method" : args . SignatureMethod , "signature" : args . Signature , "signature_version" : args . SignatureVersion } , "handler_args" : rest , "raw_args" : params } return result | Get call arguments from a request . Override this if you want to use a wire format different from AWS s . |
36,730 | def _validate_generic_parameters ( self , args ) : utc_now = self . get_utc_time ( ) if getattr ( self , "actions" , None ) is not None : if not args [ "action" ] in self . actions : raise APIError ( 400 , "InvalidAction" , "The action %s is not " "valid for this web service." % args [ "action" ] ) else : self . registry . check ( args [ "action" ] , args [ "version" ] ) if not args [ "signature_version" ] in self . signature_versions : raise APIError ( 403 , "InvalidSignature" , "SignatureVersion '%s' " "not supported" % args [ "signature_version" ] ) if args [ "expires" ] and args [ "timestamp" ] : raise APIError ( 400 , "InvalidParameterCombination" , "The parameter Timestamp cannot be used with " "the parameter Expires" ) if args [ "expires" ] and args [ "expires" ] < utc_now : raise APIError ( 400 , "RequestExpired" , "Request has expired. Expires date is %s" % ( args [ "expires" ] . strftime ( self . time_format ) ) ) if ( args [ "timestamp" ] and args [ "timestamp" ] + timedelta ( minutes = 15 ) < utc_now ) : raise APIError ( 400 , "RequestExpired" , "Request has expired. Timestamp date is %s" % ( args [ "timestamp" ] . strftime ( self . time_format ) ) ) | Validate the generic request parameters . |
36,731 | def _validate_signature ( self , request , principal , args , params ) : creds = AWSCredentials ( principal . access_key , principal . secret_key ) endpoint = AWSServiceEndpoint ( ) endpoint . set_method ( request . method ) endpoint . set_canonical_host ( request . getHeader ( "Host" ) ) path = request . path if self . path is not None : path = "%s/%s" % ( self . path . rstrip ( "/" ) , path . lstrip ( "/" ) ) endpoint . set_path ( path ) signature = Signature ( creds , endpoint , params , signature_method = args [ "signature_method" ] , signature_version = args [ "signature_version" ] ) if signature . compute ( ) != args [ "signature" ] : raise APIError ( 403 , "SignatureDoesNotMatch" , "The request signature we calculated does not " "match the signature you provided. Check your " "key and signing method." ) | Validate the signature . |
36,732 | def render_GET ( self , request ) : if not request . args : request . setHeader ( "Content-Type" , "text/plain" ) return self . get_status_text ( ) else : self . handle ( request ) return NOT_DONE_YET | Handle a GET request . |
36,733 | def getSignatureKey ( key , dateStamp , regionName , serviceName ) : kDate = sign ( ( b'AWS4' + key ) , dateStamp ) kRegion = sign ( kDate , regionName ) kService = sign ( kRegion , serviceName ) kSigning = sign ( kService , b'aws4_request' ) return kSigning | Generate the signing key for AWS V4 requests . |
36,734 | def _make_canonical_uri ( parsed ) : path = urllib . quote ( parsed . path ) canonical_parsed = parsed . _replace ( path = path , params = '' , query = '' , fragment = '' ) return urlparse . urlunparse ( canonical_parsed ) | Return the canonical URI for a parsed URL . |
36,735 | def _make_canonical_query_string ( parsed ) : query_params = urlparse . parse_qs ( parsed . query , keep_blank_values = True ) sorted_query_params = sorted ( ( k , v ) for k , vs in query_params . items ( ) for v in vs ) return urllib . urlencode ( sorted_query_params ) | Return the canonical query string for a parsed URL . |
36,736 | def _make_canonical_headers ( headers , headers_to_sign ) : pairs = [ ] for name in headers_to_sign : if name not in headers : continue values = headers [ name ] if not isinstance ( values , ( list , tuple ) ) : values = [ values ] comma_values = b',' . join ( ' ' . join ( line . strip ( ) . split ( ) ) for value in values for line in value . splitlines ( ) ) pairs . append ( ( name . lower ( ) , comma_values ) ) sorted_pairs = sorted ( b'%s:%s' % ( name , value ) for name , value in sorted ( pairs ) ) return b'\n' . join ( sorted_pairs ) + b'\n' | Return canonicalized headers . |
36,737 | def _make_signed_headers ( headers , headers_to_sign ) : return b";" . join ( header . lower ( ) for header in sorted ( headers_to_sign ) if header in headers ) | Return a semicolon - delimited list of headers to sign . |
36,738 | def linear_interpolation ( first , last , steps ) : result = [ ] for step in xrange ( 0 , steps ) : fpart = ( steps - step ) * first lpart = ( step + 1 ) * last value = ( fpart + lpart ) / float ( steps + 1 ) result . append ( value ) return result | Interpolates all missing values using linear interpolation . |
36,739 | def visit_file ( self , item , parent ) : if self . is_large_file ( item ) : self . large_items . append ( ( item , parent ) ) else : self . small_item_task_builder . visit_file ( item , parent ) | If file is large add it to the large items to be processed after small task list . else file is small add it to the small task list . |
36,740 | def upload_large_items ( self ) : for local_file , parent in self . large_items : if local_file . need_to_send : self . process_large_file ( local_file , parent ) | Upload files that were too large . |
36,741 | def visit_project ( self , item ) : if not item . remote_id : command = CreateProjectCommand ( self . settings , item ) self . task_runner_add ( None , item , command ) else : self . settings . project_id = item . remote_id | Adds create project command to task runner if project doesn t already exist . |
36,742 | def visit_folder ( self , item , parent ) : if not item . remote_id : command = CreateFolderCommand ( self . settings , item , parent ) self . task_runner_add ( parent , item , command ) | Adds create folder command to task runner if folder doesn t already exist . |
36,743 | def visit_file ( self , item , parent ) : if item . need_to_send : if item . size > self . settings . config . upload_bytes_per_chunk : msg = "Programmer Error: Trying to upload large file as small item size:{} name:{}" raise ValueError ( msg . format ( item . size , item . name ) ) else : command = CreateSmallFileCommand ( self . settings , item , parent , self . settings . file_upload_post_processor ) self . task_runner_add ( parent , item , command ) | If file is small add create small file command otherwise raise error . Large files shouldn t be passed to SmallItemUploadTaskBuilder . |
36,744 | def bradykinesia ( self , data_frame , method = 'fft' ) : try : data_frame_resampled = self . resample_signal ( data_frame ) data_frame_dc = self . dc_remove_signal ( data_frame_resampled ) data_frame_filtered = self . filter_signal ( data_frame_dc , 'dc_mag_sum_acc' ) if method == 'fft' : data_frame_fft = self . fft_signal ( data_frame_filtered ) return self . amplitude_by_fft ( data_frame_fft ) else : return self . amplitude_by_welch ( data_frame_filtered ) except ValueError as verr : logging . error ( "TremorProcessor bradykinesia ValueError ->%s" , verr . message ) except : logging . error ( "Unexpected error on TemorProcessor bradykinesia: %s" , sys . exc_info ( ) [ 0 ] ) | This method calculates the bradykinesia amplitude of the data frame . It accepts two different methods \ fft and welch . First the signal gets re - sampled dc removed and then high pass filtered . |
36,745 | def main ( argv , reactor = None ) : if reactor is None : from twisted . internet import gtk2reactor gtk2reactor . install ( ) from twisted . internet import reactor try : AWSStatusIndicator ( reactor ) gobject . set_application_name ( "aws-status" ) reactor . run ( ) except ValueError : pass | Run the client GUI . |
36,746 | def main ( arguments , output = None , testing_mode = None ) : def run_command ( arguments , output , reactor ) : if output is None : output = sys . stdout try : command = get_command ( arguments , output ) except UsageError : print >> output , USAGE_MESSAGE . strip ( ) if reactor : reactor . callLater ( 0 , reactor . stop ) except Exception , e : print >> output , "ERROR:" , str ( e ) if reactor : reactor . callLater ( 0 , reactor . stop ) else : deferred = command . run ( ) if reactor : deferred . addCallback ( lambda ignored : reactor . stop ( ) ) if not testing_mode : from twisted . internet import reactor reactor . callLater ( 0 , run_command , arguments , output , reactor ) reactor . run ( ) else : run_command ( arguments , output , None ) | Entry point parses command - line arguments runs the specified EC2 API method and prints the response to the screen . |
36,747 | def execute ( self , timeSeries ) : self . _calculate_values_to_forecast ( timeSeries ) alpha = self . _parameters [ "smoothingFactor" ] valuesToForecast = self . _parameters [ "valuesToForecast" ] resultList = [ ] estimator = None lastT = None append = resultList . append for idx in xrange ( len ( timeSeries ) ) : t = timeSeries [ idx ] if estimator is None : estimator = t [ 1 ] continue if 0 == len ( resultList ) : append ( [ t [ 0 ] , estimator ] ) lastT = t continue error = lastT [ 1 ] - estimator estimator = estimator + alpha * error lastT = t append ( [ t [ 0 ] , estimator ] ) if valuesToForecast > 0 : currentTime = resultList [ - 1 ] [ 0 ] normalizedTimeDiff = currentTime - resultList [ - 2 ] [ 0 ] for idx in xrange ( valuesToForecast ) : currentTime += normalizedTimeDiff error = lastT [ 1 ] - estimator estimator = estimator + alpha * error append ( [ currentTime , estimator ] ) lastT = resultList [ - 1 ] return TimeSeries . from_twodim_list ( resultList ) | Creates a new TimeSeries containing the smoothed and forcasted values . |
36,748 | def _get_parameter_intervals ( self ) : parameterIntervals = { } parameterIntervals [ "smoothingFactor" ] = [ 0.0 , 1.0 , False , False ] parameterIntervals [ "trendSmoothingFactor" ] = [ 0.0 , 1.0 , False , False ] return parameterIntervals | Returns the intervals for the methods parameter . |
36,749 | def _calculate_forecast ( self , originalTimeSeries , smoothedData , seasonValues , lastSmoothingParams ) : forecastResults = [ ] lastEstimator , lastSeasonValue , lastTrend = lastSmoothingParams seasonLength = self . get_parameter ( "seasonLength" ) currentTime = smoothedData [ - 1 ] [ 0 ] normalizedTimeDiff = currentTime - smoothedData [ - 2 ] [ 0 ] for m in xrange ( 1 , self . _parameters [ "valuesToForecast" ] + 1 ) : currentTime += normalizedTimeDiff lastSeasonValue = seasonValues [ ( len ( originalTimeSeries ) + m - 2 ) % seasonLength ] forecast = ( lastEstimator + m * lastTrend ) * lastSeasonValue forecastResults . append ( [ currentTime , forecast ] ) return forecastResults | Calculates the actual forecasted based on the input data . |
36,750 | def initSeasonFactors ( self , timeSeries ) : seasonLength = self . get_parameter ( "seasonLength" ) try : seasonValues = self . get_parameter ( "seasonValues" ) assert seasonLength == len ( seasonValues ) , "Preset Season Values have to have to be of season's length" return seasonValues except KeyError : pass seasonValues = [ ] completeCycles = len ( timeSeries ) / seasonLength A = { } for i in xrange ( seasonLength ) : c_i = 0 for j in xrange ( completeCycles ) : if j not in A : A [ j ] = self . computeA ( j , timeSeries ) c_i += timeSeries [ ( seasonLength * j ) + i ] [ 1 ] / A [ j ] c_i /= completeCycles seasonValues . append ( c_i ) return seasonValues | Computes the initial season smoothing factors . |
36,751 | def initialTrendSmoothingFactors ( self , timeSeries ) : result = 0.0 seasonLength = self . get_parameter ( "seasonLength" ) k = min ( len ( timeSeries ) - seasonLength , seasonLength ) for i in xrange ( 0 , k ) : result += ( timeSeries [ seasonLength + i ] [ 1 ] - timeSeries [ i ] [ 1 ] ) / seasonLength return result / k | Calculate the initial Trend smoothing Factor b0 . |
36,752 | def computeA ( self , j , timeSeries ) : seasonLength = self . get_parameter ( "seasonLength" ) A_j = 0 for i in range ( seasonLength ) : A_j += timeSeries [ ( seasonLength * ( j ) ) + i ] [ 1 ] return A_j / seasonLength | Calculates A_j . Aj is the average value of x in the jth cycle of your data |
36,753 | def frequency_of_peaks ( self , x , start_offset = 100 , end_offset = 100 ) : peaks_data = x [ start_offset : - end_offset ] . values maxtab , mintab = peakdet ( peaks_data , self . delta ) x = np . mean ( peaks_data [ maxtab [ 1 : , 0 ] . astype ( int ) ] - peaks_data [ maxtab [ : - 1 , 0 ] . astype ( int ) ] ) frequency_of_peaks = abs ( 1 / x ) return frequency_of_peaks | This method assess the frequency of the peaks on any given 1 - dimensional time series . |
36,754 | def walk_regularity_symmetry ( self , data_frame ) : def _symmetry ( v ) : maxtab , _ = peakdet ( v , self . delta ) return maxtab [ 1 ] [ 1 ] , maxtab [ 2 ] [ 1 ] step_regularity_x , stride_regularity_x = _symmetry ( autocorrelation ( data_frame . x ) ) step_regularity_y , stride_regularity_y = _symmetry ( autocorrelation ( data_frame . y ) ) step_regularity_z , stride_regularity_z = _symmetry ( autocorrelation ( data_frame . z ) ) symmetry_x = step_regularity_x - stride_regularity_x symmetry_y = step_regularity_y - stride_regularity_y symmetry_z = step_regularity_z - stride_regularity_z step_regularity = np . array ( [ step_regularity_x , step_regularity_y , step_regularity_z ] ) stride_regularity = np . array ( [ stride_regularity_x , stride_regularity_y , stride_regularity_z ] ) walk_symmetry = np . array ( [ symmetry_x , symmetry_y , symmetry_z ] ) return step_regularity , stride_regularity , walk_symmetry | This method extracts the step and stride regularity and also walk symmetry . |
36,755 | def heel_strikes ( self , x ) : data = x . values data -= data . mean ( ) filtered = butter_lowpass_filter ( data , self . sampling_frequency , self . cutoff_frequency , self . filter_order ) transitions = crossings_nonzero_pos2neg ( filtered ) strike_indices_smooth = [ ] filter_threshold = np . abs ( self . delta * np . max ( filtered ) ) for i in range ( 1 , np . size ( transitions ) ) : segment = range ( transitions [ i - 1 ] , transitions [ i ] ) imax = np . argmax ( filtered [ segment ] ) if filtered [ segment [ imax ] ] > filter_threshold : strike_indices_smooth . append ( segment [ imax ] ) interpeak = compute_interpeak ( data , self . sampling_frequency ) decel = np . int ( interpeak / 2 ) strikes_idx = [ ] for ismooth in strike_indices_smooth : istrike = np . argmax ( data [ ismooth - decel : ismooth + decel ] ) istrike = istrike + ismooth - decel strikes_idx . append ( istrike ) strikes = np . asarray ( strikes_idx ) strikes -= strikes [ 0 ] strikes = strikes / self . sampling_frequency return strikes , np . array ( strikes_idx ) | Estimate heel strike times between sign changes in accelerometer data . |
36,756 | def gait_regularity_symmetry ( self , x , average_step_duration = 'autodetect' , average_stride_duration = 'autodetect' , unbias = 1 , normalize = 2 ) : if ( average_step_duration == 'autodetect' ) or ( average_stride_duration == 'autodetect' ) : strikes , _ = self . heel_strikes ( x ) step_durations = [ ] for i in range ( 1 , np . size ( strikes ) ) : step_durations . append ( strikes [ i ] - strikes [ i - 1 ] ) average_step_duration = np . mean ( step_durations ) number_of_steps = np . size ( strikes ) strides1 = strikes [ 0 : : 2 ] strides2 = strikes [ 1 : : 2 ] stride_durations1 = [ ] for i in range ( 1 , np . size ( strides1 ) ) : stride_durations1 . append ( strides1 [ i ] - strides1 [ i - 1 ] ) stride_durations2 = [ ] for i in range ( 1 , np . size ( strides2 ) ) : stride_durations2 . append ( strides2 [ i ] - strides2 [ i - 1 ] ) strides = [ strides1 , strides2 ] stride_durations = [ stride_durations1 , stride_durations2 ] average_stride_duration = np . mean ( ( np . mean ( stride_durations1 ) , np . mean ( stride_durations2 ) ) ) return self . gait_regularity_symmetry ( x , average_step_duration , average_stride_duration ) else : coefficients , _ = autocorrelate ( x , unbias = 1 , normalize = 2 ) step_period = np . int ( np . round ( 1 / average_step_duration ) ) stride_period = np . int ( np . round ( 1 / average_stride_duration ) ) step_regularity = coefficients [ step_period ] stride_regularity = coefficients [ stride_period ] symmetry = np . abs ( stride_regularity - step_regularity ) return step_regularity , stride_regularity , symmetry | Compute step and stride regularity and symmetry from accelerometer data with the help of steps and strides . |
36,757 | def gait ( self , x ) : data = x strikes , _ = self . heel_strikes ( data ) step_durations = [ ] for i in range ( 1 , np . size ( strikes ) ) : step_durations . append ( strikes [ i ] - strikes [ i - 1 ] ) avg_step_duration = np . mean ( step_durations ) sd_step_durations = np . std ( step_durations ) number_of_steps = np . size ( strikes ) strides1 = strikes [ 0 : : 2 ] strides2 = strikes [ 1 : : 2 ] stride_durations1 = [ ] for i in range ( 1 , np . size ( strides1 ) ) : stride_durations1 . append ( strides1 [ i ] - strides1 [ i - 1 ] ) stride_durations2 = [ ] for i in range ( 1 , np . size ( strides2 ) ) : stride_durations2 . append ( strides2 [ i ] - strides2 [ i - 1 ] ) strides = [ strides1 , strides2 ] stride_durations = [ stride_durations1 , stride_durations2 ] avg_number_of_strides = np . mean ( [ np . size ( strides1 ) , np . size ( strides2 ) ] ) avg_stride_duration = np . mean ( ( np . mean ( stride_durations1 ) , np . mean ( stride_durations2 ) ) ) sd_stride_durations = np . mean ( ( np . std ( stride_durations1 ) , np . std ( stride_durations2 ) ) ) step_period = np . int ( np . round ( 1 / avg_step_duration ) ) stride_period = np . int ( np . round ( 1 / avg_stride_duration ) ) step_regularity , stride_regularity , symmetry = self . gait_regularity_symmetry ( data , average_step_duration = avg_step_duration , average_stride_duration = avg_stride_duration ) cadence = None if self . duration : cadence = number_of_steps / self . duration velocity = None avg_step_length = None avg_stride_length = None if self . distance : velocity = self . distance / self . duration avg_step_length = number_of_steps / self . distance avg_stride_length = avg_number_of_strides / self . distance return [ number_of_steps , cadence , velocity , avg_step_length , avg_stride_length , step_durations , avg_step_duration , sd_step_durations , strides , stride_durations , avg_number_of_strides , avg_stride_duration , sd_stride_durations , step_regularity , stride_regularity , symmetry ] | Extract gait features from estimated heel strikes and accelerometer data . |
36,758 | def add_manual_segmentation_to_data_frame ( self , data_frame , segmentation_dictionary ) : data_frame [ 'segmentation' ] = 'unknown' for i , ( k , v ) in enumerate ( segmentation_dictionary . items ( ) ) : for start , end in v : if type ( start ) != np . datetime64 : if start < 0 : start = 0 if end > data_frame . size : end = data_frame . size start = data_frame . index . values [ start ] end = data_frame . index . values [ end ] data_frame . loc [ start : end , 'segmentation' ] = k return data_frame | Utility method to store manual segmentation of gait time series . |
36,759 | def error_wrapper ( error , errorClass ) : http_status = 0 if error . check ( TwistedWebError ) : xml_payload = error . value . response if error . value . status : http_status = int ( error . value . status ) else : error . raiseException ( ) if http_status >= 400 : if not xml_payload : error . raiseException ( ) try : fallback_error = errorClass ( xml_payload , error . value . status , str ( error . value ) , error . value . response ) except ( ParseError , AWSResponseParseError ) : error_message = http . RESPONSES . get ( http_status ) fallback_error = TwistedWebError ( http_status , error_message , error . value . response ) raise fallback_error elif 200 <= http_status < 300 : return str ( error . value ) else : error . raiseException ( ) | We want to see all error messages from cloud services . Amazon s EC2 says that their errors are accompanied either by a 400 - series or 500 - series HTTP response code . As such the first thing we want to do is check to see if the error is in that range . If it is we then need to see if the error message is an EC2 one . |
36,760 | def _headers ( self , headers_dict ) : return Headers ( dict ( ( k , [ v ] ) for ( k , v ) in headers_dict . items ( ) ) ) | Convert dictionary of headers into twisted . web . client . Headers object . |
36,761 | def _unpack_headers ( self , headers ) : return dict ( ( k , v [ 0 ] ) for ( k , v ) in headers . getAllRawHeaders ( ) ) | Unpack twisted . web . client . Headers object to dict . This is to provide backwards compatability . |
36,762 | def get_request_headers ( self , * args , ** kwds ) : if self . request_headers : return self . _unpack_headers ( self . request_headers ) | A convenience method for obtaining the headers that were sent to the S3 server . |
36,763 | def _handle_response ( self , response ) : self . client . status = response . code self . response_headers = response . headers if self . _method . upper ( ) == 'HEAD' or response . code == NO_CONTENT : return succeed ( '' ) receiver = self . receiver_factory ( ) receiver . finished = d = Deferred ( ) receiver . content_length = response . length response . deliverBody ( receiver ) if response . code >= 400 : d . addCallback ( self . _fail_response , response ) return d | Handle the HTTP response by memoing the headers and then delivering bytes . |
36,764 | def get_response_headers ( self , * args , ** kwargs ) : if self . response_headers : return self . _unpack_headers ( self . response_headers ) | A convenience method for obtaining the headers that were sent from the S3 server . |
36,765 | def create_jwt_token ( secret , client_id ) : assert secret , "Missing secret key" assert client_id , "Missing client id" headers = { "typ" : __type__ , "alg" : __algorithm__ } claims = { 'iss' : client_id , 'iat' : epoch_seconds ( ) } return jwt . encode ( payload = claims , key = secret , headers = headers ) . decode ( ) | Create JWT token for GOV . UK Notify |
36,766 | def decode_jwt_token ( token , secret ) : try : decoded_token = jwt . decode ( token , key = secret . encode ( ) , verify = True , algorithms = [ __algorithm__ ] , leeway = __bound__ ) if 'iss' not in decoded_token : raise TokenIssuerError if 'iat' not in decoded_token : raise TokenIssuedAtError now = epoch_seconds ( ) iat = int ( decoded_token [ 'iat' ] ) if now > ( iat + __bound__ ) : raise TokenExpiredError ( "Token has expired" , decoded_token ) if iat > ( now + __bound__ ) : raise TokenExpiredError ( "Token can not be in the future" , decoded_token ) return True except jwt . InvalidIssuedAtError : raise TokenExpiredError ( "Token has invalid iat field" , decode_token ( token ) ) except jwt . DecodeError : raise TokenDecodeError | Validates and decodes the JWT token Token checked for - signature of JWT token - token issued date is valid |
36,767 | def parse ( stream , with_text = False ) : buffer = '' text_buffer = '' in_lexical_unit = False in_superblank = False for char in stream : if in_superblank : if char == ']' : in_superblank = False text_buffer += char elif char == '\\' : text_buffer += char text_buffer += next ( stream ) else : text_buffer += char elif in_lexical_unit : if char == '$' : if with_text : yield ( text_buffer , LexicalUnit ( buffer ) ) else : yield LexicalUnit ( buffer ) buffer = '' text_buffer = '' in_lexical_unit = False elif char == '\\' : buffer += char buffer += next ( stream ) else : buffer += char else : if char == '[' : in_superblank = True text_buffer += char elif char == '^' : in_lexical_unit = True elif char == '\\' : text_buffer += char text_buffer += next ( stream ) else : text_buffer += char | Generates lexical units from a character stream . |
36,768 | def from_twodim_list ( cls , datalist , tsformat = None ) : ts = TimeSeries ( ) ts . set_timeformat ( tsformat ) for entry in datalist : ts . add_entry ( * entry [ : 2 ] ) ts . _normalized = ts . is_normalized ( ) ts . sort_timeseries ( ) return ts | Creates a new TimeSeries instance from the data stored inside a two dimensional list . |
36,769 | def initialize_from_sql_cursor ( self , sqlcursor ) : tuples = 0 data = sqlcursor . fetchmany ( ) while 0 < len ( data ) : for entry in data : self . add_entry ( str ( entry [ 0 ] ) , entry [ 1 ] ) data = sqlcursor . fetchmany ( ) self . _normalized = self . _check_normalization return tuples | Initializes the TimeSeries s data from the given SQL cursor . |
36,770 | def convert_timestamp_to_epoch ( cls , timestamp , tsformat ) : return time . mktime ( time . strptime ( timestamp , tsformat ) ) | Converts the given timestamp into a float representing UNIX - epochs . |
36,771 | def convert_epoch_to_timestamp ( cls , timestamp , tsformat ) : return time . strftime ( tsformat , time . gmtime ( timestamp ) ) | Converts the given float representing UNIX - epochs into an actual timestamp . |
36,772 | def sort_timeseries ( self , ascending = True ) : if ascending and self . _sorted : return sortorder = 1 if not ascending : sortorder = - 1 self . _predefinedSorted = False self . _timeseriesData . sort ( key = lambda i : sortorder * i [ 0 ] ) self . _sorted = ascending return self | Sorts the data points within the TimeSeries according to their occurrence inline . |
36,773 | def sorted_timeseries ( self , ascending = True ) : sortorder = 1 if not ascending : sortorder = - 1 data = sorted ( self . _timeseriesData , key = lambda i : sortorder * i [ 0 ] ) newTS = TimeSeries ( self . _normalized ) for entry in data : newTS . add_entry ( * entry ) newTS . _sorted = ascending return newTS | Returns a sorted copy of the TimeSeries preserving the original one . |
36,774 | def normalize ( self , normalizationLevel = "minute" , fusionMethod = "mean" , interpolationMethod = "linear" ) : if self . _normalizationLevel == normalizationLevel : if self . _normalized : return if normalizationLevel not in NormalizationLevels : raise ValueError ( "Normalization level %s is unknown." % normalizationLevel ) if fusionMethod not in FusionMethods : raise ValueError ( "Fusion method %s is unknown." % fusionMethod ) if interpolationMethod not in InterpolationMethods : raise ValueError ( "Interpolation method %s is unknown." % interpolationMethod ) if len ( self ) < 2 : self . _normalized = True return self . _normalizationLevel = normalizationLevel normalizationLevel = NormalizationLevels [ normalizationLevel ] fusionMethod = FusionMethods [ fusionMethod ] interpolationMethod = InterpolationMethods [ interpolationMethod ] self . sort_timeseries ( ) start = self . _timeseriesData [ 0 ] [ 0 ] end = self . _timeseriesData [ - 1 ] [ 0 ] span = end - start bucketcnt = int ( span / normalizationLevel ) + 1 buckethalfwidth = normalizationLevel / 2.0 bucketstart = start + buckethalfwidth buckets = [ [ bucketstart + idx * normalizationLevel ] for idx in xrange ( bucketcnt ) ] tsdStartIdx = 0 tsdEndIdx = 0 tsdlength = len ( self ) for idx in xrange ( bucketcnt ) : bucket = buckets [ idx ] bucketend = bucket [ 0 ] + buckethalfwidth while tsdEndIdx < tsdlength and self . _timeseriesData [ tsdEndIdx ] [ 0 ] < bucketend : tsdEndIdx += 1 if tsdStartIdx == tsdEndIdx : continue values = [ i [ 1 ] for i in self . _timeseriesData [ tsdStartIdx : tsdEndIdx ] ] bucket . append ( fusionMethod ( values ) ) tsdStartIdx = tsdEndIdx missingCount = 0 lastIdx = 0 for idx in xrange ( bucketcnt ) : if 1 == len ( buckets [ idx ] ) : missingCount += 1 continue if idx == 0 : lastIdx = idx continue if 0 == missingCount : lastIdx = idx continue missingValues = interpolationMethod ( buckets [ lastIdx ] [ 1 ] , buckets [ idx ] [ 1 ] , missingCount ) for idx2 in xrange ( 1 , missingCount + 1 ) : buckets [ lastIdx + idx2 ] . append ( missingValues [ idx2 - 1 ] ) lastIdx = idx missingCount = 0 self . _timeseriesData = buckets self . _normalized = True | Normalizes the TimeSeries data points . |
36,775 | def _check_normalization ( self ) : lastDistance = None distance = None for idx in xrange ( len ( self ) - 1 ) : distance = self [ idx + 1 ] [ 0 ] - self [ idx ] [ 0 ] if lastDistance is None : lastDistance = distance continue if lastDistance != distance : return False lastDistance = distance return True | Checks if the TimeSeries is normalized . |
36,776 | def sample ( self , percentage ) : if not ( 0.0 < percentage < 1.0 ) : raise ValueError ( "Parameter percentage has to be in (0.0, 1.0)." ) cls = self . __class__ value_count = int ( len ( self ) * percentage ) values = random . sample ( self , value_count ) sample = cls . from_twodim_list ( values ) rest_values = self . _timeseriesData [ : ] for value in values : rest_values . remove ( value ) rest = cls . from_twodim_list ( rest_values ) return sample , rest | Samples with replacement from the TimeSeries . Returns the sample and the remaining timeseries . The original timeseries is not changed . |
36,777 | def from_twodim_list ( cls , datalist , tsformat = None , dimensions = 1 ) : ts = MultiDimensionalTimeSeries ( dimensions = dimensions ) ts . set_timeformat ( tsformat ) for entry in datalist : ts . add_entry ( entry [ 0 ] , entry [ 1 ] ) ts . _normalized = ts . is_normalized ( ) ts . sort_timeseries ( ) return ts | Creates a new MultiDimensionalTimeSeries instance from the data stored inside a two dimensional list . |
36,778 | def _writeloop ( self , consumer ) : while True : bytes = self . _inputFile . read ( self . _readSize ) if not bytes : self . _inputFile . close ( ) break consumer . write ( bytes ) yield None | Return an iterator which reads one chunk of bytes from the input file and writes them to the consumer for each time it is iterated . |
36,779 | def run ( self ) : progress_printer = ProgressPrinter ( self . different_items . total_items ( ) , msg_verb = 'sending' ) upload_settings = UploadSettings ( self . config , self . remote_store . data_service , progress_printer , self . project_name_or_id , self . file_upload_post_processor ) project_uploader = ProjectUploader ( upload_settings ) project_uploader . run ( self . local_project ) progress_printer . finished ( ) | Upload different items within local_project to remote store showing a progress bar . |
36,780 | def get_upload_report ( self ) : project = self . remote_store . fetch_remote_project ( self . project_name_or_id , must_exist = True , include_children = False ) report = UploadReport ( project . name ) report . walk_project ( self . local_project ) return report . get_content ( ) | Generate and print a report onto stdout . |
36,781 | def get_url_msg ( self ) : msg = 'URL to view project' project_id = self . local_project . remote_id url = '{}: https://{}/#/project/{}' . format ( msg , self . config . get_portal_url_base ( ) , project_id ) return url | Print url to view the project via dds portal . |
36,782 | def _send_file_external_with_retry ( self , http_verb , host , url , http_headers , chunk ) : count = 0 retry_times = 1 if http_verb == 'PUT' : retry_times = SEND_EXTERNAL_PUT_RETRY_TIMES while True : try : return self . data_service . send_external ( http_verb , host , url , http_headers , chunk ) except requests . exceptions . ConnectionError : count += 1 if count < retry_times : if count == 1 : self . _show_retry_warning ( host ) time . sleep ( SEND_EXTERNAL_RETRY_SECONDS ) self . data_service . recreate_requests_session ( ) else : raise | Send chunk to host url using http_verb . If http_verb is PUT and a connection error occurs retry a few times . Pauses between retries . Raises if unsuccessful . |
36,783 | def run ( self ) : processes = [ ] progress_queue = ProgressQueue ( Queue ( ) ) num_chunks = ParallelChunkProcessor . determine_num_chunks ( self . config . upload_bytes_per_chunk , self . local_file . size ) work_parcels = ParallelChunkProcessor . make_work_parcels ( self . config . upload_workers , num_chunks ) for ( index , num_items ) in work_parcels : processes . append ( self . make_and_start_process ( index , num_items , progress_queue ) ) wait_for_processes ( processes , num_chunks , progress_queue , self . watcher , self . local_file ) | Sends contents of a local file to a remote data service . |
36,784 | def send ( self ) : sent_chunks = 0 chunk_num = self . index with open ( self . filename , 'rb' ) as infile : infile . seek ( self . index * self . chunk_size ) while sent_chunks != self . num_chunks_to_send : chunk = infile . read ( self . chunk_size ) self . _send_chunk ( chunk , chunk_num ) self . progress_queue . processed ( 1 ) chunk_num += 1 sent_chunks += 1 | For each chunk we need to send create upload url and send bytes . Raises exception on error . |
36,785 | def _in_valid_interval ( self , parameter , value ) : if parameter not in self . _parameterIntervals : return True interval = self . _parameterIntervals [ parameter ] if interval [ 2 ] and interval [ 3 ] : return interval [ 0 ] <= value <= interval [ 1 ] if not interval [ 2 ] and interval [ 3 ] : return interval [ 0 ] < value <= interval [ 1 ] if interval [ 2 ] and not interval [ 3 ] : return interval [ 0 ] <= value < interval [ 1 ] return interval [ 0 ] < value < interval [ 1 ] | Returns if the parameter is within its valid interval . |
36,786 | def _get_value_error_message_for_invalid_prarameter ( self , parameter , value ) : if parameter not in self . _parameterIntervals : return interval = self . _parameterIntervals [ parameter ] return "%s has to be in %s%s, %s%s. Current value is %s." % ( parameter , BaseMethod . _interval_definitions [ interval [ 2 ] ] [ 0 ] , interval [ 0 ] , interval [ 1 ] , BaseMethod . _interval_definitions [ interval [ 3 ] ] [ 1 ] , value ) | Returns the ValueError message for the given parameter . |
36,787 | def set_parameter ( self , name , value ) : if not self . _in_valid_interval ( name , value ) : raise ValueError ( self . _get_value_error_message_for_invalid_prarameter ( name , value ) ) self . _parameters [ name ] = value | Sets a parameter for the BaseMethod . |
36,788 | def can_be_executed ( self ) : missingParams = filter ( lambda rp : rp not in self . _parameters , self . _requiredParameters ) return len ( missingParams ) == 0 | Returns if the method can already be executed . |
36,789 | def set_parameter ( self , name , value ) : if name == "valuesToForecast" : self . _forecastUntil = None return super ( BaseForecastingMethod , self ) . set_parameter ( name , value ) | Sets a parameter for the BaseForecastingMethod . |
36,790 | def _calculate_values_to_forecast ( self , timeSeries ) : if self . _forecastUntil is None : return if not timeSeries . is_sorted ( ) : raise ValueError ( "timeSeries has to be sorted." ) if not timeSeries . is_normalized ( ) : raise ValueError ( "timeSeries has to be normalized." ) timediff = timeSeries [ - 1 ] [ 0 ] - timeSeries [ - 2 ] [ 0 ] forecastSpan = self . _forecastUntil - timeSeries [ - 1 ] [ 0 ] self . set_parameter ( "valuesToForecast" , int ( forecastSpan / timediff ) + 1 ) | Calculates the number of values that need to be forecasted to match the goal set in forecast_until . |
36,791 | def execute ( self , timeSeries ) : windowsize = self . _parameters [ "windowsize" ] if len ( timeSeries ) < windowsize : raise ValueError ( "windowsize is larger than the number of elements in timeSeries." ) tsLength = len ( timeSeries ) nbrOfLoopRuns = tsLength - windowsize + 1 res = TimeSeries ( ) for idx in xrange ( nbrOfLoopRuns ) : end = idx + windowsize data = timeSeries [ idx : end ] timestamp = data [ windowsize // 2 ] [ 0 ] value = sum ( [ i [ 1 ] for i in data ] ) / windowsize res . add_entry ( timestamp , value ) res . sort_timeseries ( ) return res | Creates a new TimeSeries containing the SMA values for the predefined windowsize . |
36,792 | def _create_child ( self , tag ) : return etree . SubElement ( self . _root , self . _get_namespace_tag ( tag ) ) | Create a new child element with the given tag . |
36,793 | def create ( self , root = None , namespace = None ) : if root is not None : tag = root . tag if root . nsmap : namespace = root . nsmap [ None ] tag = tag [ len ( namespace ) + 2 : ] if tag != self . tag : raise WSDLParseError ( "Expected response with tag '%s', but " "got '%s' instead" % ( self . tag , tag ) ) return SequenceItem ( self , root , namespace ) | Create a sequence element with the given root . |
36,794 | def set ( self , child , min_occurs = 1 , max_occurs = 1 ) : if isinstance ( child , LeafSchema ) : raise RuntimeError ( "Sequence can't have leaf children" ) if self . child is not None : raise RuntimeError ( "Sequence has already a child" ) if min_occurs is None or max_occurs is None : raise RuntimeError ( "Sequence node without min or max" ) if isinstance ( child , LeafSchema ) : raise RuntimeError ( "Sequence node with leaf child type" ) if not child . tag == "item" : raise RuntimeError ( "Sequence node with bad child tag" ) self . child = child self . min_occurs = min_occurs self . max_occurs = max_occurs return child | Set the schema for the sequence children . |
36,795 | def append ( self ) : tag = self . _schema . tag children = self . _root . getchildren ( ) if len ( children ) >= self . _schema . max_occurs : raise WSDLParseError ( "Too many items in tag '%s'" % tag ) schema = self . _schema . child tag = "item" if self . _namespace is not None : tag = "{%s}%s" % ( self . _namespace , tag ) child = etree . SubElement ( self . _root , tag ) return schema . create ( child ) | Append a new item to the sequence appending it to the end . |
36,796 | def _get_child ( self , children , index ) : try : return children [ index ] except IndexError : raise WSDLParseError ( "Non existing item in tag '%s'" % self . _schema . tag ) | Return the child with the given index . |
36,797 | def _parse_type ( self , element , types ) : name = element . attrib [ "name" ] type = element . attrib [ "type" ] if not type . startswith ( "tns:" ) : raise RuntimeError ( "Unexpected element type %s" % type ) type = type [ 4 : ] [ children ] = types [ type ] [ 0 ] types [ type ] [ 1 ] = True self . _remove_namespace_from_tag ( children ) if children . tag not in ( "sequence" , "choice" ) : raise RuntimeError ( "Unexpected children type %s" % children . tag ) if children [ 0 ] . attrib [ "name" ] == "item" : schema = SequenceSchema ( name ) else : schema = NodeSchema ( name ) for child in children : self . _remove_namespace_from_tag ( child ) if child . tag == "element" : name , type , min_occurs , max_occurs = self . _parse_child ( child ) if type in self . leaf_types : if max_occurs != 1 : raise RuntimeError ( "Unexpected max value for leaf" ) if not isinstance ( schema , NodeSchema ) : raise RuntimeError ( "Attempt to add leaf to a non-node" ) schema . add ( LeafSchema ( name ) , min_occurs = min_occurs ) else : if name == "item" : if not isinstance ( schema , SequenceSchema ) : raise RuntimeError ( "Attempt to set child for " "non-sequence" ) schema . set ( self . _parse_type ( child , types ) , min_occurs = min_occurs , max_occurs = max_occurs ) else : if max_occurs != 1 : raise RuntimeError ( "Unexpected max for node" ) if not isinstance ( schema , NodeSchema ) : raise RuntimeError ( "Unexpected schema type" ) schema . add ( self . _parse_type ( child , types ) , min_occurs = min_occurs ) elif child . tag == "choice" : pass else : raise RuntimeError ( "Unexpected child type" ) return schema | Parse a complexType element . |
36,798 | def _parse_child ( self , child ) : if set ( child . attrib ) - set ( [ "name" , "type" , "minOccurs" , "maxOccurs" ] ) : raise RuntimeError ( "Unexpected attribute in child" ) name = child . attrib [ "name" ] type = child . attrib [ "type" ] . split ( ":" ) [ 1 ] min_occurs = child . attrib . get ( "minOccurs" ) max_occurs = child . attrib . get ( "maxOccurs" ) if min_occurs is None : min_occurs = "1" min_occurs = int ( min_occurs ) if max_occurs is None : max_occurs = "1" if max_occurs != "unbounded" : max_occurs = int ( max_occurs ) return name , type , min_occurs , max_occurs | Parse a single child element . |
36,799 | def finished ( self ) : self . data_service . update_activity ( self . id , self . name , self . desc , started_on = self . started , ended_on = self . _current_timestamp_str ( ) ) | Mark the activity as finished |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.