idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
47,600
def customers(self):
    """Return all distinct Customer records associated with this lesson.

    Includes both customers signed up via the registration/payment system and
    those attached directly as private lesson customers without payment.
    """
    direct_signup = Q(privatelessoncustomer__lesson=self)
    registered = Q(registration__eventregistration__event=self)
    return Customer.objects.filter(direct_signup | registered).distinct()
List both any individuals signed up via the registration and payment system and any individuals signed up without payment .
47,601
def save(self, *args, **kwargs):
    """Save the lesson, defaulting the registration status to hidden.

    Bug fix: the original used ``==`` (comparison) instead of ``=``
    (assignment), so the default status was silently never applied.
    """
    if not self.status:
        self.status = Event.RegStatus.hidden
    super(PrivateLessonEvent, self).save(*args, **kwargs)
Set registration status to hidden if it is not specified otherwise
47,602
def availableDurations(self):
    '''
    A lesson can always be booked for the length of a single slot, but this
    method checks if multiple slots are available.  This method requires that
    slots are non-overlapping, which needs to be enforced on slot save.
    '''
    # Candidate slots: same instructor/location/room/pricing tier, starting
    # at or after this slot, within the maximum allowed lesson length.
    potential_slots = InstructorAvailabilitySlot.objects.filter(
        instructor=self.instructor,
        location=self.location,
        room=self.room,
        pricingTier=self.pricingTier,
        startTime__gte=self.startTime,
        startTime__lte=self.startTime + timedelta(minutes=getConstant('privateLessons__maximumLessonLength')),
    ).exclude(id=self.id).order_by('startTime')

    duration_list = [self.duration, ]
    last_start = self.startTime
    last_duration = self.duration
    max_duration = self.duration

    for slot in potential_slots:
        # Stop once adding this slot would exceed the maximum lesson length.
        if max_duration + slot.duration > getConstant('privateLessons__maximumLessonLength'):
            break
        # Only chain slots that begin exactly when the previous one ends.
        if (
            slot.startTime == last_start + timedelta(minutes=last_duration) and
            slot.isAvailable
        ):
            duration_list.append(max_duration + slot.duration)
            last_start = slot.startTime
            last_duration = slot.duration
            max_duration += slot.duration
    return duration_list
A lesson can always be booked for the length of a single slot but this method checks if multiple slots are available . This method requires that slots are non - overlapping which needs to be enforced on slot save .
47,603
def availableRoles(self):
    """Return [id, name] pairs for the roles this instructor offers.

    Some instructors only offer private lessons for certain roles, so only
    roles selected on the instructor's private lesson details are returned;
    instructors without details yield an empty list.
    """
    details = getattr(self.instructor, 'instructorprivatelessondetails', None)
    if details is None:
        return []
    return [[role.id, role.name] for role in details.roles.all()]
Some instructors only offer private lessons for certain roles so we should only allow booking for the roles that have been selected for the instructor .
47,604
def checkIfAvailable(self, dateTime=None):
    """Return whether this slot can currently be booked.

    Available slots are bookable; so are tentative slots whose temporary
    registration hold has passed its expiration date.

    Bug fix: the original default ``dateTime=timezone.now()`` was evaluated
    once at import time, so the booking window drifted as the process aged.
    The default is now resolved at call time; passing an explicit dateTime
    behaves exactly as before.
    """
    if dateTime is None:
        dateTime = timezone.now()
    return (
        self.startTime >= dateTime + timedelta(days=getConstant('privateLessons__closeBookingDays')) and
        self.startTime <= dateTime + timedelta(days=getConstant('privateLessons__openBookingDays')) and
        not self.eventRegistration and (
            self.status == self.SlotStatus.available or (
                self.status == self.SlotStatus.tentative and
                getattr(
                    getattr(self.temporaryEventRegistration, 'registration', None),
                    'expirationDate', timezone.now()
                ) <= timezone.now()
            )
        )
    )
Available slots are available but also tentative slots that have been held as tentative past their expiration date
47,605
def json_event_feed(request, location_id=None, room_id=None):
    '''
    The jQuery fullcalendar app requires a JSON feed, so this function
    creates the feed from upcoming PrivateEvent objects.

    Optional location_id / room_id restrict the feed; start, end, and
    timezone are read from the query string.  Returns an empty JSON object
    unless the private calendar feed is enabled and the user is staff.
    '''
    if not getConstant('calendar__privateCalendarFeedEnabled') or not request.user.is_staff:
        return JsonResponse({})

    this_user = request.user
    startDate = request.GET.get('start', '')
    endDate = request.GET.get('end', '')
    timeZone = request.GET.get('timezone', getattr(settings, 'TIME_ZONE', 'UTC'))

    # Restrict occurrences to the requested window (the end date is
    # inclusive, hence the extra day).
    time_filter_dict_events = {}
    if startDate:
        time_filter_dict_events['startTime__gte'] = ensure_timezone(
            datetime.strptime(startDate, '%Y-%m-%d')
        )
    if endDate:
        time_filter_dict_events['endTime__lte'] = ensure_timezone(
            datetime.strptime(endDate, '%Y-%m-%d')
        ) + timedelta(days=1)

    instructor_groups = list(this_user.groups.all().values_list('id', flat=True))

    # Show events targeted at one of the user's groups, at the user directly,
    # or at no group/user in particular.
    filters = Q(event__privateevent__isnull=False) & (
        Q(event__privateevent__displayToGroup__in=instructor_groups) |
        Q(event__privateevent__displayToUsers=this_user) |
        (
            Q(event__privateevent__displayToGroup__isnull=True) &
            Q(event__privateevent__displayToUsers__isnull=True)
        )
    )
    if location_id:
        filters = filters & Q(event__location__id=location_id)
    if room_id:
        filters = filters & Q(event__room_id=room_id)

    occurrences = EventOccurrence.objects.filter(filters).filter(
        **time_filter_dict_events
    ).order_by('-startTime')
    eventlist = [EventFeedItem(x, timeZone=timeZone).__dict__ for x in occurrences]
    return JsonResponse(eventlist, safe=False)
The jQuery fullcalendar app requires a JSON event feed, so this function creates the feed from upcoming PrivateEvent objects.
47,606
def checkRequirements(sender, **kwargs):
    '''
    Check that the customer meets all prerequisites for the items in the
    registration.

    Signal handler: raises ValidationError for unmet requirements enforced as
    errors, and queues a Django message for those enforced only as warnings.
    '''
    if not getConstant('requirements__enableRequirements'):
        return

    logger.debug('Signal to check RegistrationContactForm handled by prerequisites app.')

    formData = kwargs.get('formData', {})
    first = formData.get('firstName')
    last = formData.get('lastName')
    email = formData.get('email')

    request = kwargs.get('request', {})
    registration = kwargs.get('registration', None)

    # Look up an existing customer by name and email; may be None for
    # first-time registrants.
    customer = Customer.objects.filter(first_name=first, last_name=last, email=email).first()

    requirement_warnings = []
    requirement_errors = []

    for ter in registration.temporaryeventregistration_set.all():
        if hasattr(ter.event, 'getRequirements'):
            for req in ter.event.getRequirements():
                if not req.customerMeetsRequirement(customer=customer, danceRole=ter.role):
                    if req.enforcementMethod == Requirement.EnforcementChoice.error:
                        requirement_errors.append((ter.event.name, req.name))
                    if req.enforcementMethod == Requirement.EnforcementChoice.warning:
                        requirement_warnings.append((ter.event.name, req.name))

    if requirement_errors:
        raise ValidationError(format_html(
            '<p>{}</p> <ul>{}</ul> <p>{}</p>',
            ugettext('Unfortunately, you do not meet the following requirements/prerequisites for the items you have chosen:\n'),
            mark_safe(''.join(['<li><em>%s:</em> %s</li>\n' % x for x in requirement_errors])),
            getConstant('requirements__errorMessage') or '',
        ))
    if requirement_warnings:
        messages.warning(request, format_html(
            '<p>{}</p> <ul>{}</ul> <p>{}</p>',
            mark_safe(ugettext('<strong>Please Note:</strong> It appears that you do not meet the following requirements/prerequisites for the items you have chosen:\n')),
            mark_safe(''.join(['<li><em>%s:</em> %s</li>\n' % x for x in requirement_warnings])),
            getConstant('requirements__warningMessage') or '',
        ))
Check that the customer meets all prerequisites for the items in the registration .
47,607
def get_email_context(self, **kwargs):
    """Build the context dictionary supplied to email templates.

    Subclasses may override to add object-specific context.  By default,
    kwargs are passed through with general financial and site variables
    merged in.
    """
    context = kwargs
    site_domain = Site.objects.get_current().domain
    context.update({
        'currencyCode': getConstant('general__currencyCode'),
        'currencySymbol': getConstant('general__currencySymbol'),
        'businessName': getConstant('contact__businessName'),
        'site_url': '%s://%s' % (getConstant('email__linkProtocol'), site_domain),
    })
    return context
This method can be overridden in classes that inherit from this mixin so that additional object-specific context is provided to the email template. This should return a dictionary. By default, only general financial context variables are added to the dictionary, and kwargs are just passed directly. Note also that it is in general not a good idea, for security reasons, to pass model instances in the context here, since these methods can be accessed by logged-in users who use the SendEmailView. So, in the default models of this app, the values of fields and properties are passed directly instead.
47,608
def get_group_required(self):
    """Read the required-group name off the configured field of model_object.

    Returns a one-element list containing the group name, or [''] when the
    object lacks the field or the field value has no name attribute.
    """
    target = self.model_object
    if not hasattr(target, self.group_required_field):
        return ['']
    field_value = getattr(target, self.group_required_field)
    if not hasattr(field_value, 'name'):
        return ['']
    return [field_value.name]
Get the group_required value from the object
47,609
def check_membership(self, groups):
    """Return truthy when the user may proceed.

    Objects with no required groups (empty or ['']) are open to everyone;
    superusers always pass; otherwise require an intersection between the
    required group names and the user's group names.
    """
    if not groups or groups == ['']:
        return True
    if self.request.user.is_superuser:
        return True
    member_names = self.request.user.groups.values_list("name", flat=True)
    return set(groups) & set(member_names)
Allows for objects with no required groups
47,610
def dispatch(self, request, *args, **kwargs):
    '''
    This override of dispatch ensures that if no group is required, then the
    request still goes through without being logged in.
    '''
    self.request = request

    in_group = False
    required_group = self.get_group_required()
    if not required_group or required_group == ['']:
        # No group requirement: allow anonymous access.
        in_group = True
    elif self.request.user.is_authenticated():
        # NOTE(review): is_authenticated is invoked as a method, the
        # pre-Django-1.10 API -- confirm against the project's Django version.
        in_group = self.check_membership(required_group)

    if not in_group:
        if self.raise_exception:
            raise PermissionDenied
        else:
            # Redirect unauthenticated/unauthorized users to the login page.
            return redirect_to_login(
                request.get_full_path(),
                self.get_login_url(),
                self.get_redirect_field_name()
            )
    return super(GroupRequiredMixin, self).dispatch(request, *args, **kwargs)
This override of dispatch ensures that if no group is required then the request still goes through without being logged in .
47,611
def validate(self, value):
    """Check for empty values and that the value names an existing template.

    Does not check that the value is one of the initially provided choices.

    i18n fix: interpolate the value *after* the translation lookup so that
    the literal format string (not the already-formatted result) is looked up
    in the translation catalog; the rendered message is unchanged.
    """
    super(ChoiceField, self).validate(value)
    try:
        get_template(value)
    except TemplateDoesNotExist:
        raise ValidationError(_('%s is not a valid template.') % value)
Check for empty values and for an existing template but do not check if this is one of the initial choices provided .
47,612
def render(self, context, instance, placeholder):
    """Permit the plugin instance configuration to override the template."""
    if instance:
        configured_template = instance.template
        if configured_template:
            self.render_template = configured_template
    return super(PluginTemplateMixin, self).render(context, instance, placeholder)
Permits setting of the template in the plugin instance configuration
47,613
def get_return_page(self, prior=False):
    """Thin wrapper around the getReturnPage helper using session history."""
    history = self.request.session.get('SITE_HISTORY', {})
    return getReturnPage(history, prior=prior)
This is just a wrapper for the getReturnPage helper function .
47,614
def is_valid(self):
    '''
    For this form to be considered valid, there must be not only no errors,
    but also no messages on the request that need to be shown.
    '''
    valid = super(RegistrationContactForm, self).is_valid()
    msgs = messages.get_messages(self._request)

    # Messages already shown on a previous pass are stored in the session so
    # they do not invalidate the form a second time.
    prior_messages = self._session.pop('prior_messages', [])
    remaining_messages = []

    for m in msgs:
        m_dict = {'message': m.message, 'level': m.level, 'extra_tags': m.extra_tags}
        if m_dict not in prior_messages:
            remaining_messages.append(m_dict)

    if remaining_messages:
        # New messages exist: remember them and force the form to redisplay.
        self._session['prior_messages'] = remaining_messages
        self._request.session.modified = True
        return False
    return valid
For this form to be considered valid there must be not only no errors but also no messages on the request that need to be shown .
47,615
def clean_total_refund_amount(self):
    """Validate the hidden total refund amount server-side.

    The Javascript should keep the hidden input in sync with the per-item
    allocations, but this re-checks it: the total must equal the summed item
    refunds, must not exceed revenue received, and must not shrink.
    """
    data = self.cleaned_data
    initial = data.get('initial_refund_amount', 0)
    total = data['total_refund_amount']
    allocated = sum(
        amount for key, amount in data.items()
        if key.startswith('item_refundamount_')
    )

    if not data.get('id'):
        raise ValidationError('ID not in cleaned data')
    if allocated != total:
        raise ValidationError(_('Passed value does not match sum of allocated refunds.'))
    if allocated > data['id'].amountPaid + data['id'].refunds:
        raise ValidationError(_('Total refunds allocated exceed revenue received.'))
    if total < initial:
        raise ValidationError(_('Cannot reduce the total amount of the refund.'))
    return total
The Javascript should ensure that the hidden input is updated but double check it here .
47,616
def clean(self):
    '''
    This code prevents multiple individuals from substituting for the same
    class and class teacher.  It also prevents an individual from
    substituting for a class in which they are a teacher.
    '''
    super(SubstituteReportingForm, self).clean()

    occurrences = self.cleaned_data.get('occurrences', [])
    staffMember = self.cleaned_data.get('staffMember')
    replacementFor = self.cleaned_data.get('replacedStaffMember', [])
    event = self.cleaned_data.get('event')

    # A given occurrence may only have one substitute per replaced teacher.
    for occ in occurrences:
        for this_sub in occ.eventstaffmember_set.all():
            if this_sub.replacedStaffMember == replacementFor:
                self.add_error('occurrences', ValidationError(_('One or more classes you have selected already has a substitute teacher for that class.'), code='invalid'))

    if event and staffMember:
        # A teacher cannot substitute for a class they already teach.
        if staffMember in [
            x.staffMember for x in event.eventstaffmember_set.filter(
                category__in=[
                    getConstant('general__eventStaffCategoryAssistant'),
                    getConstant('general__eventStaffCategoryInstructor')
                ]
            )
        ]:
            self.add_error('event', ValidationError(_('You cannot substitute teach for a class in which you were an instructor.'), code='invalid'))
This code prevents multiple individuals from substituting for the same class and class teacher . It also prevents an individual from substituting for a class in which they are a teacher .
47,617
def save(self, commit=True):
    """Merge repeat substitute reports into an existing EventStaffMember.

    If this staff member already has a substitute record for this event and
    replaced staff member, the newly reported occurrences are added to that
    record rather than creating a duplicate record.
    """
    data = self.cleaned_data
    existing = EventStaffMember.objects.filter(
        staffMember=data.get('staffMember'),
        event=data.get('event'),
        category=getConstant('general__eventStaffCategorySubstitute'),
        replacedStaffMember=data.get('replacedStaffMember'),
    )
    if not existing.exists():
        return super(SubstituteReportingForm, self).save()
    record = existing.first()
    for occurrence in data.get('occurrences'):
        record.occurrences.add(occurrence)
    record.save()
    return record
If a staff member is reporting substitute teaching for a second time then we should update the list of occurrences for which they are a substitute on their existing EventStaffMember record rather than creating a new record and creating database issues .
47,618
def save(self, commit=True):
    """Save the bio change; also sync availableForPrivates to the Instructor.

    If the staff member is an instructor, the availableForPrivates field on
    the Instructor record is updated first.

    Fixes: honor the ``commit`` argument (the original always committed
    regardless) and return the saved instance, as ModelForm.save() callers
    expect.
    """
    instructor = getattr(self.instance, 'instructor', None)
    if instructor:
        instructor.availableForPrivates = self.cleaned_data.pop(
            'availableForPrivates', instructor.availableForPrivates
        )
        instructor.save(update_fields=['availableForPrivates', ])
    return super(StaffMemberBioChangeForm, self).save(commit=commit)
If the staff member is an instructor also update the availableForPrivates field on the Instructor record .
47,619
def save(self, *args, **kwargs):
    """Default pluralName to name + 's' when no plural name was given."""
    if not self.pluralName:
        self.pluralName = self.name + 's'
    # NOTE(review): super(self.__class__, self) is fragile -- it recurses
    # infinitely if a subclass inherits this save(); prefer naming the
    # defining class explicitly.  TODO confirm the defining class name.
    super(self.__class__, self).save(*args, **kwargs)
Just add s if no plural name given .
47,620
def getBasePrice(self, **kwargs):
    """Select the applicable base price for this pricing tier.

    Drop-ins are priced per drop-in; otherwise the door price applies when
    paying at the door, and the online price by default.  More sophisticated
    discounting belongs in the discounts/vouchers apps or a subclass.
    """
    dropIns = kwargs.get('dropIns', 0)
    payAtDoor = kwargs.get('payAtDoor', False)
    if dropIns:
        return dropIns * self.dropinPrice
    return self.doorPrice if payAtDoor else self.onlinePrice
This handles the logic of finding the correct price . If more sophisticated discounting systems are needed then this PricingTier model can be subclassed or the discounts and vouchers apps can be used .
47,621
def availableRoles(self):
    """Return the set of roles for this event's registration.

    Custom event roles with positive capacity take precedence; Series fall
    back to their dance type's roles.  An empty list means registration for
    this event is not role-specific.
    """
    custom_roles = self.eventrole_set.filter(capacity__gt=0)
    if custom_roles.count() > 0:
        return [entry.role for entry in custom_roles]
    if isinstance(self, Series):
        return self.classDescription.danceTypeLevel.danceType.roles.all()
    return []
Returns the set of roles for this event. Since roles are not always custom specified for events, this looks for the set of available roles in multiple places. If no roles are found, then the method returns an empty list, in which case it can be assumed that the event's registration is not role-specific.
47,622
def numRegisteredForRole(self, role, includeTemporaryRegs=False):
    """Count non-cancelled, non-drop-in registrations for a DanceRole.

    When includeTemporaryRegs is True, unexpired temporary registrations are
    counted as well.
    """
    total = self.eventregistration_set.filter(
        cancelled=False, dropIn=False, role=role
    ).count()
    if includeTemporaryRegs:
        total += self.temporaryeventregistration_set.filter(
            dropIn=False, role=role
        ).exclude(
            registration__expirationDate__lte=timezone.now()
        ).count()
    return total
Accepts a DanceRole object and returns the number of registrations of that role .
47,623
def capacityForRole(self, role):
    '''
    Accepts a DanceRole object and determines the capacity for that role at
    this event.  Since roles are not always custom specified for events, this
    looks for the set of available roles in multiple places, and only returns
    the overall capacity of the event if roles are not found elsewhere.
    '''
    # Accept either a DanceRole instance or a raw role id.
    if isinstance(role, DanceRole):
        role_id = role.id
    else:
        role_id = role

    # Custom per-event role capacities take precedence.
    eventRoles = self.eventrole_set.filter(capacity__gt=0)
    if eventRoles.count() > 0 and role_id not in [x.role.id for x in eventRoles]:
        # Roles are specified for this event and this is not one of them.
        return 0
    elif eventRoles.count() > 0:
        return eventRoles.get(role=role).capacity

    if isinstance(self, Series):
        # Fall back to the dance type's roles, splitting the overall event
        # capacity evenly between them.
        try:
            availableRoles = self.classDescription.danceTypeLevel.danceType.roles.all()
            if availableRoles.count() > 0 and role_id not in [x.id for x in availableRoles]:
                return 0
            elif availableRoles.count() > 0 and self.capacity:
                return ceil(self.capacity / availableRoles.count())
        except ObjectDoesNotExist as e:
            logger.error('Error in calculating capacity for role: %s' % e)

    # No role-specific information found: use the event's overall capacity.
    return self.capacity
Accepts a DanceRole object and determines the capacity for that role at this event. Since roles are not always custom specified for events, this looks for the set of available roles in multiple places, and only returns the overall capacity of the event if roles are not found elsewhere.
47,624
def soldOutForRole(self, role, includeTemporaryRegs=False):
    """Return True when registrations for ``role`` meet or exceed capacity.

    A missing (None/0) capacity is treated as zero, so any registration count
    reports the role as sold out.
    """
    capacity = self.capacityForRole(role) or 0
    registered = self.numRegisteredForRole(
        role, includeTemporaryRegs=includeTemporaryRegs
    )
    return registered >= capacity
Accepts a DanceRole object and responds if the number of registrations for that role exceeds the capacity for that role at this event .
47,625
def allDayForDate(self, this_date, timeZone=None):
    """Determine whether this occurrence spans the entirety of a day.

    Comparison happens in naive local time within the given zone (the default
    zone when None).  A 30-minute grace period is allowed at the end of the
    day to account for imprecise event entry.
    """
    d = this_date.date() if isinstance(this_date, datetime) else this_date
    day_start = datetime(d.year, d.month, d.day)

    def _to_naive(value):
        # Strip zone info so comparisons against the naive midnight work.
        if timezone.is_naive(value):
            return value
        return timezone.make_naive(value, timezone=timeZone)

    starts_by_midnight = _to_naive(self.startTime) <= day_start
    ends_near_next_midnight = (
        _to_naive(self.endTime) >= day_start + timedelta(days=1, minutes=-30)
    )
    return starts_by_midnight and ends_near_next_midnight
This method determines whether the occurrence lasts the entirety of a specified day in the specified time zone . If no time zone is specified then it uses the default time zone ) . Also give a grace period of a few minutes to account for issues with the way events are sometimes entered .
47,626
def netHours(self):
    """Net hours worked for financial purposes.

    Explicitly specified hours always win.  Instructors and assistants are
    credited the event duration net of any substitutes' hours; other staff
    receive the summed duration of their non-cancelled occurrences.
    """
    if self.specifiedHours is not None:
        return self.specifiedHours

    teaching_categories = [
        getConstant('general__eventStaffCategoryAssistant'),
        getConstant('general__eventStaffCategoryInstructor'),
    ]
    if self.category in teaching_categories:
        substituted = sum(sub.netHours for sub in self.replacementFor.all())
        return self.event.duration - substituted

    return sum(occ.duration for occ in self.occurrences.filter(cancelled=False))
For regular event staff, this is the net hours worked for financial purposes. For Instructors, netHours is calculated net of any substitutes.
47,627
def shortDescription(self):
    """Overrides property from Event base class.

    Prefers the class description's short description, falls back to its full
    description, and returns '' when there is no class description at all.
    """
    description_obj = getattr(self, 'classDescription', None)
    if not description_obj:
        return ''
    short = getattr(description_obj, 'shortDescription', '')
    if short:
        return short
    return getattr(description_obj, 'description', '')
Overrides property from Event base class .
47,628
def netHours(self):
    """Net hours for financial purposes, net of any substitutes' hours.

    Explicitly specified hours take precedence over the computed value.
    """
    if self.specifiedHours is not None:
        return self.specifiedHours
    substituted = sum(sub.netHours for sub in self.replacementFor.all())
    return self.event.duration - substituted
For regular event staff this is the net hours worked for financial purposes . For Instructors netHours is calculated net of any substitutes .
47,629
def getTimeOfClassesRemaining(self, numClasses=0):
    '''
    For checking things like prerequisites, it's useful to check whether a
    requirement is almost met.
    '''
    # Non-cancelled occurrences of every series in this registration, most
    # recent end time first.
    occurrences = EventOccurrence.objects.filter(
        cancelled=False,
        event__in=[
            x.event for x in self.temporaryeventregistration_set.filter(
                event__series__isnull=False
            )
        ],
    ).order_by('-endTime')

    if occurrences.count() > numClasses:
        return occurrences[numClasses].endTime
    else:
        # NOTE(review): occurrences.last() returns None for an empty
        # queryset, which would raise AttributeError here; this branch also
        # reads startTime while the branch above reads endTime -- confirm
        # both are intentional.
        return occurrences.last().startTime
For checking things like prerequisites, it's useful to check whether a requirement is almost met.
47,630
def finalize(self, **kwargs):
    '''
    This method is called when the payment process has been completed and a
    registration is ready to be finalized.  It also fires the
    post-registration signal.
    '''
    dateTime = kwargs.pop('dateTime', timezone.now())
    sendEmail = kwargs.pop('sendEmail', True)

    # Create or update the Customer record for this registrant.
    customer, created = Customer.objects.update_or_create(
        first_name=self.firstName, last_name=self.lastName,
        email=self.email, defaults={'phone': self.phone}
    )

    # Build the final Registration from this temporary one; explicit kwargs
    # override the corresponding temporary registration attributes.
    regArgs = {
        'customer': customer, 'firstName': self.firstName,
        'lastName': self.lastName, 'dateTime': dateTime,
        'temporaryRegistration': self
    }
    for key in ['comments', 'howHeardAboutUs', 'student', 'priceWithDiscount', 'payAtDoor']:
        regArgs[key] = kwargs.pop(key, getattr(self, key, None))

    # Any remaining kwargs are stored in the registration's data field.
    regArgs['data'] = self.data
    regArgs['data'].update(kwargs)

    realreg = Registration(**regArgs)
    realreg.save()
    logger.debug('Created registration with id: ' + str(realreg.id))

    # Copy each temporary event registration to a final one.
    for er in self.temporaryeventregistration_set.all():
        logger.debug('Creating eventreg for event: ' + str(er.event.id))
        realer = EventRegistration(
            registration=realreg, event=er.event,
            customer=customer, role=er.role,
            price=er.price, dropIn=er.dropIn,
            data=er.data
        )
        realer.save()

    # Expire this temporary registration immediately.
    self.expirationDate = timezone.now()
    self.save()

    post_registration.send(sender=TemporaryRegistration, registration=realreg)

    if sendEmail:
        if getConstant('email__disableSiteEmails'):
            logger.info('Sending of confirmation emails is disabled.')
        else:
            logger.info('Sending confirmation email.')
            template = getConstant('email__registrationSuccessTemplate')
            realreg.email_recipient(
                subject=template.subject,
                content=template.content,
                html_content=template.html_content,
                send_html=template.send_html,
                from_address=template.defaultFromAddress,
                from_name=template.defaultFromName,
                cc=template.defaultCC,
            )
    return realreg
This method is called when the payment process has been completed and a registration is ready to be finalized . It also fires the post - registration signal
47,631
def save(self, *args, **kwargs):
    """Keep plain-text and HTML template content consistent before saving.

    HTML templates regenerate their plain-text content from the HTML;
    plain-text templates have their HTML content cleared.
    """
    if not self.send_html:
        self.html_content = None
    else:
        self.content = get_text_for_html(self.html_content)
    super(EmailTemplate, self).save(*args, **kwargs)
If this is an HTML template then set the non - HTML content to be the stripped version of the HTML . If this is a plain text template then set the HTML content to be null .
47,632
def create_from_registration(cls, reg, **kwargs):
    '''
    Handles the creation of an Invoice as well as one InvoiceItem per
    associated TemporaryEventRegistration or EventRegistration.  Also handles
    taxes appropriately.
    '''
    submissionUser = kwargs.pop('submissionUser', None)
    collectedByUser = kwargs.pop('collectedByUser', None)
    status = kwargs.pop('status', Invoice.PaymentStatus.unpaid)

    # Remaining kwargs are stored in the invoice's data field.
    new_invoice = cls(
        firstName=reg.firstName,
        lastName=reg.lastName,
        email=reg.email,
        grossTotal=reg.totalPrice,
        total=reg.priceWithDiscount,
        submissionUser=submissionUser,
        collectedByUser=collectedByUser,
        buyerPaysSalesTax=getConstant('registration__buyerPaysSalesTax'),
        status=status,
        data=kwargs,
    )

    # Link the invoice to the registration of whichever type was passed.
    if isinstance(reg, Registration):
        new_invoice.finalRegistration = reg
        ter_set = reg.eventregistration_set.all()
    elif isinstance(reg, TemporaryRegistration):
        new_invoice.temporaryRegistration = reg
        ter_set = reg.temporaryeventregistration_set.all()
    else:
        raise ValueError('Object passed is not a registration.')

    new_invoice.calculateTaxes()
    new_invoice.save()

    # Allocate the invoice's total, taxes, and fees across items in
    # proportion to each item's share of the gross total.
    for ter in ter_set:
        item_kwargs = {
            'invoice': new_invoice,
            'grossTotal': ter.price,
        }
        if new_invoice.grossTotal > 0:
            item_kwargs.update({
                'total': ter.price * (new_invoice.total / new_invoice.grossTotal),
                'taxes': new_invoice.taxes * (ter.price / new_invoice.grossTotal),
                'fees': new_invoice.fees * (ter.price / new_invoice.grossTotal),
            })
        else:
            # Zero gross total: pass amounts through unscaled.
            item_kwargs.update({
                'total': ter.price,
                'taxes': new_invoice.taxes,
                'fees': new_invoice.fees,
            })

        if isinstance(ter, TemporaryEventRegistration):
            item_kwargs['temporaryEventRegistration'] = ter
        elif isinstance(ter, EventRegistration):
            item_kwargs['finalEventRegistration'] = ter
        this_item = InvoiceItem(**item_kwargs)
        this_item.save()

    return new_invoice
Handles the creation of an Invoice as well as one InvoiceItem per associated TemporaryEventRegistration or registration. Also handles taxes appropriately.
47,633
def url(self):
    """Absolute URL for viewing this invoice.

    Because invoice URLs are generally emailed, the site domain and the
    configured link protocol are included.  Unsaved invoices (no id) yield
    None.
    """
    if not self.id:
        return None
    protocol = getConstant('email__linkProtocol')
    domain = Site.objects.get_current().domain
    path = reverse('viewInvoice', args=[self.id, ])
    return '%s://%s%s' % (protocol, domain, path)
Because invoice URLs are generally emailed this includes the default site URL and the protocol specified in settings .
47,634
def calculateTaxes(self):
    """Update self.taxes from the configured sales tax rate.

    When the buyer pays tax, taxes are charged on top of the total; when the
    seller pays, the tax portion is backed out of the tax-inclusive total.
    """
    tax_rate = (getConstant('registration__salesTaxRate') or 0) / 100
    if tax_rate <= 0:
        return
    if self.buyerPaysSalesTax:
        self.taxes = self.total * tax_rate
    else:
        pre_tax_total = self.total / (1 + tax_rate)
        self.taxes = pre_tax_total * tax_rate
Updates the tax field to reflect the amount of taxes depending on the local rate as well as whether the buyer or seller pays sales tax .
47,635
def allocateFees(self):
    '''
    Fees are allocated across invoice items based on their discounted total
    price net of adjustments, as a proportion of the overall invoice's total
    price.
    '''
    items = list(self.invoiceitem_set.all())

    # Sanity checks: allocation is meaningless if item totals/adjustments do
    # not add up to the invoice-level figures.
    if self.total != sum([x.total for x in items]):
        msg = _('Invoice item totals do not match invoice total. Unable to allocate fees.')
        logger.error(str(msg))
        raise ValidationError(msg)
    if self.adjustments != sum([x.adjustments for x in items]):
        msg = _('Invoice item adjustments do not match invoice adjustments. Unable to allocate fees.')
        logger.error(str(msg))
        raise ValidationError(msg)

    for item in items:
        saveFlag = False

        if self.total - self.adjustments > 0:
            # Usual case: proportional to net (post-adjustment) item totals.
            item.fees = self.fees * ((item.total - item.adjustments) / (self.total - self.adjustments))
            saveFlag = True
        elif self.total - self.adjustments == 0 and self.total > 0:
            # Fully adjusted away: fall back to gross total proportions.
            item.fees = self.fees * (item.total / self.total)
            saveFlag = True
        elif self.fees:
            # Zero totals but nonzero fees: split evenly across items.
            item.fees = self.fees * (1 / len(items))
            saveFlag = True

        if saveFlag:
            item.save()
Fees are allocated across invoice items based on their discounted total price net of adjustments, as a proportion of the overall invoice's total price.
47,636
def applyAndAllocate(self, allocatedPrices, tieredTuples, payAtDoor=False):
    '''
    This method takes an initial allocation of prices across events and an
    identical length list of allocation tuples.  It applies the rule
    specified by this discount, allocates the discount across the listed
    items, and returns both the price and the allocation.
    '''
    initial_net_price = sum([x for x in allocatedPrices])

    if self.discountType == self.DiscountType.flatPrice:
        # Flat price: fully-tiered items are covered by the flat rate, while
        # partially-applicable items keep a scaled base price.
        applicable_price = self.getFlatPrice(payAtDoor) or 0
        this_price = applicable_price + sum(
            [
                x[0].event.getBasePrice(payAtDoor=payAtDoor) * x[1]
                if x[1] != 1 else x[0].price for x in tieredTuples
            ]
        )
        this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]
    elif self.discountType == self.DiscountType.dollarDiscount:
        # Dollar discount: reduce the net price, scaling each item's share.
        this_price = initial_net_price - self.dollarDiscount
        this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]
    elif self.discountType == DiscountCombo.DiscountType.percentDiscount:
        if self.percentUniversallyApplied:
            # Apply the percentage (clamped to 0-100) to everything.
            this_price = initial_net_price * (1 - (max(min(self.percentDiscount or 0, 100), 0) / 100))
            this_allocated_prices = [x * (this_price / initial_net_price) for x in allocatedPrices]
        else:
            # Apply the percentage only to each item's non-tiered portion.
            this_price = 0
            this_allocated_prices = []
            for idx, val in enumerate(tieredTuples):
                this_val = (
                    allocatedPrices[idx] * (1 - val[1]) *
                    (1 - (max(min(self.percentDiscount or 0, 100), 0) / 100)) +
                    allocatedPrices[idx] * val[1]
                )
                this_allocated_prices.append(this_val)
                this_price += this_val
    else:
        raise KeyError(_('Invalid discount type.'))

    # Discounts never raise the price; clamp the total at zero from below.
    if this_price < initial_net_price:
        this_price = max(this_price, 0)

    return self.DiscountInfo(self, this_price, initial_net_price - this_price, this_allocated_prices)
This method takes an initial allocation of prices across events and an identical length list of allocation tuples . It applies the rule specified by this discount allocates the discount across the listed items and returns both the price and the allocation
47,637
def getComponentList(self):
    """Expand components by quantity into a flat, quantity-descending list.

    A component with quantity N appears N times in the result, which
    simplifies checking whether a discount's conditions are satisfied.
    """
    expanded = []
    for component in self.discountcombocomponent_set.all():
        expanded.extend([component] * component.quantity)
    expanded.sort(key=lambda c: c.quantity, reverse=True)
    return expanded
This function just returns a list in which items that are supposed to be present multiple times appear as multiple elements of the list. It simplifies checking whether a discount's conditions are satisfied.
47,638
def save(self, *args, **kwargs):
    """Null out fields that do not apply to the chosen discount type.

    Prevents stale values from a previously selected discount type from
    being persisted alongside the current one.
    """
    kind = self.discountType
    if kind != self.DiscountType.flatPrice:
        self.onlinePrice = None
        self.doorPrice = None
    if kind != self.DiscountType.dollarDiscount:
        self.dollarDiscount = None
    if kind != self.DiscountType.percentDiscount:
        self.percentDiscount = None
        self.percentUniversallyApplied = False
    super(DiscountCombo, self).save(*args, **kwargs)
Don't save any passed values related to a type of discount that is not the specified type.
47,639
def checkVoucherCode(sender, **kwargs):
    '''
    Check that the given voucher code is valid.

    Signal handler: raises ValidationError (keyed on the 'gift' field) when
    vouchers are disabled, a voucher is already applied, the code is unknown,
    or the voucher is invalid for this customer and these series.
    '''
    logger.debug('Signal to check RegistrationContactForm handled by vouchers app.')

    formData = kwargs.get('formData', {})
    request = kwargs.get('request', {})
    registration = kwargs.get('registration', None)
    session = getattr(request, 'session', {}).get(REG_VALIDATION_STR, {})

    id = formData.get('gift', '')
    first = formData.get('firstName')
    last = formData.get('lastName')
    email = formData.get('email')

    # Clear any voucher state from a prior pass through the form.
    session.pop('total_voucher_amount', 0)
    session.pop('voucher_names', None)
    session.pop('gift', None)

    if id == '':
        return
    if not getConstant('vouchers__enableVouchers'):
        raise ValidationError({'gift': _('Vouchers are disabled.')})
    if session.get('gift', '') != '':
        raise ValidationError({'gift': _('Can\'t have more than one voucher')})

    # Drop-ins are excluded from voucher eligibility.
    eventids = [x.event.id for x in registration.temporaryeventregistration_set.exclude(dropIn=True)]
    seriess = Series.objects.filter(id__in=eventids)

    obj = Voucher.objects.filter(voucherId=id).first()
    if not obj:
        raise ValidationError({'gift': _('Invalid Voucher Id')})
    else:
        customer = Customer.objects.filter(first_name=first, last_name=last, email=email).first()

        # Re-raise validation failures keyed on the 'gift' field so they
        # attach to the correct form field.
        try:
            obj.validateForCustomerAndSeriess(customer, seriess)
        except ValidationError as e:
            raise ValidationError({'gift': e})
    return
Check that the given voucher code is valid
47,640
def applyVoucherCodeTemporarily(sender, **kwargs):
    """Record a TemporaryVoucherUse for a registration's voucher code.

    Fired when the core registration system creates a temporary registration
    carrying a voucher code; does nothing when the code matches no voucher.
    """
    logger.debug('Signal fired to apply temporary vouchers.')
    reg = kwargs.pop('registration')
    code = reg.data.get('gift', '')
    try:
        voucher = Voucher.objects.get(voucherId=code)
    except ObjectDoesNotExist:
        logger.debug('No applicable vouchers found.')
        return
    TemporaryVoucherUse(voucher=voucher, registration=reg, amount=0).save()
    logger.debug('Temporary voucher use object created.')
When the core registration system creates a temporary registration with a voucher code the voucher app looks for vouchers that match that code and creates TemporaryVoucherUse objects to keep track of the fact that the voucher may be used .
47,641
def applyReferrerVouchersTemporarily(sender, **kwargs):
    """Attach any referral vouchers to a new temporary registration.

    Unlike voucher codes, which must be supplied manually, referrer discounts
    are applied automatically whenever the referral program is enabled.
    """
    if not getConstant('referrals__enableReferralProgram'):
        return
    logger.debug('Signal fired to temporarily apply referrer vouchers.')
    reg = kwargs.pop('registration')
    try:
        matched_customer = Customer.objects.get(user__email=reg.email)
        referral_vouchers = matched_customer.getReferralVouchers()
    except ObjectDoesNotExist:
        referral_vouchers = None
    if not referral_vouchers:
        logger.debug('No referral vouchers found.')
        return
    for voucher in referral_vouchers:
        TemporaryVoucherUse(voucher=voucher, registration=reg, amount=0).save()
Unlike voucher codes which have to be manually supplied referrer discounts are automatically applied here assuming that the referral program is enabled .
47,642
def applyVoucherCodesFinal(sender, **kwargs):
    """Convert temporary voucher uses into permanent VoucherUse records.

    Runs once a registration is finalized; referrers are awarded for each
    use when the referral program is enabled.
    """
    logger.debug('Signal fired to mark voucher codes as applied.')
    finalReg = kwargs.pop('registration')
    temp_uses = TemporaryVoucherUse.objects.filter(registration=finalReg.temporaryRegistration)
    for temp_use in temp_uses:
        permanent_use = VoucherUse(voucher=temp_use.voucher, registration=finalReg, amount=temp_use.amount)
        permanent_use.save()
        if getConstant('referrals__enableReferralProgram'):
            awardReferrers(permanent_use)
Once a registration has been completed vouchers are used and referrers are awarded
47,643
def provideCustomerReferralCode(sender, **kwargs):
    """Expose the customer's referral voucher code for their profile page.

    Returns a dict with the referral voucher id when both vouchers and the
    referral program are enabled; otherwise returns None.
    """
    customer = kwargs.pop('customer')
    if not getConstant('vouchers__enableVouchers'):
        return
    if not getConstant('referrals__enableReferralProgram'):
        return
    vrd = ensureReferralVouchersExist(customer)
    return {'referralVoucherId': vrd.referreeVoucher.voucherId}
If the vouchers app is installed and referrals are enabled then the customer s profile page can show their voucher referral code .
47,644
def get_prep_lookup(self):
    """Reject all lookups on this field type by raising FieldError."""
    message = "{} '{}' does not support lookups".format(
        self.lhs.field.__class__.__name__, self.lookup_name)
    raise FieldError(message)
Raise errors for unsupported lookups
47,645
def derive_fernet_key(input_key):
    """Derive a urlsafe-base64-encoded Fernet key from an arbitrary input key.

    Uses HKDF-SHA256 with the module-level ``salt``, ``info``, and ``backend``.
    NOTE(review): the derived key material is 32 bytes (not "32-bit"), as
    Fernet requires.
    """
    hkdf = HKDF(
        algorithm=hashes.SHA256(),
        length=32,  # Fernet keys are 32 bytes before base64 encoding
        salt=salt,
        info=info,
        backend=backend,
    )
    return base64.urlsafe_b64encode(hkdf.derive(force_bytes(input_key)))
Derive a 32 - bit b64 - encoded Fernet key from arbitrary input key .
47,646
def reduce_cpu(f, x, axes, dtype):
    """Implement NumPy-style reduce in terms of fold.

    The reduced axes are permuted to the front, an accumulator of the result
    type is prepared (seeded with f's identity element, or with the first
    slice when reducing a single non-empty axis), and fold() performs the
    reduction. Raises ValueError when f has no identity and no seed slice is
    available.
    """
    axes = _get_axes(axes, x.ndim)
    if not axes:
        return x
    # Move the axes being reduced to the leading positions.
    permute = [n for n in range(x.ndim) if n not in axes]
    permute = axes + permute
    T = x.transpose(permute=permute)
    N = len(axes)
    t = T.type.at(N, dtype=dtype)
    acc = x.empty(t, device=x.device)
    if f.identity is not None:
        # Seed with the identity and fold over the entire transposed array.
        _copyto(acc, f.identity)
        tl = T
    elif N == 1 and T.type.shape[0] > 0:
        # No identity: seed with the first slice, fold over the remainder.
        hd, tl = T[0], T[1:]
        acc[()] = hd
    else:
        raise ValueError("reduction not possible for function without an identity element")
    return fold(f, acc, tl)
NumPy's reduce, implemented in terms of fold.
47,647
def reduce_cuda(g, x, axes, dtype):
    """Run a CUDA reduction (backed by the thrust library).

    Only the default ``axes`` value of 0 is supported; any other value raises
    NotImplementedError.
    """
    if axes == 0:
        return g(x, dtype=dtype)
    raise NotImplementedError("'axes' keyword is not implemented for CUDA")
Reductions in CUDA use the thrust library for speed and have limited functionality .
47,648
def maxlevel(lst):
    """Return the maximum nesting depth of a (possibly nested) list.

    A non-list value has depth 0; an empty list has depth 1.
    """
    deepest = 0
    pending = [(lst, 0)]
    while pending:
        node, depth = pending.pop()
        if isinstance(node, list):
            depth += 1
            if depth > deepest:
                deepest = depth
            pending.extend((child, depth) for child in node)
    return deepest
Return maximum nesting depth
47,649
def getitem(lst, indices):
    """Multidimensional indexing/slicing on arbitrarily nested lists.

    ``indices`` is a tuple of ints and slices, applied one level at a time.
    An int index descends into the selected element; a slice maps the
    remaining indices over each selected element. When an intermediate
    result is empty, the remaining indices are still validated (an int
    applied against an empty dimension raises IndexError).
    """
    if not indices:
        return lst
    i, indices = indices[0], indices[1:]
    # list.__getitem__ so list subclasses keep base indexing behavior.
    item = list.__getitem__(lst, i)
    if isinstance(i, int):
        return getitem(item, indices)
    # i was a slice: item is a (possibly empty) list of sub-items.
    if not item:
        if lst:
            # Validate the remaining indices against a sibling element.
            _ = getitem(lst, (slice(None),) + indices)
        elif any(isinstance(k, int) for k in indices):
            raise IndexError
        return []
    return [getitem(x, indices) for x in item]
Definition for multidimensional slicing and indexing on arbitrarily shaped nested lists .
47,650
def genslices(n):
    """Yield every slice whose start/stop/step are drawn from None or
    the range [-n, n], skipping the invalid step == 0."""
    options = [None] + list(range(-n, n + 1))
    for start, stop, step in product(options, options, options):
        if step != 0:
            yield slice(start, stop, step)
Generate all possible slices for a single dimension .
47,651
def genslices_ndim(ndim, shape):
    """Yield every combination of per-dimension slices for the given shape."""
    per_dimension = [genslices(shape[dim]) for dim in range(ndim)]
    yield from product(*per_dimension)
Generate all possible slice tuples for shape .
47,652
def mutator(mutate):
    """Wrap a single-candidate mutation function as an inspyred mutator.

    The returned operator applies ``mutate`` to every candidate in turn and
    exposes the original function as ``single_mutation``.
    """
    @functools.wraps(mutate)
    def inspyred_mutator(random, candidates, args):
        return [mutate(random, candidate, args) for candidate in candidates]
    inspyred_mutator.single_mutation = mutate
    return inspyred_mutator
Return an inspyred mutator function based on the given function . This function generator takes a function that operates on only one candidate to produce a single mutated candidate . The generator handles the iteration over each candidate in the set to be mutated .
47,653
def bit_flip_mutation(random, candidate, args):
    """Flip each bit of a binary candidate with probability ``mutation_rate``.

    Candidates containing any gene outside {0, 1} are returned as an
    unmodified copy.
    """
    rate = args.setdefault('mutation_rate', 0.1)
    mutant = copy.copy(candidate)
    if all(gene in [0, 1] for gene in mutant):
        for index, gene in enumerate(mutant):
            if random.random() < rate:
                mutant[index] = (gene + 1) % 2
    return mutant
Return the mutants produced by bit - flip mutation on the candidates .
47,654
def random_reset_mutation(random, candidate, args):
    """Reset genes to a random choice from the bounder's discrete values.

    Each gene is replaced with probability ``mutation_rate``. When the
    bounder does not expose a ``values`` attribute, the candidate is
    returned untouched.
    """
    values = getattr(args['_ec'].bounder, 'values', None)
    if values is None:
        return candidate
    rate = args.setdefault('mutation_rate', 0.1)
    mutant = copy.copy(candidate)
    for index in range(len(mutant)):
        if random.random() < rate:
            mutant[index] = random.choice(values)
    return mutant
Return the mutants produced by randomly choosing new values .
47,655
def scramble_mutation(random, candidate, args):
    """Return a mutant produced by scramble mutation.

    With probability ``mutation_rate`` a random contiguous segment of the
    candidate is shuffled; otherwise the candidate is returned unchanged.

    Fix: the original inserted the shuffled segment reversed (``s[::-1]``);
    reversing an already randomly shuffled segment is a no-op on the
    distribution and only obscured the intent, so the segment is now
    inserted as shuffled.
    """
    rate = args.setdefault('mutation_rate', 0.1)
    if random.random() >= rate:
        return candidate
    size = len(candidate)
    p = random.randint(0, size - 1)
    q = random.randint(0, size - 1)
    p, q = min(p, q), max(p, q)
    segment = candidate[p:q + 1]
    random.shuffle(segment)
    return candidate[:p] + segment + candidate[q + 1:]
Return the mutants created by scramble mutation on the candidates .
47,656
def gaussian_mutation(random, candidate, args):
    """Perturb genes with Gaussian noise at rate ``mutation_rate``.

    The noisy candidate is passed through the evolutionary computation's
    bounder before being returned.
    """
    rate = args.setdefault('mutation_rate', 0.1)
    mu = args.setdefault('gaussian_mean', 0.0)
    sigma = args.setdefault('gaussian_stdev', 1.0)
    bound = args['_ec'].bounder
    mutant = copy.copy(candidate)
    for index, gene in enumerate(mutant):
        if random.random() < rate:
            mutant[index] = gene + random.gauss(mu, sigma)
    return bound(mutant, args)
Return the mutants created by Gaussian mutation on the candidates .
47,657
def nonuniform_mutation(random, candidate, args):
    """Apply nonuniform mutation to the candidate.

    Perturbations shrink as the evolution approaches ``max_generations``;
    each gene moves toward its upper or lower bound with equal probability.
    """
    bound = args['_ec'].bounder
    generation = args['_ec'].num_generations
    horizon = args['max_generations']
    strength = args.setdefault('mutation_strength', 1)
    # Exponent decays toward 0 as generation approaches the horizon.
    exponent = (1.0 - generation / float(horizon)) ** strength
    mutant = copy.copy(candidate)
    gene_bounds = zip(candidate, bound.lower_bound, bound.upper_bound)
    for index, (gene, lo, hi) in enumerate(gene_bounds):
        if random.random() <= 0.5:
            mutant[index] = gene + (hi - gene) * (1.0 - random.random() ** exponent)
        else:
            mutant[index] = gene - (gene - lo) * (1.0 - random.random() ** exponent)
    return mutant
Return the mutants produced by nonuniform mutation on the candidates .
47,658
def crossover(cross):
    """Wrap a two-parent crossover function as an inspyred crossover operator.

    Candidates are paired (mom, dad) in order; a trailing unpaired candidate
    is dropped. The pair index is recorded on the wrapped function before
    each call and all offspring are flattened into one list. The original
    function is exposed as ``single_crossover``.
    """
    @functools.wraps(cross)
    def inspyred_crossover(random, candidates, args):
        paired = candidates if len(candidates) % 2 == 0 else candidates[:-1]
        children = []
        for index, (mom, dad) in enumerate(zip(paired[::2], paired[1::2])):
            cross.index = index
            children.extend(cross(random, mom, dad, args))
        return children
    inspyred_crossover.single_crossover = cross
    return inspyred_crossover
Return an inspyred crossover function based on the given function .
47,659
def n_point_crossover(random, mom, dad, args):
    """Return the offspring of n-point crossover on the candidates.

    With probability ``crossover_rate``, up to ``num_crossover_points`` cut
    points are sampled and the parents' genes are exchanged between
    alternating segments; otherwise the parents are returned unchanged.
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    num_crossover_points = args.setdefault('num_crossover_points', 1)
    children = []
    if random.random() < crossover_rate:
        num_cuts = min(len(mom) - 1, num_crossover_points)
        cut_points = random.sample(range(1, len(mom)), num_cuts)
        cut_points.sort()
        bro = copy.copy(dad)
        sis = copy.copy(mom)
        normal = True
        for i, (m, d) in enumerate(zip(mom, dad)):
            if i in cut_points:
                # Crossing a cut point switches which parent contributes.
                normal = not normal
            if not normal:
                bro[i] = m
                sis[i] = d
        # NOTE(review): this trailing toggle has no effect ('normal' is not
        # read afterwards); confirm its placement against upstream inspyred.
        normal = not normal
        children.append(bro)
        children.append(sis)
    else:
        children.append(mom)
        children.append(dad)
    return children
Return the offspring of n - point crossover on the candidates .
47,660
def uniform_crossover(random, mom, dad, args):
    """Return two children produced by uniform crossover.

    With probability ``crossover_rate`` the parents cross: each gene
    position is swapped between them with probability ``ux_bias``.
    Otherwise the parents are returned unchanged.
    """
    bias = args.setdefault('ux_bias', 0.5)
    rate = args.setdefault('crossover_rate', 1.0)
    if random.random() >= rate:
        return [mom, dad]
    bro, sis = copy.copy(dad), copy.copy(mom)
    for index, (m_gene, d_gene) in enumerate(zip(mom, dad)):
        if random.random() < bias:
            bro[index] = m_gene
            sis[index] = d_gene
    return [bro, sis]
Return the offspring of uniform crossover on the candidates .
47,661
def partially_matched_crossover(random, mom, dad, args):
    """Return the offspring of partially matched crossover (PMX).

    Intended for permutation candidates: a random segment is exchanged
    between the parents, and genes displaced by the exchange are relocated
    via the PMX mapping so each child remains a valid permutation.
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    if random.random() < crossover_rate:
        size = len(mom)
        points = random.sample(range(size), 2)
        x, y = min(points), max(points)
        # Exchange the [x, y] segment between the parents.
        bro = copy.copy(dad)
        bro[x:y + 1] = mom[x:y + 1]
        sis = copy.copy(mom)
        sis[x:y + 1] = dad[x:y + 1]
        # Repair: each gene displaced by the exchange is chased through the
        # mapping until a position outside the exchanged segment is found.
        for parent, child in zip([dad, mom], [bro, sis]):
            for i in range(x, y + 1):
                if parent[i] not in child[x:y + 1]:
                    spot = i
                    while x <= spot <= y:
                        spot = parent.index(child[spot])
                    child[spot] = parent[i]
        return [bro, sis]
    else:
        return [mom, dad]
Return the offspring of partially matched crossover on the candidates .
47,662
def arithmetic_crossover(random, mom, dad, args):
    """Return the offspring of arithmetic crossover.

    At each chosen point the children take the linear blends
    ``alpha*p1 + (1-alpha)*p2`` of the parents' genes; both children are
    passed through the bounder. With probability 1 - crossover_rate the
    parents are returned unchanged.
    """
    alpha = args.setdefault('ax_alpha', 0.5)
    points = args.setdefault('ax_points', None)
    rate = args.setdefault('crossover_rate', 1.0)
    bound = args['_ec'].bounder
    if random.random() >= rate:
        return [mom, dad]
    bro, sis = copy.copy(dad), copy.copy(mom)
    if points is None:
        points = list(range(min(len(bro), len(sis))))
    for index in points:
        bro[index] = alpha * mom[index] + (1 - alpha) * dad[index]
        sis[index] = alpha * dad[index] + (1 - alpha) * mom[index]
    return [bound(bro, args), bound(sis, args)]
Return the offspring of arithmetic crossover on the candidates .
47,663
def blend_crossover(random, mom, dad, args):
    """Return the offspring of blend (BLX-alpha) crossover.

    Each child gene is sampled uniformly from the parents' interval widened
    by ``blx_alpha`` times its width on each side; children are bounded
    before being returned. With probability 1 - crossover_rate the parents
    are returned unchanged.
    """
    alpha = args.setdefault('blx_alpha', 0.1)
    points = args.setdefault('blx_points', None)
    rate = args.setdefault('crossover_rate', 1.0)
    bound = args['_ec'].bounder
    if random.random() >= rate:
        return [mom, dad]
    bro, sis = copy.copy(dad), copy.copy(mom)
    if points is None:
        points = list(range(min(len(bro), len(sis))))
    for index in points:
        lo = min(mom[index], dad[index])
        hi = max(mom[index], dad[index])
        margin = alpha * (hi - lo)
        span = hi - lo + 2 * margin
        bro[index] = lo - margin + random.random() * span
        sis[index] = lo - margin + random.random() * span
    return [bound(bro, args), bound(sis, args)]
Return the offspring of blend crossover on the candidates .
47,664
def heuristic_crossover(random, candidates, args):
    """Return the offspring of heuristic crossover on the candidates.

    Children are sampled along the line between each pair of parents, biased
    by which parent is fitter; fitness is recovered by looking the parents up
    in the current population via their pickled representation, so candidates
    must be picklable.
    """
    crossover_rate = args.setdefault('crossover_rate', 1.0)
    bounder = args['_ec'].bounder
    # Pair up candidates; an odd leftover candidate is dropped.
    if len(candidates) % 2 == 1:
        candidates = candidates[:-1]
    # Map pickled candidate -> Individual so fitness can be compared.
    population = list(args['_ec'].population)
    lookup = dict(zip([pickle.dumps(p.candidate, 1) for p in population], population))
    moms = candidates[::2]
    dads = candidates[1::2]
    children = []
    for mom, dad in zip(moms, dads):
        if random.random() < crossover_rate:
            bro = copy.copy(dad)
            sis = copy.copy(mom)
            mom_is_better = lookup[pickle.dumps(mom, 1)] > lookup[pickle.dumps(dad, 1)]
            for i, (m, d) in enumerate(zip(mom, dad)):
                negpos = 1 if mom_is_better else -1
                val = d if mom_is_better else m
                # Step from the worse parent toward (and past) the better one.
                bro[i] = val + random.random() * negpos * (m - d)
                sis[i] = val + random.random() * negpos * (m - d)
            bro = bounder(bro, args)
            sis = bounder(sis, args)
            children.append(bro)
            children.append(sis)
        else:
            children.append(mom)
            children.append(dad)
    return children
Return the offspring of heuristic crossover on the candidates .
47,665
def gravitational_force(position_a, mass_a, position_b, mass_b):
    """Return the (x, y) gravitational force between bodies a and b.

    Relies on the module-level constant ``G`` and helper ``distance_between``.
    NOTE(review): the sign convention here (force components flip when
    mass_b > mass_a) is unusual physics -- confirm against the simulation's
    intent before reusing this function elsewhere.
    """
    distance = distance_between(position_a, position_b)
    # Angle of the vector from b to a, measured from the +x axis.
    angle = math.atan2(position_a[1] - position_b[1], position_a[0] - position_b[0])
    magnitude = G * mass_a * mass_b / (distance ** 2)
    sign = -1 if mass_b > mass_a else 1
    x_force = sign * magnitude * math.cos(angle)
    y_force = sign * magnitude * math.sin(angle)
    return x_force, y_force
Returns the gravitational force between the two bodies a and b .
47,666
def force_on_satellite(position, mass):
    """Return the net (x, y) gravitational force on the body from the
    module-level Earth and Moon."""
    earth_fx, earth_fy = gravitational_force(position, mass, earth_position, earth_mass)
    moon_fx, moon_fy = gravitational_force(position, mass, moon_position, moon_mass)
    return earth_fx + moon_fx, earth_fy + moon_fy
Returns the total gravitational force acting on the body from the Earth and Moon .
47,667
def acceleration_of_satellite(position, mass):
    """Return the (x, y) acceleration of the body via Newton's second law (a = F / m)."""
    force_x, force_y = force_on_satellite(position, mass)
    return force_x / mass, force_y / mass
Returns the acceleration based on all forces acting upon the body .
47,668
def evaluator(evaluate):
    """Wrap a single-candidate fitness function as an inspyred evaluator.

    The returned operator maps ``evaluate`` over every candidate and exposes
    the original function as ``single_evaluation``.
    """
    @functools.wraps(evaluate)
    def inspyred_evaluator(candidates, args):
        return [evaluate(candidate, args) for candidate in candidates]
    inspyred_evaluator.single_evaluation = evaluate
    return inspyred_evaluator
Return an inspyred evaluator function based on the given function . This function generator takes a function that evaluates only one candidate . The generator handles the iteration over each candidate to be evaluated .
47,669
def parallel_evaluation_pp(candidates, args):
    """Evaluate the candidates in parallel using Parallel Python (pp).

    Required keyword arg: 'pp_evaluator'. Optional: 'pp_servers',
    'pp_nprocs', 'pp_secret', 'pp_dependencies', 'pp_modules'. The job
    server is created once and cached in args['_pp_job_server']; only the
    picklable entries of ``args`` are shipped to workers. A job whose
    result is unusable yields a fitness of None for that candidate.
    """
    import pp
    logger = args['_ec'].logger
    try:
        evaluator = args['pp_evaluator']
    except KeyError:
        logger.error('parallel_evaluation_pp requires \'pp_evaluator\' be defined in the keyword arguments list')
        raise
    secret_key = args.setdefault('pp_secret', 'inspyred')
    try:
        job_server = args['_pp_job_server']
    except KeyError:
        # First call: create the job server and cache it for reuse.
        pp_servers = args.get('pp_servers', ("*",))
        pp_nprocs = args.get('pp_nprocs', 'autodetect')
        job_server = pp.Server(ncpus=pp_nprocs, ppservers=pp_servers, secret=secret_key)
        args['_pp_job_server'] = job_server
    pp_depends = args.setdefault('pp_dependencies', ())
    pp_modules = args.setdefault('pp_modules', ())
    # Ship only the picklable portion of args to the workers.
    pickled_args = {}
    for key in args:
        try:
            pickle.dumps(args[key])
            pickled_args[key] = args[key]
        except (TypeError, pickle.PickleError, pickle.PicklingError):
            logger.debug('unable to pickle args parameter {0} in parallel_evaluation_pp'.format(key))
            pass
    func_template = pp.Template(job_server, evaluator, pp_depends, pp_modules)
    jobs = [func_template.submit([c], pickled_args) for c in candidates]
    fitness = []
    for i, job in enumerate(jobs):
        r = job()
        try:
            fitness.append(r[0])
        except TypeError:
            # Job returned something non-indexable (e.g. None on failure).
            logger.warning('parallel_evaluation_pp generated an invalid fitness for candidate {0}'.format(candidates[i]))
            fitness.append(None)
    return fitness
Evaluate the candidates in parallel using Parallel Python .
47,670
def parallel_evaluation_mp(candidates, args):
    """Evaluate the candidates in parallel using multiprocessing.

    Required keyword arg: 'mp_evaluator' (a picklable evaluator function).
    Optional: 'mp_nprocs' (defaults to cpu_count()). Only the picklable
    entries of ``args`` are shipped to the worker processes.

    Fix: the elapsed-time debug log sat in a ``try``/``else`` clause that
    could never run because the ``try`` block returned first; the results
    are now collected (and the time logged) in the ``else`` clause.
    """
    import time
    import multiprocessing
    logger = args['_ec'].logger
    try:
        evaluator = args['mp_evaluator']
    except KeyError:
        logger.error('parallel_evaluation_mp requires \'mp_evaluator\' be defined in the keyword arguments list')
        raise
    try:
        nprocs = args['mp_nprocs']
    except KeyError:
        nprocs = multiprocessing.cpu_count()
    # Workers receive a copy of args containing only picklable values.
    pickled_args = {}
    for key in args:
        try:
            pickle.dumps(args[key])
            pickled_args[key] = args[key]
        except (TypeError, pickle.PickleError, pickle.PicklingError):
            logger.debug('unable to pickle args parameter {0} in parallel_evaluation_mp'.format(key))
    start = time.time()
    try:
        pool = multiprocessing.Pool(processes=nprocs)
        results = [pool.apply_async(evaluator, ([c], pickled_args)) for c in candidates]
        pool.close()
        pool.join()
    except (OSError, RuntimeError) as e:
        logger.error('failed parallel_evaluation_mp: {0}'.format(str(e)))
        raise
    else:
        end = time.time()
        logger.debug('completed parallel_evaluation_mp in {0} seconds'.format(end - start))
        return [r.get()[0] for r in results]
Evaluate the candidates in parallel using multiprocessing .
47,671
def allow_ajax(request):
    """Decide whether the debug toolbar should be shown for this request.

    Requires the client IP to be listed in settings.INTERNAL_IPS; on
    django-debug-toolbar versions below 1.8 the toolbar's own AJAX traffic
    is suppressed unless it targets the RequestHistoryPanel. Otherwise the
    decision follows settings.DEBUG.
    """
    if request.META.get('REMOTE_ADDR', None) not in settings.INTERNAL_IPS:
        return False
    # Older toolbar versions poll their own endpoints; ignore those requests
    # unless the panel being refreshed is this one.
    if toolbar_version < LooseVersion('1.8') and request.get_full_path().startswith(DEBUG_TOOLBAR_URL_PREFIX) and request.GET.get('panel_id', None) != 'RequestHistoryPanel':
        return False
    return bool(settings.DEBUG)
Default function to determine whether to show the toolbar on a given page .
47,672
def content(self):
    """Render the panel's content for full-screen display.

    Collects id, title, and subtitle for every panel of every stored
    toolbar (panels that fail to expose an id are skipped), then renders
    the template with toolbars in most-recent-first order.
    """
    toolbars = OrderedDict()
    for id, toolbar in DebugToolbar._store.items():
        content = {}
        for panel in toolbar.panels:
            panel_id = None
            nav_title = ''
            nav_subtitle = ''
            try:
                panel_id = panel.panel_id
                nav_title = panel.nav_title
                # nav_subtitle may be a plain value or a callable.
                nav_subtitle = panel.nav_subtitle() if isinstance(panel.nav_subtitle, Callable) else panel.nav_subtitle
            except Exception:
                logger.debug('Error parsing panel info:', exc_info=True)
            if panel_id is not None:
                content.update({panel_id: {'panel_id': panel_id, 'nav_title': nav_title, 'nav_subtitle': nav_subtitle, }})
        toolbars[id] = {'toolbar': toolbar, 'content': content}
    # Reverse so the newest request appears first.
    return get_template().render(Context({'toolbars': OrderedDict(reversed(list(toolbars.items()))), 'trunc_length': CONFIG.get('RH_POST_TRUNC_LENGTH', 0)}))
Content of the panel when it's displayed in full screen.
47,673
def extract_content(self, selector='', attr='', default='', connector='', *args, **kwargs):
    """Extract content from the tag matched by ``selector``.

    ``attr`` may be "text" (joined, whitespace-normalized text content) or a
    tag attribute name; "href"/"src" values are resolved against the page
    URL. A selector of "url" returns the page URL itself. ``default`` is
    returned when the selector matches nothing; raises Exception for an
    invalid selector or when no content and no default exist.

    Fix: the fallback test used ``default is not ""`` — an identity
    comparison against a string literal, which is implementation-dependent
    and a SyntaxWarning on Python 3.8+ — replaced with an equality check.
    """
    try:
        if selector.lower() == "url":
            return self.url
        if attr.lower() == "text":
            tag = self.get_tree_tag(selector=selector, get_one=True)
            content = connector.join([make_ascii(x).strip() for x in tag.itertext()])
            content = content.replace("\n", " ").strip()
        else:
            tag = self.get_tree_tag(selector=selector, get_one=True)
            content = tag.get(attr)
            if attr in ["href", "src"]:
                content = urljoin(self.url, content)
        return content
    except IndexError:
        if default != "":
            return default
        raise Exception("There is no content for the %s selector - %s" % (self.__selector_type__, selector))
    except XPathError:
        raise Exception("Invalid %s selector - %s" % (self.__selector_type__, selector))
Method for performing the content extraction for the particular selector type.
47,674
def extract_links(self, selector='', *args, **kwargs):
    """Yield a new page object for every link matched by ``selector``,
    resolving each href against the current page URL."""
    try:
        for anchor in self.get_tree_tag(selector=selector):
            yield type(self)(urljoin(self.url, anchor.get('href')))
    except XPathError:
        raise Exception("Invalid %s selector - %s" % (self.__selector_type__, selector))
    except Exception:
        raise Exception("Invalid %s selector - %s" % (self.__selector_type__, selector))
Method for performing the link extraction for the crawler . \
47,675
def extract_tabular(self, header='', prefix='', suffix='', table_type='', *args, **kwargs):
    """Extract tabular data from the page.

    ``header`` is either a selector expression for the header cells or an
    explicit list of header names; each name is wrapped with ``prefix`` /
    ``suffix``. ``table_type`` selects row-wise or column-wise extraction.
    Returns (table_headers, result_list).

    NOTE(review): the bare ``unicode`` reference makes this Python 2 only.
    """
    if type(header) in [str, unicode]:
        try:
            header_list = self.get_tree_tag(header)
            table_headers = [prefix + h.text + suffix for h in header_list]
        except XPathError:
            raise Exception("Invalid %s selector for table header - %s" % (self.__selector_type__, header))
        except Exception:
            raise Exception("Invalid %s selector for table header - %s" % (self.__selector_type__, header))
    else:
        # Header names supplied directly as a list.
        table_headers = [prefix + h + suffix for h in header]
    if len(table_headers) == 0:
        raise Exception("Invalid %s selector for table header - %s" % (self.__selector_type__, header))
    if table_type not in ["rows", "columns"]:
        raise Exception("Specify 'rows' or 'columns' in table_type")
    if table_type == "rows":
        result_list = self.extract_rows(table_headers=table_headers, *args, **kwargs)
    else:
        result_list = self.extract_columns(table_headers=table_headers, *args, **kwargs)
    return table_headers, result_list
Method for performing the tabular data extraction . \
47,676
def extract_rows(self, result={}, selector='', table_headers=[], attr='', connector='', default='', verbosity=0, *args, **kwargs):
    """Row-wise data extraction for extract_tabular.

    Pairs each header with the matched value tags (padding with ``default``
    when headers outnumber values) and fills ``result`` with text content or
    the requested attribute. Returns the list of result dicts.

    NOTE(review): the mutable default arguments (result / table_headers) are
    shared across calls, and the izip / izip_longest imports make this
    Python 2 only.
    """
    result_list = []
    try:
        values = self.get_tree_tag(selector)
        if len(table_headers) >= len(values):
            # More headers than values: pad missing values with the default.
            from itertools import izip_longest
            pairs = izip_longest(table_headers, values, fillvalue=default)
        else:
            from itertools import izip
            pairs = izip(table_headers, values)
        for head, val in pairs:
            if verbosity > 1:
                print("\nExtracting", head, "attribute", sep=' ', end='')
            if attr.lower() == "text":
                try:
                    content = connector.join([make_ascii(x).strip() for x in val.itertext()])
                except Exception:
                    content = default
                content = content.replace("\n", " ").strip()
            else:
                content = val.get(attr)
                if attr in ["href", "src"]:
                    content = urljoin(self.url, content)
            result[head] = content
        result_list.append(result)
    except XPathError:
        raise Exception("Invalid %s selector - %s" % (self.__selector_type__, selector))
    except TypeError:
        raise Exception("Selector expression string to be provided. Got " + selector)
    return result_list
Row data extraction for extract_tabular
47,677
def extract_columns(self, result={}, selector='', table_headers=[], attr='', connector='', default='', verbosity=0, *args, **kwargs):
    """Column-wise data extraction for extract_tabular.

    ``selector`` is one expression or a list of expressions, one per column;
    output row i takes element i of every column. Iteration stops at the
    first exhausted column (IndexError).

    NOTE(review): the mutable default arguments are shared across calls, and
    the izip import makes this Python 2 only.
    """
    result_list = []
    try:
        if type(selector) in [str, unicode]:
            selectors = [selector]
        elif type(selector) == list:
            selectors = selector[:]
        else:
            raise Exception("Use a list of selector expressions for the various columns")
        from itertools import izip, count
        pairs = izip(table_headers, selectors)
        columns = {}
        for head, selector in pairs:
            columns[head] = self.get_tree_tag(selector)
        try:
            # Walk row indices until any column runs out of elements.
            for i in count(start=0):
                r = result.copy()
                for head in columns.keys():
                    if verbosity > 1:
                        print("\nExtracting", head, "attribute", sep=' ', end='')
                    col = columns[head][i]
                    if attr == "text":
                        try:
                            content = connector.join([make_ascii(x).strip() for x in col.itertext()])
                        except Exception:
                            content = default
                        content = content.replace("\n", " ").strip()
                    else:
                        content = col.get(attr)
                        if attr in ["href", "src"]:
                            content = urljoin(self.url, content)
                    r[head] = content
                result_list.append(r)
        except IndexError:
            pass
    except XPathError:
        raise Exception("Invalid %s selector - %s" % (self.__selector_type__, selector))
    except TypeError:
        raise Exception("Selector expression string to be provided. Got " + selector)
    return result_list
Column data extraction for extract_tabular
47,678
def runCLI():
    """Entry point for the Scrapple command-line tool.

    Parses arguments with docopt, validates them, dispatches to the command
    class matching the selected command, and prints expected errors instead
    of raising.
    """
    args = docopt(__doc__, version='0.3.0')
    try:
        check_arguments(args)
        command_list = ['genconfig', 'run', 'generate']
        select = itemgetter('genconfig', 'run', 'generate')
        # docopt sets exactly one command flag True; its index picks the command.
        selectedCommand = command_list[select(args).index(True)]
        cmdClass = get_command_class(selectedCommand)
        obj = cmdClass(args)
        obj.execute_command()
    except POSSIBLE_EXCEPTIONS as e:
        print('\n', e, '\n')
The starting point for the execution of the Scrapple command line tool .
47,679
def check_arguments(args):
    """Validate the arguments passed through the CLI commands.

    Raises a command-specific exception (InvalidType, InvalidSelector,
    InvalidOutputType, InvalidProjectName, InvalidLevels) for each failed
    check.
    """
    projectname_re = re.compile(r'[^a-zA-Z0-9_]')
    if args['genconfig']:
        if args['--type'] not in ['scraper', 'crawler']:
            raise InvalidType("--type has to be 'scraper' or 'crawler'")
        if args['--selector'] not in ['xpath', 'css']:
            raise InvalidSelector("--selector has to be 'xpath' or 'css'")
    if args['generate'] or args['run']:
        if args['--output_type'] not in ['json', 'csv']:
            raise InvalidOutputType("--output_type has to be 'json' or 'csv'")
    if args['genconfig'] or args['generate'] or args['run']:
        if projectname_re.search(args['<projectname>']) is not None:
            message = "<projectname> should consist of letters, digits or _"
            raise InvalidProjectName(message)
    # NOTE(review): --levels is validated here unconditionally; confirm
    # whether it should apply only to crawler-related commands. Also note
    # that if InvalidLevels subclasses ValueError, the raise inside the try
    # would be re-caught by the except clause below -- verify upstream.
    try:
        if int(args['--levels']) < 1:
            message = "--levels should be greater than, or equal to 1"
            raise InvalidLevels(message)
    except (TypeError, ValueError):
        message = " ".join(["--levels should be an integer and not of type", "{}".format(type(args['--levels']))])
        raise InvalidLevels(message)
Validates the arguments passed through the CLI commands .
47,680
def form_to_json(form):
    """Convert the web-interface POST form into a Scrapple JSON config file.

    Writes ``<project_name>.json`` into the current working directory.
    Raises Exception for a missing project name, an invalid selector type,
    or an empty URL.

    Fix: corrected the grammar of the selector-type error message
    ("has to css or xpath" -> "has to be css or xpath").
    """
    config = dict()
    if form['project_name'] == "":
        raise Exception('Project name cannot be empty.')
    if form['selector_type'] not in ["css", "xpath"]:
        raise Exception('Selector type has to be css or xpath')
    config['project_name'] = form['project_name']
    config['selector_type'] = form['selector_type']
    config['scraping'] = dict()
    if form['url'] == "":
        raise Exception('URL cannot be empty')
    config['scraping']['url'] = form['url']
    config['scraping']['data'] = list()
    # Field rows are numbered from 1; stop at the first missing index.
    for i in itertools.count(start=1):
        try:
            data = {
                'field': form['field_' + str(i)],
                'selector': form['selector_' + str(i)],
                'attr': form['attribute_' + str(i)],
                'default': form['default_' + str(i)],
            }
            config['scraping']['data'].append(data)
        except KeyError:
            break
    with open(os.path.join(os.getcwd(), form['project_name'] + '.json'), 'w') as f:
        json.dump(config, f)
    return
Takes the form from the POST request in the web interface and generates the JSON config \ file
47,681
def execute_command(self):
    """Execute the ``run`` command.

    Loads and validates the project's JSON configuration, then runs the
    extractor. Expected failures (bad verbosity, missing config file,
    invalid config) are reported on stdout rather than raised.
    """
    try:
        self.args['--verbosity'] = int(self.args['--verbosity'])
        if self.args['--verbosity'] not in [0, 1, 2]:
            raise ValueError
        if self.args['--verbosity'] > 0:
            print(Back.GREEN + Fore.BLACK + "Scrapple Run")
            print(Back.RESET + Fore.RESET)
        import json
        with open(self.args['<projectname>'] + '.json', 'r') as f:
            self.config = json.load(f)
        validate_config(self.config)
        self.run()
    except ValueError:
        print(Back.WHITE + Fore.RED + "Use 0, 1 or 2 for verbosity." + Back.RESET + Fore.RESET, sep="")
    except IOError:
        print(Back.WHITE + Fore.RED + self.args['<projectname>'], ".json does not ", "exist. Use ``scrapple genconfig``." + Back.RESET + Fore.RESET, sep="")
    except InvalidConfigException as e:
        # NOTE(review): concatenating the exception object to strings raises
        # TypeError unless InvalidConfigException subclasses str -- verify.
        print(Back.WHITE + Fore.RED + e + Back.RESET + Fore.RESET, sep="")
The run command implements the web content extractor corresponding to the given \ configuration file .
47,682
def traverse_next(page, nextx, results, tabular_data_headers=[], verbosity=0):
    """Recursively crawl the links selected by ``nextx['follow_link']``.

    For each followed page, scrape the configured data attributes on top of
    a copy of the parent's ``results``, run any configured table
    extractions, and either yield (tabular_data_headers, result) pairs or
    recurse into nested 'next' specifications.

    NOTE(review): the mutable default for tabular_data_headers is shared
    across top-level calls.
    """
    for link in page.extract_links(selector=nextx['follow_link']):
        if verbosity > 0:
            print('\n')
            print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET, end='')
        r = results.copy()
        for attribute in nextx['scraping'].get('data'):
            if attribute['field'] != "":
                if verbosity > 1:
                    print("\nExtracting", attribute['field'], "attribute", sep=' ', end='')
                r[attribute['field']] = link.extract_content(**attribute)
        if not nextx['scraping'].get('table'):
            result_list = [r]
        else:
            tables = nextx['scraping'].get('table', [])
            for table in tables:
                table.update({'result': r, 'verbosity': verbosity})
                table_headers, result_list = link.extract_tabular(**table)
                tabular_data_headers.extend(table_headers)
        if not nextx['scraping'].get('next'):
            # Leaf level: emit the accumulated rows.
            for r in result_list:
                yield (tabular_data_headers, r)
        else:
            # Recurse into the nested 'next' specifications.
            for nextx2 in nextx['scraping'].get('next'):
                for tdh, result in traverse_next(link, nextx2, r, tabular_data_headers=tabular_data_headers, verbosity=verbosity):
                    yield (tdh, result)
Recursive generator to traverse through the next attribute and crawl through the links to be followed.
47,683
def validate_config(config):
    """Validate the extractor configuration file.

    Ensures field names are unique across the config; returns True when the
    configuration is valid.

    Fix: due to operator precedence the duplicate count was computed as
    ``("... %d ..." % len(fields)) - len(set(fields))``, which raised
    TypeError instead of the intended InvalidConfigException; the
    subtraction is now performed before formatting.
    """
    fields = list(get_fields(config))
    duplicates = len(fields) - len(set(fields))
    if duplicates:
        raise InvalidConfigException("Invalid configuration file - %d duplicate field names" % duplicates)
    return True
Validates the extractor configuration file . Ensures that there are no duplicate field names etc .
47,684
def get_fields(config):
    """Recursively yield every non-empty field name in the config tree."""
    scraping = config['scraping']
    for entry in scraping['data']:
        name = entry['field']
        if name != '':
            yield name
    for follow in scraping.get('next', []):
        yield from get_fields(follow)
Recursive generator that yields the field names in the config file
47,685
def extract_fieldnames(config):
    """Return a list of unique field names from the config file.

    Duplicate names are disambiguated with numeric suffixes
    ("x", "x_2", "x_3", ...).

    Fix: the original used ``fields.count(x) + 1`` for the suffix, but once
    a repeat is stored as "x_2" the count of "x" stops growing, so a third
    occurrence collided with the second ("x_2" twice). Occurrences are now
    counted explicitly per base name.
    """
    fields = []
    seen = {}
    for name in get_fields(config):
        occurrence = seen.get(name, 0) + 1
        seen[name] = occurrence
        fields.append(name if occurrence == 1 else name + '_' + str(occurrence))
    return fields
Function to return a list of unique field names from the config file
47,686
def run(self, dag: DAGCircuit) -> DAGCircuit:
    """Run one pass of optimisation on the circuit and route for the backend.

    Converts the qiskit DAG to a pytket circuit, optimises/routes it via
    process_circ, converts back, and records the resulting qubit placement
    on ``final_layout``.
    """
    circ = dagcircuit_to_tk(dag, _DROP_CONDS=self.DROP_CONDS, _BOX_UNKNOWN=self.BOX_UNKNOWN)
    circ, circlay = self.process_circ(circ)
    newdag = tk_to_dagcircuit(circ)
    newdag.name = dag.name
    # Record the qubit permutation produced by routing as the final layout.
    finlay = dict()
    for i, qi in enumerate(circlay):
        finlay[('q', i)] = ('q', qi)
    newdag.final_layout = finlay
    return newdag
Run one pass of optimisation on the circuit and route for the given backend .
47,687
def _sort_row_col(qubits: Iterator[GridQubit]) -> List[GridQubit]:
    """Return the grid qubits ordered by row, breaking ties by column."""
    return sorted(qubits, key=lambda qubit: (qubit.row, qubit.col))
Sort grid qubits first by row then by column
47,688
def print_setting(self) -> str:
    """Return a human-readable summary of the QSE configuration and its
    variational form."""
    parts = [
        "\n",
        "==================== Setting of {} ============================\n".format(self.configuration['name']),
        "{}".format(self.setting),
        "===============================================================\n",
        "{}".format(self._var_form.setting),
        "===============================================================\n",
    ]
    return "".join(parts)
Presents the QSE settings as a string .
47,689
def _energy_evaluation(self, operator):
    """Evaluate the energy of the current input circuit for ``operator``.

    Uses the cached quantum state when available, otherwise the optimised
    circuit; operators with no Pauli terms contribute zero energy. Returns
    (mean, std) as real numbers.
    """
    if self._quantum_state is not None:
        input_circuit = self._quantum_state
    else:
        input_circuit = [self.opt_circuit]
    if operator._paulis:
        mean_energy, std_energy = operator.evaluate_with_result(self._operator_mode, input_circuit, self._quantum_instance.backend, self.ret)
    else:
        mean_energy = 0.0
        std_energy = 0.0
    # Avoid flooding the logs with per-evaluation circuit summaries.
    operator.disable_summarize_circuits()
    logger.debug('Energy evaluation {} returned {}'.format(self._eval_count, np.real(mean_energy)))
    return np.real(mean_energy), np.real(std_energy)
Evaluate the energy of the current input circuit with respect to the given operator .
47,690
def _run(self) -> dict:
    """Run the QSE algorithm to compute the eigenvalues of the Hamiltonian.

    Requires a statevector backend; raises AquaError otherwise. The
    execution result is cached on ``self.ret`` for later energy
    evaluations, and evaluation statistics are recorded in the returned
    result dict.
    """
    if not self._quantum_instance.is_statevector:
        raise AquaError("Can only calculate state for QSE with statevector backends")
    # Execute the optimised circuit once and keep the raw result around.
    result = self._quantum_instance.execute(self.opt_circuit)
    self.ret = result
    self._eval_count = 0
    self._solve()
    self._ret['eval_count'] = self._eval_count
    self._ret['eval_time'] = self._eval_time
    return self._ret
Runs the QSE algorithm to compute the eigenvalues of the Hamiltonian .
47,691
def whooshee_search(self, search_string, group=whoosh.qparser.OrGroup, whoosheer=None,
                    match_substrings=True, limit=None, order_by_relevance=10):
    """Do a fulltext search on the query.

    Returns a query filtered with results of the fulltext search.

    If *whoosheer* is not given, it is deduced from the entities involved
    in the query (including joined and aliased entities). *order_by_relevance*
    controls ordering: negative orders all results by search relevance,
    positive orders only the first N by relevance, zero leaves the
    database ordering untouched.
    """
    if not whoosheer:
        # Collect all mapped classes this query touches: selected columns
        # plus any joined entities.
        entities = set()
        for cd in self.column_descriptions:
            entities.add(cd['type'])
        if self._join_entities and isinstance(self._join_entities[0], Mapper):
            entities.update(set([x.entity for x in self._join_entities]))
        else:
            entities.update(set(self._join_entities))
        # Strip SQLAlchemy aliasing so the set matches the whoosheer's
        # declared model classes.
        unaliased = set()
        for entity in entities:
            if isinstance(entity, (AliasedClass, AliasedInsp)):
                unaliased.add(inspect(entity).mapper.class_)
            else:
                unaliased.add(entity)
        # NOTE(review): raises StopIteration if no registered whoosheer
        # covers exactly this set of models — confirm that is intended.
        whoosheer = next(w for w in _get_config(self)['whoosheers']
                         if set(w.models) == unaliased)
    # Find the (last) unique field of the schema; its values identify rows.
    # NOTE(review): `uniq` stays unbound if the schema has no unique field,
    # causing a NameError below — presumably every whoosheer schema has one.
    for fname, field in list(whoosheer.schema._fields.items()):
        if field.unique:
            uniq = fname
    res = whoosheer.search(search_string=search_string, values_of=uniq,
                           group=group, match_substrings=match_substrings,
                           limit=limit)
    if not res:
        # No hits: return a query that matches nothing.
        return self.filter(text('null'))
    attr = None
    if hasattr(whoosheer, '_is_model_whoosheer'):
        # Model whoosheer: the unique field maps directly onto the model.
        attr = getattr(whoosheer.models[0], uniq)
    else:
        # Custom whoosheer: field names are '<model>_<column>'.
        for m in whoosheer.models:
            if m.__name__.lower() == uniq.split('_')[0]:
                attr = getattr(m, uniq.split('_')[1])
    search_query = self.filter(attr.in_(res))
    if order_by_relevance < 0:
        # Order every row by its rank in the fulltext results.
        search_query = search_query.order_by(sqlalchemy.sql.expression.case(
            [(attr == uniq_val, index) for index, uniq_val in enumerate(res)],
        ))
    elif order_by_relevance > 0:
        # Order only the first N hits by rank; everything else shares
        # the sentinel rank N.
        search_query = search_query.order_by(sqlalchemy.sql.expression.case(
            [(attr == uniq_val, index)
             for index, uniq_val in enumerate(res) if index < order_by_relevance],
            else_=order_by_relevance))
    else:
        # order_by_relevance == 0: keep database ordering.
        pass
    return search_query
Do a fulltext search on the query . Returns a query filtered with results of the fulltext search .
47,692
def search(cls, search_string, values_of='', group=whoosh.qparser.OrGroup,
           match_substrings=True, limit=None):
    """Search the indexed fields for *search_string*.

    Returns the raw whoosh results unless *values_of* names a stored
    field, in which case only that field's values are returned.
    """
    index = Whooshee.get_or_create_index(_get_app(cls), cls)
    query_text = cls.prep_search_string(search_string, match_substrings)
    with index.searcher() as searcher:
        parser = whoosh.qparser.MultifieldParser(cls.schema.names(),
                                                 index.schema, group=group)
        parsed = parser.parse(query_text)
        hits = searcher.search(parsed, limit=limit)
        if values_of:
            return [hit[values_of] for hit in hits]
        return hits
Searches the fields for given search_string . Returns the found records if values_of is left empty else the values of the given columns .
47,693
def create_index(cls, app, wh):
    """Create and open an index for the given whoosheer and app.

    With memory storage a fresh RAM index is always created; otherwise an
    existing on-disk index is opened, or created (along with its
    directory) when missing.
    """
    cfg = app.extensions['whooshee']
    if cfg['memory_storage']:
        storage = RamStorage()
        index = storage.create_index(wh.schema)
        assert index
        return index
    subdir = getattr(wh, 'index_subdir', cls.camel_to_snake(wh.__name__))
    index_path = os.path.join(cfg['index_path_root'], subdir)
    if whoosh.index.exists_in(index_path):
        return whoosh.index.open_dir(index_path)
    if not os.path.exists(index_path):
        os.makedirs(index_path)
    return whoosh.index.create_in(index_path, wh.schema)
Creates and opens an index for the given whoosheer and app . If the index already exists it just opens it otherwise it creates it first .
47,694
def get_or_create_index(cls, app, wh):
    """Get a previously cached index, or create and cache a new one,
    for the given app and whoosheer."""
    cache = app.extensions['whooshee']['whoosheers_indexes']
    if wh not in cache:
        cache[wh] = cls.create_index(app, wh)
    return cache[wh]
Gets a previously cached index or creates a new one for the given app and whoosheer .
47,695
def on_commit(self, changes):
    """Commit hook: apply model *changes* to the fulltext indexes.

    *changes* is an iterable of ``(instance, operation)`` pairs; for each
    auto-updating whoosheer that covers the instance's model, the
    whoosheer's ``<operation>_<modelname>`` method (if defined) is invoked
    with an index writer.
    """
    # Indexing can be globally disabled in configuration.
    if _get_config(self)['enable_indexing'] is False:
        return None
    for wh in self.whoosheers:
        if not wh.auto_update:
            continue
        # The writer is created lazily: only if at least one change is
        # actually relevant to this whoosheer.
        writer = None
        for change in changes:
            if change[0].__class__ in wh.models:
                # e.g. 'update_user' for an update to a User instance.
                method_name = '{0}_{1}'.format(change[1],
                                               change[0].__class__.__name__.lower())
                method = getattr(wh, method_name, None)
                if method:
                    if not writer:
                        writer = type(self).get_or_create_index(
                            _get_app(self), wh).writer(
                            timeout=_get_config(self)['writer_timeout'])
                    method(writer, change[0])
        # Committing the writer persists all changes for this whoosheer.
        if writer:
            writer.commit()
Method that gets called when a model is changed . This serves to do the actual index writing .
47,696
def reindex(self):
    """Reindex all data: rebuild every whoosheer's index from the
    current database contents."""
    for wh in self.whoosheers:
        index = type(self).get_or_create_index(_get_app(self), wh)
        writer = index.writer(timeout=_get_config(self)['writer_timeout'])
        for model in wh.models:
            # Each whoosheer exposes an 'update_<modelname>' method.
            update = getattr(wh, "{0}_{1}".format(UPDATE_KWD,
                                                  model.__name__.lower()))
            for item in model.query.all():
                update(writer, item)
        writer.commit()
Reindex all data
47,697
def dump_info():
    """Show various details about the account & servers.

    Queries each Vultr API listing endpoint in turn and logs the
    JSON-formatted response. The first VultrError aborts the remaining
    listings (matching the original single try/except behaviour).
    """
    vultr = Vultr(API_KEY)
    # (label, fetch) pairs drive one logging loop instead of twelve
    # near-identical statements; the rendered log text is unchanged.
    sections = [
        ('account info', vultr.account.info),
        ('apps', vultr.app.list),
        ('backups', vultr.backup.list),
        ('DNS', vultr.dns.list),
        ('ISOs', vultr.iso.list),
        ('OSs', vultr.os.list),
        ('plans', vultr.plans.list),
        ('regions', vultr.regions.list),
        ('servers', vultr.server.list),
        ('snapshots', vultr.snapshot.list),
        ('SSH keys', vultr.sshkey.list),
        ('startup scripts', vultr.startupscript.list),
    ]
    try:
        for label, fetch in sections:
            logging.info('Listing %s:\n%s', label, dumps(fetch(), indent=2))
    except VultrError as ex:
        logging.error('VultrError: %s', ex)
Shows various details about the account & servers
47,698
def update_params(params, updates):
    """Merge *updates* into a copy of *params*.

    *params* that is not a dict (e.g. None) is treated as empty; the
    caller's dict is never mutated.
    """
    merged = dict(params) if isinstance(params, dict) else {}
    merged.update(updates)
    return merged
Merges updates into params
47,699
def _request_get_helper(self, url, params=None):
    """API GET request helper.

    Sends a GET to *url* with *params* (plus the API key when one is
    configured) and a 60-second timeout, returning the Response.

    Bug fixed: the previous version inserted 'api_key' directly into the
    caller's dict, mutating it as a side effect; we now work on a copy.
    """
    query = dict(params) if isinstance(params, dict) else {}
    if self.api_key:
        query['api_key'] = self.api_key
    return requests.get(url, params=query, timeout=60)
API GET request helper