idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
22,800
def get_translation_cache_key(translated_model, master_id, language_code):
    """Build the low-level cache key for a single translation row.

    The key combines the app label, the translation model's class name, the
    master object's primary key and the language code, so every translation
    gets its own cache slot.
    """
    model_meta = translated_model._meta
    return 'parler.{0}.{1}.{2}.{3}'.format(
        model_meta.app_label, translated_model.__name__, master_id, language_code)
The low - level function to get the cache key for a translation .
22,801
def get_cached_translation(instance, language_code=None, related_name=None, use_fallback=False):
    """Fetch a cached translation object, or ``None`` on a cache miss.

    :param instance: the shared (master) model instance.
    :param language_code: language to fetch; defaults to the instance's
        current language.
    :param related_name: selects which translated-fields model to use.
    :param use_fallback: whether a cached fallback entry may be returned.
    """
    if language_code is None:
        language_code = instance.get_current_language()
    translated_model = instance._parler_meta.get_model_by_related_name(related_name)
    values = _get_cached_values(instance, translated_model, language_code, use_fallback)
    if not values:
        return None
    try:
        translation = translated_model(**values)
    except TypeError:
        # The cached value dict no longer matches the model's fields
        # (e.g. after a schema change); treat it as a cache miss.
        return None
    # Mark the object as loaded from the database, so saving it
    # updates the existing row instead of inserting a new one.
    translation._state.adding = False
    return translation
Fetch a cached translation.
22,802
def _cache_translation(translation, timeout=cache.default_timeout):
    """Store a translation in the cache.

    Only a dict of the translated field values plus the primary key is
    stored (not a pickled object); ``get_cached_translation()`` rebuilds
    the model instance from those values.
    """
    if not appsettings.PARLER_ENABLE_CACHING:
        return
    if translation.master_id is None:
        raise ValueError("Can't cache unsaved translation")
    # Collect the translated field values; include 'id' so the
    # reconstructed object keeps its original primary key.
    fields = translation.get_translated_fields()
    values = {'id': translation.id}
    for name in fields:
        values[name] = getattr(translation, name)
    key = get_translation_cache_key(translation.__class__, translation.master_id, translation.language_code)
    cache.set(key, values, timeout=timeout)
Store a new translation in the cache .
22,803
def _cache_translation_needs_fallback(instance, language_code, related_name, timeout=cache.default_timeout):
    """Cache the fact that a translation doesn't exist, so a fallback is used.

    Stores a sentinel dict (``{'__FALLBACK__': True}``) under the regular
    translation cache key, letting later lookups skip the database query.
    """
    # Caching disabled, or the instance was never saved: nothing to record.
    if not appsettings.PARLER_ENABLE_CACHING or not instance.pk or instance._state.adding:
        return
    tr_model = instance._parler_meta.get_model_by_related_name(related_name)
    key = get_translation_cache_key(tr_model, instance.pk, language_code)
    cache.set(key, {'__FALLBACK__': True}, timeout=timeout)
Store the fact that a translation doesn't exist and the fallback should be used.
22,804
def select_template_name(template_name_list, using=None):
    """Given a list of template names, return the first name that exists.

    Successful lookups are memoized in ``_cached_name_lookups`` keyed by the
    tuple of candidate names; a ``None`` result (nothing found) is not cached.
    """
    if not isinstance(template_name_list, tuple):
        # Normalize to a tuple so it can be used as a dict key.
        template_name_list = tuple(template_name_list)
    try:
        return _cached_name_lookups[template_name_list]
    except KeyError:
        # Try each candidate until one loads without raising.
        for template_name in template_name_list:
            try:
                get_template(template_name, using=using)
            except TemplateDoesNotExist:
                continue
            else:
                template_name = six.text_type(template_name)
                _cached_name_lookups[template_name_list] = template_name
                return template_name
        return None
Given a list of template names, find the first one that exists.
22,805
def get_view_url(self):
    """Return the URL of the current view, used by the ``get_translated_url``
    template tag.

    Requires the ``view_url_name`` attribute to be set on the view class.
    """
    url_name = self.view_url_name
    if not url_name:
        raise ImproperlyConfigured("Missing `view_url_name` attribute on {0}".format(self.__class__.__name__))
    return reverse(url_name, args=self.args, kwargs=self.kwargs)
This method is used by the get_translated_url template tag .
22,806
def get_object(self, queryset=None):
    """Fetch the object using a translated slug.

    Tries every language from ``get_language_choices()`` in order. When the
    object was only found via a later (fallback) language but has a
    translation in a more-preferred language, ``FallbackLanguageResolved``
    is raised so the caller can switch to that language.

    :raises Http404: when no language yields a match.
    """
    if queryset is None:
        queryset = self.get_queryset()
    slug = self.kwargs[self.slug_url_kwarg]
    choices = self.get_language_choices()
    obj = None
    using_fallback = False
    prev_choices = []
    for lang_choice in choices:
        try:
            # Fetch the single object matching the slug in this language.
            filters = self.get_translated_filters(slug=slug)
            obj = queryset.translated(lang_choice, **filters).language(lang_choice).get()
        except ObjectDoesNotExist:
            # Not found in this language; remember it and try the next one.
            using_fallback = True
            prev_choices.append(lang_choice)
        else:
            break
    if obj is None:
        tried_msg = ", tried languages: {0}".format(", ".join(choices))
        error_message = translation.ugettext("No %(verbose_name)s found matching the query") % {'verbose_name': queryset.model._meta.verbose_name}
        raise Http404(error_message + tried_msg)
    # Found via fallback: if a more-preferred language has a translation,
    # let the caller resolve to that language instead.
    if using_fallback:
        for prev_choice in prev_choices:
            if obj.has_translation(prev_choice):
                raise FallbackLanguageResolved(obj, prev_choice)
    return obj
Fetch the object using a translated slug .
22,807
def get_object(self, queryset=None):
    """Assign the current language to the retrieved object."""
    object = super(LanguageChoiceMixin, self).get_object(queryset)
    if isinstance(object, TranslatableModelMixin):
        # initialize=True ensures a translation object exists, so accessing
        # translated fields on the object won't fail.
        object.set_current_language(self.get_language(), initialize=True)
    return object
Assign the language for the retrieved object .
22,808
def get_form_class(self):
    """Return a ``TranslatableModelForm`` by default if no ``form_class`` is set.

    If a subclass overrides ``get_form_class()``, that override takes
    precedence; otherwise a translation-aware model form is generated.
    """
    super_method = super(TranslatableModelFormMixin, self).get_form_class
    # Unwrap to the plain function so identity comparison works on both
    # bound methods (Py2) and plain functions (Py3).
    default_method = getattr(ModelFormMixin.get_form_class, '__func__', ModelFormMixin.get_form_class)
    if not (super_method.__func__ is default_method):
        # A parent class overrode get_form_class(); don't get in its way.
        return super_method()
    else:
        # Same logic as ModelFormMixin.get_form_class(), but using the
        # translation-aware form base class.
        if self.form_class:
            return self.form_class
        else:
            model = _get_view_model(self)
            if self.fields:
                fields = self.fields
                return modelform_factory(model, form=TranslatableModelForm, fields=fields)
            else:
                return modelform_factory(model, form=TranslatableModelForm)
Return a TranslatableModelForm by default if no form_class is set .
22,809
def get_parler_languages_from_django_cms(cms_languages=None):
    """Convert the django CMS ``CMS_LANGUAGES`` setting into ``PARLER_LANGUAGES``.

    ``CMS_LANGUAGES`` is a strict superset of ``PARLER_LANGUAGES``, so all
    keys parler doesn't understand are stripped from a deep copy of the
    input. Returns ``None`` when *cms_languages* is empty.
    """
    valid_keys = ['code', 'fallbacks', 'hide_untranslated', 'redirect_on_fallback']
    if cms_languages:
        # ``long`` only exists on Python 2.
        if sys.version_info < (3, 0, 0):
            int_types = (int, long)
        else:
            int_types = int
        parler_languages = copy.deepcopy(cms_languages)
        # Iterate the original and mutate the copy, so deletion is safe.
        for site_id, site_config in cms_languages.items():
            # Keep only integer site ids and the special 'default' entry.
            if site_id and (not isinstance(site_id, int_types) and site_id != 'default'):
                del parler_languages[site_id]
                continue
            if site_id == 'default':
                # 'default' maps directly to a single config dict.
                for key, value in site_config.items():
                    if key not in valid_keys:
                        del parler_languages['default'][key]
            else:
                # Site entries map to a list of per-language config dicts.
                for i, lang_config in enumerate(site_config):
                    for key, value in lang_config.items():
                        if key not in valid_keys:
                            del parler_languages[site_id][i][key]
        return parler_languages
    return None
Converts django CMS setting CMS_LANGUAGES into PARLER_LANGUAGES . Since CMS_LANGUAGES is a strict superset of PARLER_LANGUAGES we do a bit of cleansing to remove irrelevant items .
22,810
def get_first_language(self, site_id=None):
    """Return the first language configured for the given site.

    Useful for user interfaces that display the languages in tabs.
    Falls back to the ``default`` entry when the site has no (or an
    empty) explicit configuration.
    """
    if site_id is None:
        site_id = getattr(settings, 'SITE_ID', None)
    try:
        return self[site_id][0]['code']
    except (KeyError, IndexError):
        # No languages configured for this site; use the global default.
        return self['default']['code']
Return the first language for the current site . This can be used for user interfaces where the languages are displayed in tabs .
22,811
def _get_model_form_field(model, name, formfield_callback=None, **kwargs):
    """Create the form field for a model field.

    Returns ``None`` when the model field is not editable. When a
    *formfield_callback* is given, it is used to build the form field;
    it must be callable.
    """
    model_field = model._meta.get_field(name)
    if not model_field.editable:
        return None
    if formfield_callback is None:
        return model_field.formfield(**kwargs)
    if not callable(formfield_callback):
        raise TypeError('formfield_callback must be a function or callable')
    return formfield_callback(model_field, **kwargs)
Utility to create the formfield from a model field . When a field is not editable a None will be returned .
22,812
def save_translated_fields(self):
    """Save all translated form fields onto the instance.

    Collects the cleaned values of the translated fields, assigns them to
    the instance's translation objects, then mirrors the values back onto
    the shared instance so attribute access stays consistent.
    """
    fields = {}
    for field in self._translated_fields:
        try:
            value = self.cleaned_data[field]
        except KeyError:
            # Field was excluded from the form, or did not validate.
            continue
        fields[field] = value
    translations = self.instance._set_translated_fields(**fields)
    # Clean each translation and copy its values back to the shared
    # instance, skipping the bookkeeping columns.
    non_translated_fields = set(('id', 'master_id', 'language_code'))
    for translation in translations:
        self._post_clean_translation(translation)
        for field in translation._get_field_names():
            if field in non_translated_fields:
                continue
            setattr(self.instance, field, getattr(translation, field))
Save all translated fields .
22,813
def create_translations_model(shared_model, related_name, meta, **fields):
    """Dynamically create the translations model for *shared_model*.

    The generated model subclasses ``TranslatedFieldsModel``, carries the
    given translated *fields*, and is registered as an attribute of the
    shared model's module so it remains importable (e.g. for migrations).
    """
    if not meta:
        meta = {}
    if shared_model._meta.abstract:
        raise TypeError("Can't create TranslatedFieldsModel for abstract class {0}".format(shared_model.__name__))
    # Build the inner Meta: inherit app/tablespace/managed state from the
    # shared model, and enforce one translation per language per object.
    meta['app_label'] = shared_model._meta.app_label
    meta['db_tablespace'] = shared_model._meta.db_tablespace
    meta['managed'] = shared_model._meta.managed
    meta['unique_together'] = list(meta.get('unique_together', [])) + [('language_code', 'master')]
    meta.setdefault('db_table', '{0}_translation'.format(shared_model._meta.db_table))
    meta.setdefault('verbose_name', _lazy_verbose_name(shared_model))
    # Avoid creating any permissions for the translation model.
    meta.setdefault('default_permissions', ())
    # Assemble the attributes of the new model class.
    name = str('{0}Translation'.format(shared_model.__name__))
    attrs = {}
    attrs.update(fields)
    attrs['Meta'] = type(str('Meta'), (object,), meta)
    attrs['__module__'] = shared_model.__module__
    attrs['objects'] = models.Manager()
    attrs['master'] = TranslationsForeignKey(shared_model, related_name=related_name, editable=False, null=True, on_delete=models.CASCADE)
    # Create the class through the metaclass, then register it in the
    # shared model's module under its generated name.
    translations_model = TranslatedFieldsModelBase(name, (TranslatedFieldsModel,), attrs)
    mod = sys.modules[shared_model.__module__]
    setattr(mod, name, translations_model)
    return translations_model
Dynamically create the translations model for the given shared model.
22,814
def _set_translated_fields(self, language_code=None, **fields):
    """Assign field values to the translated model(s).

    Returns the list of translation objects that received values; the
    translations are auto-created when they don't exist yet.
    """
    updated_translations = []
    for parler_meta, model_fields in self._parler_meta._split_fields(**fields):
        translation = self._get_translated_model(language_code=language_code, auto_create=True, meta=parler_meta)
        for name, value in six.iteritems(model_fields):
            setattr(translation, name, value)
        updated_translations.append(translation)
    return updated_translations
Assign fields to the translated models .
22,815
def create_translation(self, language_code, **fields):
    """Add a translation to the model in the given language.

    Raises ``ValueError`` when the language is ``None`` or a translation
    for it already exists.
    """
    if language_code is None:
        raise ValueError(get_null_language_error())
    root_model = self._parler_meta.root_model
    if self._translations_cache[root_model].get(language_code, None):
        raise ValueError("Translation already exists: {0}".format(language_code))
    # Assign the fields, then persist each affected translation object.
    for new_translation in self._set_translated_fields(language_code, **fields):
        self.save_translation(new_translation)
Add a translation to the model .
22,816
def delete_translation(self, language_code, related_name=None):
    """Delete a translation from the model.

    When *related_name* is ``None``, every translated-fields model is
    checked; otherwise only the named one. Returns the number of deleted
    rows and raises ``ValueError`` when nothing was deleted.
    """
    if language_code is None:
        raise ValueError(get_null_language_error())
    if related_name is None:
        metas = self._parler_meta
    else:
        metas = [self._parler_meta[related_name]]
    num_deleted = 0
    for meta in metas:
        try:
            translation = self._get_translated_model(language_code, meta=meta)
        except meta.model.DoesNotExist:
            continue
        translation.delete()
        num_deleted += 1
        # Evict the deleted language from the local cache...
        try:
            del self._translations_cache[meta.model][language_code]
        except KeyError:
            pass
        # ...and drop any now-stale prefetch results.
        try:
            del self._prefetched_objects_cache[meta.rel_name]
        except (AttributeError, KeyError):
            pass
    if not num_deleted:
        raise ValueError("Translation does not exist: {0}".format(language_code))
    return num_deleted
Delete a translation from a model .
22,817
def set_current_language(self, language_code, initialize=False):
    """Switch the currently active language of the object.

    With ``initialize=True``, a translation object is created for the new
    language when none exists yet.
    """
    self._current_language = normalize_language_code(language_code or get_language())
    if not initialize:
        return
    # Ensure a translation object exists for the newly selected language.
    self._get_translated_model(use_fallback=False, auto_create=True)
Switch the currently active language of the object.
22,818
def get_fallback_languages(self):
    """Return the fallback language codes used when the current language
    has no translation."""
    current = self._current_language
    lang_dict = get_language_settings(current)
    return [code for code in lang_dict['fallbacks'] if code != current] or []
Return the fallback language codes which are used in case there is no translation for the currently active language .
22,819
def has_translation(self, language_code=None, related_name=None):
    """Return whether a translation exists for the given language.

    Defaults to the current language. Checks (in order) the local cache,
    the prefetched translations, the shared cache, and finally the database.
    """
    if language_code is None:
        language_code = self._current_language
        if language_code is None:
            raise ValueError(get_null_language_error())
    meta = self._parler_meta._get_extension_by_related_name(related_name)
    try:
        # Local cache answers directly; note this also covers newly
        # auto-created translations that are not saved yet.
        return not is_missing(self._translations_cache[meta.model][language_code])
    except KeyError:
        # Use the prefetched translations when available.
        if language_code in self._read_prefetched_translations(meta=meta):
            return True
        # Try the shared cache; a fallback hit means this language itself
        # is missing.
        object = get_cached_translation(self, language_code, related_name=related_name, use_fallback=True)
        if object is not None:
            return object.language_code == language_code
        try:
            # Last resort: hit the database (also fills the cache).
            self._get_translated_model(language_code, use_fallback=False, auto_create=False, meta=meta)
        except meta.model.DoesNotExist:
            return False
        else:
            return True
Return whether a translation for the given language exists . Defaults to the current language code .
22,820
def get_available_languages(self, related_name=None, include_unsaved=False):
    """Return the language codes of all translated variations.

    Uses the prefetched translations when available (avoiding a query),
    otherwise queries the database. With *include_unsaved*, locally
    created but not-yet-saved translations are merged in as well.
    """
    meta = self._parler_meta._get_extension_by_related_name(related_name)
    prefetch = self._get_prefetched_translations(meta=meta)
    if prefetch is not None:
        # Read from the prefetched rows; no extra database query needed.
        db_languages = sorted(obj.language_code for obj in prefetch)
    else:
        qs = self._get_translated_queryset(meta=meta)
        db_languages = qs.values_list('language_code', flat=True).order_by('language_code')
    if include_unsaved:
        # Everything in the local cache that isn't a "missing" marker.
        local_languages = (k for k, v in six.iteritems(self._translations_cache[meta.model]) if not is_missing(v))
        return list(set(db_languages) | set(local_languages))
    else:
        return db_languages
Return the language codes of all translated variations .
22,821
def get_translation(self, language_code, related_name=None):
    """Fetch the translated model instance for the given language."""
    extension = self._parler_meta._get_extension_by_related_name(related_name)
    return self._get_translated_model(language_code, meta=extension)
Fetch the translated model
22,822
def _get_translated_model(self, language_code=None, use_fallback=False, auto_create=False, meta=None):
    """Fetch the translated-fields model for the given language.

    Lookup order: local cache, prefetched translations, shared cache,
    database. With *auto_create*, a new unsaved translation is created on
    a miss; with *use_fallback*, the configured fallback languages are
    tried before giving up.

    :raises meta.model.DoesNotExist: when no translation can be found.
    """
    if self._parler_meta is None:
        raise ImproperlyConfigured("No translation is assigned to the current model!")
    if self._translations_cache is None:
        raise RuntimeError("Accessing translated fields before super.__init__() is not possible.")
    if not language_code:
        language_code = self._current_language
        if language_code is None:
            raise ValueError(get_null_language_error())
    if meta is None:
        meta = self._parler_meta.root
    local_cache = self._translations_cache[meta.model]
    # 1. Local cache: hit unless a "missing" marker was stored.
    try:
        object = local_cache[language_code]
        if not is_missing(object):
            return object
    except KeyError:
        # Only consult prefetch/shared cache for saved objects.
        if not self._state.adding and self.pk is not None:
            prefetch = self._get_prefetched_translations(meta=meta)
            if prefetch is not None:
                # 2a. Prefetched rows: avoids both cache backend and DB.
                for object in prefetch:
                    if object.language_code == language_code:
                        local_cache[language_code] = object
                        _cache_translation(object)
                        return object
            else:
                # 2b. Shared cache.
                object = get_cached_translation(self, language_code, related_name=meta.rel_name, use_fallback=use_fallback)
                if object is not None:
                    # Track it in the local cache; when a fallback entry was
                    # returned, mark the requested language as missing.
                    if object.language_code != language_code:
                        local_cache[language_code] = MISSING
                    local_cache[object.language_code] = object
                    return object
                elif is_missing(local_cache.get(language_code, None)):
                    # The cache explicitly recorded "does not exist";
                    # no need to query the database.
                    pass
                else:
                    # 3. Database.
                    try:
                        object = self._get_translated_queryset(meta).get(language_code=language_code)
                    except meta.model.DoesNotExist:
                        pass
                    else:
                        local_cache[language_code] = object
                        _cache_translation(object)
                        return object
    # 4. Auto-create an unsaved translation when requested.
    if auto_create:
        kwargs = {
            'language_code': language_code,
        }
        if self.pk:
            # Only assign master when the object has a primary key.
            kwargs['master'] = self
        object = meta.model(**kwargs)
        local_cache[language_code] = object
        return object
    # 5. Fallback languages.
    fallback_msg = None
    lang_dict = get_language_settings(language_code)
    if language_code not in local_cache:
        # Remember the miss locally and in the shared cache, so the next
        # lookup can skip the database entirely.
        local_cache[language_code] = MISSING
        if not self._state.adding or self.pk is not None:
            _cache_translation_needs_fallback(self, language_code, related_name=meta.rel_name)
    fallback_choices = [lang_dict['code']] + list(lang_dict['fallbacks'])
    if use_fallback and fallback_choices:
        for fallback_lang in fallback_choices:
            if fallback_lang == language_code:
                # Skip the language that already failed.
                continue
            try:
                return self._get_translated_model(fallback_lang, use_fallback=False, auto_create=auto_create, meta=meta)
            except meta.model.DoesNotExist:
                pass
        fallback_msg = " (tried fallbacks {0})".format(', '.join(lang_dict['fallbacks']))
    raise meta.model.DoesNotExist(
        "{0} does not have a translation for the current language!\n"
        "{0} ID #{1}, language={2}{3}".format(self._meta.verbose_name, self.pk, language_code, fallback_msg or ''))
Fetch the translated fields model .
22,823
def _get_any_translated_model(self, meta=None):
    """Return any available translation, or ``None`` when there are none.

    Prefers the current language and its fallbacks, then any other cached
    translation, then the first prefetched/queried row.
    """
    if meta is None:
        meta = self._parler_meta.root
    tr_model = meta.model
    local_cache = self._translations_cache[tr_model]
    if local_cache:
        # Prefer a consistent answer from the local cache:
        # current language first, then the configured fallbacks.
        check_languages = [self._current_language] + self.get_fallback_languages()
        try:
            for fallback_lang in check_languages:
                trans = local_cache.get(fallback_lang, None)
                if trans and not is_missing(trans):
                    return trans
            # Otherwise pick any cached translation that isn't a marker.
            return next(t for t in six.itervalues(local_cache) if not is_missing(t))
        except StopIteration:
            pass
    try:
        # Use prefetched rows when available, avoiding a query.
        prefetch = self._get_prefetched_translations(meta=meta)
        if prefetch is not None:
            translation = prefetch[0]
        else:
            translation = self._get_translated_queryset(meta=meta)[0]
    except IndexError:
        return None
    else:
        local_cache[translation.language_code] = translation
        _cache_translation(translation)
        return translation
Return any available translation . Returns None if there are no translations at all .
22,824
def _get_translated_queryset(self, meta=None):
    """Return the queryset of the translated model.

    When translations were prefetched, they can be read from this queryset.
    """
    if meta is None:
        meta = self._parler_meta.root
    related_manager = getattr(self, meta.rel_name)
    return related_manager.get_queryset()
Return the queryset that points to the translated model . If there is a prefetch it can be read from this queryset .
22,825
def _get_prefetched_translations(self, meta=None):
    """Return the prefetched translations, or ``None`` when nothing was
    prefetched for this relation."""
    if meta is None:
        meta = self._parler_meta.root
    try:
        # _prefetched_objects_cache only exists after prefetch_related().
        return self._prefetched_objects_cache[meta.rel_name]
    except (AttributeError, KeyError):
        return None
Return the queryset with prefetch results .
22,826
def validate_unique(self, exclude=None):
    """Also validate the ``unique_together`` of the translated models.

    Combines the validation messages of the shared model and of every
    cached translation into a single ``ValidationError``.
    """
    errors = {}
    try:
        super(TranslatableModelMixin, self).validate_unique(exclude=exclude)
    except ValidationError as e:
        errors = e.message_dict
    for local_cache in six.itervalues(self._translations_cache):
        for translation in six.itervalues(local_cache):
            if is_missing(translation):
                # Skip "missing" markers; they are not real translations.
                continue
            try:
                translation.validate_unique(exclude=exclude)
            except ValidationError as e:
                errors.update(e.message_dict)
    if errors:
        raise ValidationError(errors)
Also validate the unique_together of the translated model .
22,827
def save_translation(self, translation, *args, **kwargs):
    """Save the translation when it's new or modified.

    Requires the master object to be saved first, so the translation can
    point back to it.
    """
    if self.pk is None or self._state.adding:
        raise RuntimeError("Can't save translations when the master object is not yet saved.")
    needs_save = translation.pk is None or translation.is_modified
    if not needs_save:
        return
    if not translation.master_id:
        # Link the translation to this master, on the same database.
        translation._state.db = self._state.db
        translation.master = self
    translation.save(*args, **kwargs)
Save the translation when it's modified or unsaved.
22,828
def safe_translation_getter(self, field, default=None, language_code=None, any_language=False):
    """Fetch a translated attribute, returning *default* when it's missing.

    :param field: name of the translated field.
    :param default: value (or callable producing one) returned when no
        translation provides the field.
    :param language_code: explicit language; defaults to the current one.
    :param any_language: when True, fall back to any existing translation.
    """
    meta = self._parler_meta._get_extension_by_field(field)
    if language_code and language_code != self._current_language:
        # Explicit different language requested: fetch that translation.
        try:
            tr_model = self._get_translated_model(language_code, meta=meta, use_fallback=True)
            return getattr(tr_model, field)
        except TranslationDoesNotExist:
            pass
    else:
        # Default: read via the descriptor for the current language.
        try:
            return getattr(self, field)
        except TranslationDoesNotExist:
            pass
    if any_language:
        translation = self._get_any_translated_model(meta=meta)
        if translation is not None:
            try:
                return getattr(translation, field)
            except KeyError:
                pass
    if callable(default):
        return default()
    else:
        return default
Fetch a translated property and return a default value when both the translation and fallback language are missing .
22,829
def _get_extension_by_field(self, name):
    """Find the ParlerOptions object that declares the given translated field.

    NOTE(review): implicitly returns ``None`` when no extension matches the
    model of the field — presumably ``get_model_by_field()`` raises before
    that can happen; confirm against its implementation.
    """
    if name is None:
        raise TypeError("Expected field name")
    tr_model = self.get_model_by_field(name)
    for meta in self._extensions:
        if meta.model == tr_model:
            return meta
Find the ParlerOptions object that corresponds with the given translated field .
22,830
def _validate_master(new_class):
    """Check that ``master`` on a TranslatedFieldsModel is configured correctly.

    Returns the shared model the foreign key points to.

    :raises ImproperlyConfigured: when ``master`` is missing/invalid, or the
        shared model already has this translation table or field name.
    """
    if not new_class.master or not isinstance(new_class.master, ForwardManyToOneDescriptor):
        raise ImproperlyConfigured("{0}.master should be a ForeignKey to the shared table.".format(new_class.__name__))
    remote_field = new_class.master.field.remote_field
    shared_model = remote_field.model
    # Refuse duplicate registrations on the shared model.
    meta = shared_model._parler_meta
    if meta is not None:
        if meta._has_translations_model(new_class):
            raise ImproperlyConfigured("The model '{0}' already has an associated translation table!".format(shared_model.__name__))
        if meta._has_translations_field(remote_field.related_name):
            raise ImproperlyConfigured("The model '{0}' already has an associated translation field named '{1}'!".format(shared_model.__name__, remote_field.related_name))
    return shared_model
Check whether the master field on a TranslatedFieldsModel is correctly configured .
22,831
def short_description(self):
    """Give the admin ``list_display`` column the translated field's verbose name."""
    translations_model = self.field.meta.model
    if translations_model is None:
        # Not bound to a translations model yet; fall back to the raw name.
        return pretty_name(self.field.name)
    model_field = translations_model._meta.get_field(self.field.name)
    return model_field.verbose_name
Ensure that the admin list_display renders the correct verbose name for translated fields .
22,832
def get_queryset(self, request):
    """Make sure the currently selected language is applied to the queryset."""
    qs = super(BaseTranslatableAdmin, self).get_queryset(request)
    if self._has_translatable_model():
        if not isinstance(qs, TranslatableQuerySet):
            raise ImproperlyConfigured("{0} class does not inherit from TranslatableQuerySet".format(qs.__class__.__name__))
        # Apply a consistent language to all retrieved objects.
        qs_language = self.get_queryset_language(request)
        if qs_language:
            qs = qs.language(qs_language)
    return qs
Make sure the current language is selected .
22,833
def all_languages_column(self, object):
    """``list_display`` column showing every configured language,
    including the untranslated ones."""
    language_codes = [code for code, _name in settings.LANGUAGES]
    column_html = self._languages_column(object, language_codes, span_classes='all-languages')
    return mark_safe(column_html)
The language column which can be included in the list_display . It also shows untranslated languages
22,834
def get_available_languages(self, obj):
    """Fetch the available languages of an object as a queryset."""
    if not obj:
        # No object yet (add view): return an empty queryset.
        return self.model._parler_meta.root_model.objects.none()
    return obj.get_available_languages()
Fetching the available languages as queryset .
22,835
def get_object(self, request, object_id, *args, **kwargs):
    """Make sure the object is fetched in the correct language."""
    obj = super(TranslatableAdmin, self).get_object(request, object_id, *args, **kwargs)
    if obj is not None and self._has_translatable_model():
        # Switch the object to the language selected in the admin.
        obj.set_current_language(self._language(request, obj), initialize=True)
    return obj
Make sure the object is fetched in the correct language .
22,836
def get_urls(self):
    """Add a delete-translation view to the admin URLs."""
    urlpatterns = super(TranslatableAdmin, self).get_urls()
    if not self._has_translatable_model():
        return urlpatterns
    else:
        opts = self.model._meta
        info = opts.app_label, opts.model_name
        # Prepend so this pattern wins over the catch-all change view.
        return [
            url(r'^(.+)/change/delete-translation/(.+)/$',
                self.admin_site.admin_view(self.delete_translation),
                name='{0}_{1}_delete_translation'.format(*info)),
        ] + urlpatterns
Add a delete - translation view .
22,837
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
    """Insert the language tabs into the admin change form."""
    if self._has_translatable_model():
        lang_code = self.get_form_language(request, obj)
        lang = get_language_title(lang_code)
        available_languages = self.get_available_languages(obj)
        language_tabs = self.get_language_tabs(request, obj, available_languages)
        context['language_tabs'] = language_tabs
        if language_tabs:
            context['title'] = '%s (%s)' % (context['title'], lang)
        if not language_tabs.current_is_translated:
            # The current tab has no translation yet: render as "add" form.
            add = True
        # Patch form_url so the selected language survives the save redirect.
        params = request.GET.dict()
        params['language'] = lang_code
        form_url = add_preserved_filters({'preserved_filters': urlencode(params), 'opts': self.model._meta}, form_url)
    # Expose the base template to extend, unless the caller already set it.
    if 'default_change_form_template' not in context:
        context['default_change_form_template'] = self.default_change_form_template
    return super(TranslatableAdmin, self).render_change_form(request, context, add, change, form_url, obj)
Insert the language tabs .
22,838
def deletion_not_allowed(self, request, obj, language_code):
    """Render the 'deletion not allowed' page for a translation."""
    opts = self.model._meta
    context = dict(
        object=obj.master,
        language_code=language_code,
        opts=opts,
        app_label=opts.app_label,
        language_name=get_language_title(language_code),
        object_name=force_text(opts.verbose_name),
    )
    return render(request, self.deletion_not_allowed_template, context)
Deletion - not - allowed view .
22,839
def get_translation_objects(self, request, language_code, obj=None, inlines=True):
    """Yield all objects to delete when a translation is deleted.

    Yields either lists of objects or QuerySets: the translation rows of
    the object itself, and (optionally) those of its translatable inlines.
    """
    if obj is not None:
        for translations_model in obj._parler_meta.get_all_models():
            try:
                translation = translations_model.objects.get(master=obj, language_code=language_code)
            except translations_model.DoesNotExist:
                continue
            yield [translation]
    if inlines:
        for inline, qs in self._get_inline_translations(request, language_code, obj=obj):
            yield qs
Return all objects that should be deleted when a translation is deleted . This method can yield all QuerySet objects or lists for the objects .
22,840
def _get_inline_translations(self, request, language_code, obj=None):
    """Yield ``(inline, queryset)`` pairs with the inline translations
    for *language_code*."""
    inline_instances = self.get_inline_instances(request, obj=obj)
    for inline in inline_instances:
        if issubclass(inline.model, TranslatableModelMixin):
            # Filter the translations via the inline's FK to the parent.
            fk = inline.get_formset(request, obj).fk
            rel_name = 'master__{0}'.format(fk.name)
            filters = {'language_code': language_code, rel_name: obj}
            for translations_model in inline.model._parler_meta.get_all_models():
                qs = translations_model.objects.filter(**filters)
                if obj is not None:
                    # Stay on the same database as the parent object.
                    qs = qs.using(obj._state.db)
                yield inline, qs
Fetch the inline translations
22,841
def default_change_form_template(self):
    """Determine the actual ``change_form_template``, trying the most
    specific admin template first."""
    opts = self.model._meta
    app_label = opts.app_label
    model_name = opts.object_name.lower()
    candidates = (
        "admin/{0}/{1}/change_form.html".format(app_label, model_name),
        "admin/{0}/change_form.html".format(app_label),
        "admin/change_form.html",
    )
    return select_template_name(candidates)
Determine what the actual change_form_template should be .
22,842
def get_formset(self, request, obj=None, **kwargs):
    """Return the formset, annotated with the language information."""
    FormSet = super(TranslatableInlineModelAdmin, self).get_formset(request, obj, **kwargs)
    FormSet.language_code = self.get_form_language(request, obj)
    if self.inline_tabs:
        # Attach the tabs rendered inside the inline.
        available_languages = self.get_available_languages(obj, FormSet)
        FormSet.language_tabs = self.get_language_tabs(request, obj, available_languages, css_class='parler-inline-language-tabs')
        # Deleting a tab is only offered when the parent is translatable too.
        FormSet.language_tabs.allow_deletion = self._has_translatable_parent_model()
    return FormSet
Return the formset and provide the language information to the formset .
22,843
def get_available_languages(self, obj, formset):
    """Fetch the available inline languages as a queryset."""
    if not obj:
        # No parent object yet: nothing can be translated.
        return self.model._parler_meta.root_model.objects.none()
    lookup = {'master__{0}'.format(formset.fk.name): obj}
    qs = self.model._parler_meta.root_model.objects.using(obj._state.db)
    return qs.filter(**lookup).values_list('language_code', flat=True).distinct().order_by('language_code')
Fetching the available inline languages as queryset .
22,844
def language(self, language_code=None):
    """Set the language code assigned to objects retrieved via this QuerySet.

    Defaults to the configured default language; returns ``self`` so the
    call can be chained.
    """
    if language_code is None:
        language_code = appsettings.PARLER_LANGUAGES.get_default_language()
    self._language = language_code
    return self
Set the language code to assign to objects retrieved using this QuerySet .
22,845
def translated(self, *language_codes, **translated_fields):
    """Only return objects translated into the given languages.

    Field filters prefixed with ``master__`` are applied to the shared
    model instead of the translation. With multiple languages, a
    ``.distinct()`` is applied because the join may produce duplicates.
    """
    relname = self.model._parler_meta.root_rel_name
    if not language_codes:
        language_codes = (get_language(),)
    filters = {}
    for field_name, val in six.iteritems(translated_fields):
        if field_name.startswith('master__'):
            # "master__" filters target the shared model directly.
            filters[field_name[8:]] = val
        else:
            filters["{0}__{1}".format(relname, field_name)] = val
    if len(language_codes) == 1:
        filters[relname + '__language_code'] = language_codes[0]
        return self.filter(**filters)
    else:
        filters[relname + '__language_code__in'] = language_codes
        return self.filter(**filters).distinct()
Only return objects that are translated into the given languages.
22,846
def active_translations(self, language_code=None, **translated_fields):
    """Only return objects translated in the active language(s), i.e. the
    given/current language plus its displayable fallbacks."""
    codes = get_active_language_choices(language_code)
    return self.translated(*codes, **translated_fields)
Only return objects which are translated or have a fallback that should be displayed .
22,847
def get_language_title(language_code):
    """Return the verbose (translated) name for a language code.

    Unknown codes fall back to their base language (e.g. ``'en-us'`` to
    ``'en'``) and finally to the code itself.
    """
    from parler import appsettings
    if not language_code:
        raise ValueError("Missing language_code in get_language_title()")
    # Choose between all known languages or only the configured ones.
    if appsettings.PARLER_SHOW_EXCLUDED_LANGUAGE_TABS:
        languages = ALL_LANGUAGES_DICT
    else:
        languages = LANGUAGES_DICT
    try:
        return _(languages[language_code])
    except KeyError:
        # Strip the country/variant part and retry with the base language.
        language_code = language_code.split('-')[0]
        language_title = languages.get(language_code, None)
        if language_title is not None:
            return _(language_title)
        else:
            return language_code
Return the verbose_name for a language code .
22,848
def is_multilingual_project(site_id=None):
    """Whether the current Django project is configured for multilingual support."""
    from parler import appsettings
    if site_id is None:
        site_id = getattr(settings, 'SITE_ID', None)
    show_all_tabs = appsettings.PARLER_SHOW_EXCLUDED_LANGUAGE_TABS
    return show_all_tabs or site_id in appsettings.PARLER_LANGUAGES
Whether the current Django project is configured for multilingual support .
22,849
def __timestamp():
    """Generate the 4-byte timestamp field for a pyc header
    (current time packed as a native-order unsigned long)."""
    now = time.time()
    return struct.pack(b'=L', int(now))
Generate timestamp data for pyc header .
22,850
def _get_scripts_resource(pe):
    """Return the PYTHONSCRIPT resource entry of *pe*, or ``None`` when absent."""
    for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries:
        if entry.name and entry.name.string == b"PYTHONSCRIPT":
            # Descend two directory levels to the actual data entry.
            return entry.directory.entries[0].directory.entries[0]
    return None
Return the PYTHONSCRIPT resource entry .
22,851
def _resource_dump(pe, res):
    """Return the raw bytes of the given PE resource."""
    resource_struct = res.data.struct
    return pe.get_data(resource_struct.OffsetToData, resource_struct.Size)
Return the dump of the given resource .
22,852
def _get_co_from_dump(data):
    """Return the code objects from a PYTHONSCRIPT resource dump.

    Layout: four ints of metadata (the first is a magic value, the fourth
    the code length — per the log messages), a NUL-terminated archive
    name, then the marshalled code objects.
    """
    current = struct.calcsize(b'iiii')
    metadata = struct.unpack(b'iiii', data[:current])
    logging.info("Magic value: %x", metadata[0])
    logging.info("Code bytes length: %d", metadata[3])
    # Read the NUL-terminated archive name byte by byte.
    arcname = ''
    while six.indexbytes(data, current) != 0:
        arcname += chr(six.indexbytes(data, current))
        current += 1
    logging.info("Archive name: %s", arcname or '-')
    # Skip the terminating NUL; the rest is the marshalled payload.
    code_bytes = data[current + 1:]
    code_objects = marshal.loads(code_bytes)
    return code_objects
Return the code objects from the dump .
22,853
def check_py2exe_file(pe):
    """Check whether *pe* is a py2exe executable.

    Returns True when the PYTHONSCRIPT resource is present. Otherwise logs
    that it isn't py2exe, and additionally hints when the file looks like a
    PyInstaller executable instead.
    """
    py2exe_resource = _get_scripts_resource(pe)
    if py2exe_resource is None:
        logging.info('This is not a py2exe executable.')
        # BUG FIX: bytes.find() returns -1 when the marker is absent (truthy)
        # and 0 when it sits at the very start (falsy), so it must be compared
        # against -1 explicitly rather than used as a boolean.
        if pe.__data__.find(b'pyi-windows-manifest-filename') != -1:
            logging.info('This seems a pyinstaller executable (unsupported).')
    return bool(py2exe_resource)
Check file is a py2exe executable .
22,854
def extract_code_objects(pe):
    """Extract the Python code objects stored in a py2exe executable."""
    script_res = _get_scripts_resource(pe)
    raw_dump = _resource_dump(pe, script_res)
    return _get_co_from_dump(raw_dump)
Extract Python code objects from a py2exe executable .
22,855
def dump_to_pyc(co, python_version, output_dir):
    """Save code object *co* as a .pyc file in *output_dir*.

    Files whose names appear in IGNORE are skipped.
    """
    pyc_name = ntpath.basename(co.co_filename) + '.pyc'
    if pyc_name in IGNORE:
        logging.info("Skipping %s", pyc_name)
        return
    logging.info("Extracting %s", pyc_name)
    pyc_header = _generate_pyc_header(python_version, len(co.co_code))
    destination = os.path.join(output_dir, pyc_name)
    # Context manager guarantees the handle is closed even if marshalling
    # fails; the original leaked the file object on error.
    with open(destination, 'wb') as pyc:
        pyc.write(pyc_header)
        pyc.write(marshal.dumps(co))
Save given code_object as a . pyc file .
22,856
def unpy2exe(filename, python_version=None, output_dir=None):
    """Extract every embedded code object of py2exe executable *filename*
    to a .pyc file in *output_dir* (created if needed; default '.')."""
    if output_dir is None:
        output_dir = '.'
    elif not os.path.exists(output_dir):
        os.makedirs(output_dir)
    pe = pefile.PE(filename)
    if not check_py2exe_file(pe):
        raise ValueError('Not a py2exe executable.')
    for code_object in extract_code_objects(pe):
        dump_to_pyc(code_object, python_version, output_dir)
Process input params and produce output pyc files .
22,857
def are_all_nodes_discovered(self):
    """Report whether node info is known for every node (no undiscovered left)."""
    undiscovered = list(self.find_all(lambda entry: not entry.discovered))
    return not undiscovered
Reports whether node info has been discovered for all nodes, i.e. whether no node's info is still unknown .
22,858
def _path(self, attrpath):
    """Return the namespace object at the '.'-separated *attrpath*, creating
    any intermediate namespaces along the way that don't already exist."""
    attr, _, remainder = attrpath.partition(".")
    if attr not in self.__dict__:
        # Lazily materialise the intermediate namespace and remember its name.
        self.__dict__[attr] = Namespace()
        self.__namespaces.add(attr)
    target = self.__dict__[attr]
    return target._path(remainder) if remainder else target
Returns the namespace object at the given . - separated path creating any namespaces in the path that don t already exist .
22,859
def add(self, data_bytes):
    """Feed an ASCII string or a byte sequence into the CRC-64 signature."""
    try:
        text_types = basestring  # Python 2
    except NameError:
        text_types = str         # Python 3
    if isinstance(data_bytes, text_types):
        data_bytes = map(ord, data_bytes)
    for value in data_bytes:
        self._crc ^= (value << 56) & Signature.MASK64
        for _ in range(8):
            if self._crc & (1 << 63):
                self._crc = ((self._crc << 1) & Signature.MASK64) ^ Signature.POLY
            else:
                self._crc <<= 1
Feed ASCII string or bytes to the signature function
22,860
def get_max_bitlen(self):
    """Total maximum bit length of the array, including the implicit
    length-prefix field when the array is dynamic."""
    payload_bits = self.max_size * self.value_type.get_max_bitlen()
    per_mode = {
        self.MODE_DYNAMIC: payload_bits + self.max_size.bit_length(),
        self.MODE_STATIC: payload_bits,
    }
    # Unknown modes raise KeyError, matching the original dict dispatch.
    return per_mode[self.mode]
Returns total maximum bit length of the array including length field if applicable .
22,861
def get_dsdl_signature_source_definition(self):
    """Return the normalized DSDL definition text for this compound type.

    Refer to the specification for what a "normalized" definition is.
    """
    out = StringIO()
    out.write(self.full_name + '\n')

    def write_normalized(attrs):
        return out.write('\n'.join(a.get_normalized_definition() for a in attrs) + '\n')

    if self.kind == CompoundType.KIND_SERVICE:
        if self.request_union:
            out.write('\n@union\n')
        write_normalized(self.request_fields)
        out.write('\n---\n')
        if self.response_union:
            out.write('\n@union\n')
        write_normalized(self.response_fields)
    elif self.kind == CompoundType.KIND_MESSAGE:
        if self.union:
            out.write('\n@union\n')
        write_normalized(self.fields)
    else:
        error('Compound type of unknown kind [%s]', self.kind)
    # Collapse blank lines left behind by the conditional markers.
    return out.getvalue().strip().replace('\n\n\n', '\n').replace('\n\n', '\n')
Returns normalized DSDL definition text . Please refer to the specification for details about normalized DSDL definitions .
22,862
def get_data_type_signature(self):
    """Compute (and cache) the data type signature of this type.

    The signature is guaranteed to match only when all nested data
    structures are compatible; see the specification for details.
    """
    if self._data_type_signature is None:
        sig = Signature(self.get_dsdl_signature())
        if self.kind == CompoundType.KIND_SERVICE:
            all_fields = self.request_fields + self.response_fields
        else:
            all_fields = self.fields
        for field in all_fields:
            nested_sig = field.type.get_data_type_signature()
            if nested_sig is not None:
                # Feed the nested signature first, then the CRC value from
                # before it was added (extend-then-remix step).
                previous_value = sig.get_value()
                sig.add(bytes_from_crc64(nested_sig))
                sig.add(bytes_from_crc64(previous_value))
        self._data_type_signature = sig.get_value()
    return self._data_type_signature
Computes data type signature of this type . The data type signature is guaranteed to match only if all nested data structures are compatible . Please refer to the specification for details about signatures .
22,863
def close(self):
    """Stop the instance and close the allocation table storage."""
    # Detach both subscriptions before releasing the table.
    self._handle.remove()
    self._node_monitor_event_handle.remove()
    self._allocation_table.close()
Stops the instance and closes the allocation table storage .
22,864
def pretty_filename(filename):
    """Return a nice, human-readable path to *filename*.

    Prefers the relative path unless it would climb out of the current
    directory ('..'), in which case the absolute path is returned.
    """
    try:
        absolute = os.path.abspath(filename)
        relative = os.path.relpath(filename)
    except ValueError:
        # e.g. on Windows when the path is on a different drive
        absolute = relative = filename
    if '..' in relative:
        return absolute
    return relative
Returns a nice human readable path to filename .
22,865
def write_event(self, event):
    """Append *event* (an ``event_pb2.Event`` proto) to the file."""
    if not isinstance(event, event_pb2.Event):
        raise TypeError("expected an event_pb2.Event proto, "
                        " but got %s" % type(event))
    serialized = event.SerializeToString()
    return self._write_serialized_event(serialized)
Appends event to the file .
22,866
def flush(self):
    """Flush buffered events to disk, logging how many were written."""
    if self._num_outstanding_events == 0 or self._recordio_writer is None:
        return
    self._recordio_writer.flush()
    if self._logger is not None:
        noun = 'event' if self._num_outstanding_events == 1 else 'events'
        self._logger.info('wrote %d %s to disk', self._num_outstanding_events, noun)
    self._num_outstanding_events = 0
Flushes the event file to disk .
22,867
def close(self):
    """Flush pending events, then close and drop the record writer.

    Safe to call more than once.
    """
    self.flush()
    if self._recordio_writer is None:
        return
    self._recordio_writer.close()
    self._recordio_writer = None
Flushes the pending events and closes the writer after it is done .
22,868
def close(self):
    """Flush the event file to disk and close it.

    Idempotent: subsequent calls are no-ops. Call this when you no longer
    need the summary writer.
    """
    if self._closed:
        return
    # The sentinel tells the background worker to stop.
    self.add_event(self._sentinel_event)
    self.flush()
    self._worker.join()
    self._ev_writer.close()
    self._closed = True
Flushes the event file to disk and close the file . Call this method when you do not need the summary writer anymore .
22,869
def write_record(self, event_str):
    """Write one serialized event in TFRecord framing:
    length, masked CRC of length, payload, masked CRC of payload."""
    length_bytes = struct.pack('Q', len(event_str))
    record = b''.join([
        length_bytes,
        struct.pack('I', masked_crc32c(length_bytes)),
        event_str,
        struct.pack('I', masked_crc32c(event_str)),
    ])
    self._writer.write(record)
Writes a serialized event to file .
22,870
def close(self):
    """Flush and close the record writer; safe to call when already closed."""
    if self._writer is None:
        return
    self.flush()
    self._writer.close()
    self._writer = None
Closes the record writer .
22,871
def add_summary(self, summary, global_step=None):
    """Wrap *summary* (a Summary proto or its serialized bytes) in an Event
    and append it to the event file.

    Plugin metadata is emitted only the first time each tag is seen, to
    keep the event file small.
    """
    if isinstance(summary, bytes):
        parsed = summary_pb2.Summary()
        parsed.ParseFromString(summary)
        summary = parsed
    for value in summary.value:
        if not value.metadata:
            continue
        if value.tag in self._seen_summary_tags:
            # Metadata for this tag was already written; strip it.
            value.ClearField("metadata")
            continue
        self._seen_summary_tags.add(value.tag)
    self._add_event(event_pb2.Event(summary=summary), global_step)
Adds a Summary protocol buffer to the event file . This method wraps the provided summary in an Event protocol buffer and adds it to the event file .
22,872
def add_graph(self, graph):
    """Add a Graph protocol buffer to the event file."""
    self._add_event(event_pb2.Event(graph_def=graph.SerializeToString()), None)
Adds a Graph protocol buffer to the event file .
22,873
def add_scalar(self, tag, value, global_step=None):
    """Add scalar data to the event file.

    *value* may be a plain scalar, a (name, value) pair, or a dict mapping
    names to values; pairs and dicts are fanned out to per-name writers.
    """
    if isinstance(value, (tuple, list, dict)):
        if not isinstance(value, dict):
            if len(value) != 2:
                raise ValueError('expected two elements in value, while received %d' % len(value))
            value = {value[0]: value[1]}
        self._add_scalars(tag, value, global_step)
    else:
        self._file_writer.add_summary(scalar_summary(tag, value), global_step)
        self._append_to_scalar_dict(self.get_logdir() + '/' + tag, value, global_step, time.time())
Adds scalar data to the event file .
22,874
def _add_scalars(self, tag, scalar_dict, global_step=None):
    """Add several scalars under one tag with a sub-writer per scalar name,
    so multiple curves appear on a single plot."""
    timestamp = time.time()
    base_logdir = self._file_writer.get_logdir()
    for name, scalar in scalar_dict.items():
        sub_tag = base_logdir + '/' + tag + '/' + name
        if sub_tag in self._all_writers:
            writer = self._all_writers[sub_tag]
        else:
            # Lazily create (and cache) one FileWriter per scalar name.
            writer = FileWriter(logdir=sub_tag, max_queue=self._max_queue,
                                flush_secs=self._flush_secs,
                                filename_suffix=self._filename_suffix,
                                verbose=self._verbose)
            self._all_writers[sub_tag] = writer
        writer.add_summary(scalar_summary(tag, scalar), global_step)
        self._append_to_scalar_dict(sub_tag, scalar, global_step, timestamp)
Adds multiple scalars to summary . This enables drawing multiple curves in one plot .
22,875
def add_histogram(self, tag, values, global_step=None, bins='default'):
    """Add histogram data to the event file."""
    effective_bins = self._get_default_bins() if bins == 'default' else bins
    self._file_writer.add_summary(histogram_summary(tag, values, effective_bins), global_step)
Add histogram data to the event file .
22,876
def add_image(self, tag, image, global_step=None):
    """Add image data (2D, 3D, or a 4D batch tiled to a sprite) to the event file."""
    summary = image_summary(tag, image)
    self._file_writer.add_summary(summary, global_step)
Add image data to the event file . This function supports input as a 2D, 3D, or 4D image . If the input image is 2D, a channel axis is prepended as the first dimension and the image is replicated three times and concatenated along the channel axis . If the input image is 3D, it is replicated three times and concatenated along the channel axis . If the input image is 4D, which is a batch of images, all the images are spliced into a sprite image for display .
22,877
def add_audio(self, tag, audio, sample_rate=44100, global_step=None):
    """Add audio data to the event file."""
    summary = audio_summary(tag, audio, sample_rate=sample_rate)
    self._file_writer.add_summary(summary, global_step)
Add audio data to the event file .
22,878
def add_text(self, tag, text, global_step=None):
    """Add text data to the event file and register the tag with the
    TensorBoard text plugin's JSON manifest."""
    self._file_writer.add_summary(text_summary(tag, text), global_step)
    if tag not in self._text_tags:
        self._text_tags.append(tag)
        plugin_dir = self.get_logdir() + '/plugins/tensorboard_text/'
        if not os.path.exists(plugin_dir):
            os.makedirs(plugin_dir)
        # The text plugin discovers tags through this manifest file.
        with open(plugin_dir + 'tensors.json', 'w') as manifest:
            json.dump(self._text_tags, manifest)
Add text data to the event file .
22,879
def add_pr_curve(self, tag, labels, predictions, num_thresholds, global_step=None, weights=None):
    """Add a precision-recall curve computed from *labels* and *predictions*."""
    if num_thresholds < 2:
        raise ValueError('num_thresholds must be >= 2')
    labels = _make_numpy_array(labels)
    predictions = _make_numpy_array(predictions)
    summary = pr_curve_summary(tag, labels, predictions, num_thresholds, weights)
    self._file_writer.add_summary(summary, global_step)
Adds precision - recall curve .
22,880
def _rectangular(n):
    """Return True when every row of the 2D list *n* has the same length as
    the first row (vacuously True for an empty list)."""
    # len(n[0]) stays inside the generator so an empty list never indexes.
    return all(len(row) == len(n[0]) for row in n)
Checks to see if a 2D list is a valid 2D matrix
22,881
def _is_2D_matrix(matrix):
    """Check whether *matrix* is 2D: a rectangular list of lists (not 3D),
    or an array-like whose ``shape`` has exactly two dimensions."""
    # BUG FIX: the original compared `matrix.shape == 2`, i.e. a tuple
    # against an int, which is always False; a 2D ndarray has len(shape) == 2.
    return ((isinstance(matrix[0], list) and _rectangular(matrix)
             and not isinstance(matrix[0][0], list))
            or (not isinstance(matrix, list) and len(matrix.shape) == 2))
Checks to see if a ndarray is 2D or a list of lists is 2D
22,882
def _save_image(image, filename, nrow=8, padding=2, square_image=True):
    """Save NDArray *image* to *filename*; multiple images are tiled to a grid."""
    if not isinstance(image, NDArray):
        raise TypeError('MXNet NDArray expected, received {}'.format(str(type(image))))
    image = _prepare_image(image, nrow=nrow, padding=padding, square_image=square_image)
    if Image is None:
        raise ImportError('saving image failed because PIL is not found')
    Image.fromarray(image.asnumpy()).save(filename)
Saves a given Tensor into an image file . If the input tensor contains multiple images a grid of images will be saved .
22,883
def _save_embedding_tsv(data, file_path):
    """Write 2D embedding *data* (numpy array or MXNet NDArray) as
    tab-separated rows to tensors.tsv under *file_path*."""
    if isinstance(data, np.ndarray):
        rows = data.tolist()
    elif isinstance(data, NDArray):
        rows = data.asnumpy().tolist()
    else:
        raise TypeError('expected NDArray of np.ndarray, while received type {}'.format(str(type(data))))
    with open(os.path.join(file_path, 'tensors.tsv'), 'w') as out:
        for row in rows:
            out.write('\t'.join(str(cell) for cell in row) + '\n')
Given a 2D NDArray or a numpy.ndarray as the embedding, save it in tensors.tsv under the path provided by the user .
22,884
def _make_image(tensor):
    """Convert an HWC NDArray image into a PNG-encoded Summary.Image protobuf."""
    assert isinstance(tensor, NDArray)
    if Image is None:
        raise ImportError('need to install PIL for visualizing images')
    height, width, channel = tensor.shape
    pil_image = Image.fromarray(_make_numpy_array(tensor))
    buffer = io.BytesIO()
    pil_image.save(buffer, format='PNG')
    encoded = buffer.getvalue()
    buffer.close()
    return Summary.Image(height=height, width=width, colorspace=channel,
                         encoded_image_string=encoded)
Converts an NDArray type image to Image protobuf
22,885
def pr_curve_summary(tag, labels, predictions, num_thresholds, weights=None):
    """Build a precision-recall-curve Summary protocol buffer.

    num_thresholds is clipped at 127 because larger values break creation
    of the pr_curve plugin protobuf.
    """
    if num_thresholds > 127:
        logging.warning('num_thresholds>127 would result in failure of creating pr_curve protobuf,'
                        ' clipping it at 127')
        num_thresholds = 127
    labels = _make_numpy_array(labels)
    predictions = _make_numpy_array(predictions)
    if weights is not None:
        weights = _make_numpy_array(weights)
    data = _compute_curve(labels, predictions, num_thresholds=num_thresholds, weights=weights)
    plugin_content = PrCurvePluginData(version=0, num_thresholds=num_thresholds).SerializeToString()
    smd = SummaryMetadata(plugin_data=[
        SummaryMetadata.PluginData(plugin_name='pr_curves', content=plugin_content)])
    tensor = TensorProto(
        dtype='DT_FLOAT',
        float_val=data.reshape(-1).tolist(),
        tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=data.shape[0]),
                                           TensorShapeProto.Dim(size=data.shape[1])]))
    return Summary(value=[Summary.Value(tag=tag, metadata=smd, tensor=tensor)])
Outputs a precision - recall curve Summary protocol buffer .
22,886
def _get_nodes_from_symbol(sym):
    """Convert an mxnet Symbol into a list of NodeDef protos for TensorBoard
    graph visualization, scoping single-consumer data nodes under their op."""
    if not isinstance(sym, Symbol):
        raise TypeError('sym must be an `mxnet.symbol.Symbol`,'
                        ' received type {}'.format(str(type(sym))))
    nodes = json.loads(sym.tojson())['nodes']
    # Map each data-node index (op == 'null') to the op-node indices that
    # consume it; index 0 is deliberately skipped, as in the original.
    consumers = {}
    for i, node in enumerate(nodes):
        if node['op'] == 'null':
            continue
        for idx in node['inputs']:
            if idx[0] == 0:
                continue
            consumers.setdefault(idx[0], []).append(i)
    node_defs = []
    for i, node in enumerate(nodes):
        node_name = node['name']
        op_name = node['op']
        kwargs = {'op': op_name, 'name': node_name}
        if op_name != 'null':
            inputs = []
            for idx in node['inputs']:
                input_node = nodes[idx[0]]
                input_node_name = input_node['name']
                if input_node['op'] != 'null':
                    inputs.append(_scoped_name(input_node_name, input_node_name))
                elif idx[0] in consumers and len(consumers[idx[0]]) == 1 and consumers[idx[0]][0] == i:
                    # Data node feeding only this op: nest it under the op's scope.
                    inputs.append(_scoped_name(node_name, input_node_name))
                else:
                    inputs.append(input_node_name)
            kwargs['input'] = inputs
            kwargs['name'] = _scoped_name(node_name, node_name)
        elif i in consumers and len(consumers[i]) == 1:
            op_node_name = nodes[consumers[i][0]]['name']
            kwargs['name'] = _scoped_name(op_node_name, node_name)
        if 'attrs' in node:
            # Double quotes break the attr display; replace them with spaces.
            attr = json.dumps(node['attrs'], sort_keys=True).replace("\"", ' ')
            kwargs['attr'] = {'param': AttrValue(s=attr.encode(encoding='utf-8'))}
        node_defs.append(NodeDef(**kwargs))
    return node_defs
Given a symbol and shapes return a list of NodeDef s for visualizing the the graph in TensorBoard .
22,887
def temporal_derivatives(order, variables, data):
    """Compute backwards-difference temporal derivatives of *variables*.

    Returns the expanded variable names and a DataFrame of the derivative
    columns; order 0 passes the original columns through unchanged.
    """
    names_by_order = OrderedDict()
    values_by_order = OrderedDict()
    if 0 in order:
        values_by_order[0] = data[variables]
        names_by_order[0] = variables
    for o in set(order) - {0}:
        names_by_order[o] = ['{}_derivative{}'.format(v, o) for v in variables]
        # The first o rows have no backwards difference; they stay NaN.
        deriv = np.tile(np.nan, data[variables].shape)
        deriv[o:, :] = np.diff(data[variables], n=o, axis=0)
        values_by_order[o] = deriv
    all_names = reduce((lambda x, y: x + y), names_by_order.values())
    frame = pd.DataFrame(columns=all_names,
                         data=np.concatenate([*values_by_order.values()], axis=1))
    return (all_names, frame)
Compute temporal derivative terms by the method of backwards differences .
22,888
def exponential_terms(order, variables, data):
    """Compute power expansions of *variables* for each exponent in *order*.

    Exponent 1 passes the original columns through unchanged.
    """
    names_by_power = OrderedDict()
    values_by_power = OrderedDict()
    if 1 in order:
        values_by_power[1] = data[variables]
        names_by_power[1] = variables
    for power in set(order) - {1}:
        names_by_power[power] = ['{}_power{}'.format(v, power) for v in variables]
        values_by_power[power] = data[variables] ** power
    all_names = reduce((lambda x, y: x + y), names_by_power.values())
    frame = pd.DataFrame(columns=all_names,
                         data=np.concatenate([*values_by_power.values()], axis=1))
    return (all_names, frame)
Compute exponential expansions .
22,889
def _order_as_range(order):
    """Convert a hyphenated order string (e.g. '2-5') into a range object;
    a single number (e.g. '3') comes back as a one-element list."""
    bounds = [int(part) for part in order.split('-')]
    if len(bounds) > 1:
        return range(bounds[0], bounds[-1] + 1)
    return bounds
Convert a hyphenated string representing order for derivative or exponential terms into a range object that can be passed as input to the appropriate expansion function .
22,890
def _check_and_expand_exponential(expr, variables, data):
    """Expand exponential operators in *expr*: '^^6' means all powers up to
    the 6th, '^5-6' the 5th and 6th powers, '^6' the 6th only."""
    if re.search(r'\^\^[0-9]+$', expr):
        matched = re.compile(r'\^\^([0-9]+)$').findall(expr)
        powers = range(1, int(*matched) + 1)
        variables, data = exponential_terms(powers, variables, data)
    elif re.search(r'\^[0-9]+[\-]?[0-9]*$', expr):
        matched = re.compile(r'\^([0-9]+[\-]?[0-9]*)').findall(expr)
        powers = _order_as_range(*matched)
        variables, data = exponential_terms(powers, variables, data)
    return variables, data
Check if the current operation specifies exponential expansion . ^^6 specifies all powers up to the 6th ^5 - 6 the 5th and 6th powers ^6 the 6th only .
22,891
def _check_and_expand_derivative(expr, variables, data):
    """Expand temporal-derivative operators in *expr*: 'dd6x' means all
    derivatives up to the 6th of x, 'd5-6x' the 5th and 6th, 'd6x' the 6th only."""
    if re.search(r'^dd[0-9]+', expr):
        matched = re.compile(r'^dd([0-9]+)').findall(expr)
        orders = range(0, int(*matched) + 1)
        (variables, data) = temporal_derivatives(orders, variables, data)
    elif re.search(r'^d[0-9]+[\-]?[0-9]*', expr):
        matched = re.compile(r'^d([0-9]+[\-]?[0-9]*)').findall(expr)
        orders = _order_as_range(*matched)
        (variables, data) = temporal_derivatives(orders, variables, data)
    return variables, data
Check if the current operation specifies a temporal derivative . dd6x specifies all derivatives up to the 6th d5 - 6x the 5th and 6th d6x the 6th only .
22,892
def _check_and_expand_subformula(expression, parent_data, variables, data):
    """If *expression* holds a parenthesised sub-formula, parse it
    recursively; otherwise pass *variables*/*data* through unchanged."""
    depth = 0
    start = None
    for i, char in enumerate(expression):
        if char == '(':
            if depth == 0:
                start = i + 1
            depth += 1
        elif char == ')':
            depth -= 1
            if depth == 0:
                # Outermost group closed: hand its contents to the parser.
                return parse_formula(expression[start:i].strip(), parent_data)
    return variables, data
Check if the current operation contains a suboperation and parse it where appropriate .
22,893
def parse_expression(expression, parent_data):
    """Parse one additive atom of a model formula, applying sub-formula,
    exponential, and derivative expansions in turn; a plain name selects
    its column from *parent_data*."""
    variables = None
    data = None
    variables, data = _check_and_expand_subformula(expression, parent_data, variables, data)
    variables, data = _check_and_expand_exponential(expression, variables, data)
    variables, data = _check_and_expand_derivative(expression, variables, data)
    if variables is None:
        name = expression.strip()
        variables = [name]
        data = parent_data[name]
    return variables, data
Parse an expression in a model formula .
22,894
def _expand_shorthand(model_formula, variables):
    """Expand shorthand tokens (wm, gsr, rps, fd, acc, tcc, dv, dvall, nss,
    spikes, others) in *model_formula* into their full regressor names."""
    substitutions = [
        ('wm', 'white_matter'),
        ('gsr', 'global_signal'),
        ('rps', 'trans_x + trans_y + trans_z + rot_x + rot_y + rot_z'),
        ('fd', 'framewise_displacement'),
        ('acc', _get_matches_from_data('a_comp_cor_[0-9]+', variables)),
        ('tcc', _get_matches_from_data('t_comp_cor_[0-9]+', variables)),
        # NOTE(review): 'dv' is substituted before 'dvall' (as in the
        # original), so a literal 'dvall' token is rewritten by the 'dv'
        # rule first — confirm this ordering is intended.
        ('dv', _get_matches_from_data('^std_dvars$', variables)),
        ('dvall', _get_matches_from_data('.*dvars', variables)),
        ('nss', _get_matches_from_data('non_steady_state_outlier[0-9]+', variables)),
        ('spikes', _get_matches_from_data('motion_outlier[0-9]+', variables)),
    ]
    for token, replacement in substitutions:
        model_formula = re.sub(token, replacement, model_formula)
    # 'others' expands to every variable not already in the formula.
    formula_variables = _get_variables_from_formula(model_formula)
    others = ' + '.join(set(variables) - set(formula_variables))
    model_formula = re.sub('others', others, model_formula)
    return model_formula
Expand shorthand terms in the model formula .
22,895
def _unscramble_regressor_columns(parent_data, data):
    """Reorder *data*'s columns to match *parent_data*'s column order, with
    expansion columns (_powerN / _derivativeN) placed right after their source."""
    suffix_patterns = ['_power[0-9]+', '_derivative[0-9]+']
    slots = OrderedDict((c, deque()) for c in parent_data.columns)
    for column in data.columns:
        base = column
        for pattern in suffix_patterns:
            base = re.sub(pattern, '', base)
        if base == column:
            # The original regressor goes in front of its expansions.
            slots[base].appendleft(column)
        else:
            slots[base].append(column)
    ordered = reduce((lambda x, y: x + y), slots.values())
    return data[[*ordered]]
Reorder the columns of a confound matrix such that the columns are in the same order as the input data with any expansion columns inserted immediately after the originals .
22,896
def parse_formula(model_formula, parent_data, unscramble=False):
    """Recursively parse *model_formula* by splitting it into additive atoms
    while tracking parenthesis depth; returns (variables, DataFrame)."""
    variables = {}
    data = {}
    expr_start = 0
    depth = 0
    model_formula = _expand_shorthand(model_formula, parent_data.columns)
    # Split on '+' only at the top grouping level.
    for i, char in enumerate(model_formula):
        if char == '(':
            depth += 1
        elif char == ')':
            depth -= 1
        elif depth == 0 and char == '+':
            expression = model_formula[expr_start:i].strip()
            variables[expression] = None
            data[expression] = None
            expr_start = i + 1
    expression = model_formula[expr_start:].strip()
    variables[expression] = None
    data[expression] = None
    for expression in list(variables):
        if expression[0] == '(' and expression[-1] == ')':
            # Fully parenthesised atom: recurse on its interior.
            (variables[expression], data[expression]) = parse_formula(expression[1:-1], parent_data)
        else:
            (variables[expression], data[expression]) = parse_expression(expression, parent_data)
    variables = list(set(reduce((lambda x, y: x + y), variables.values())))
    data = pd.concat((data.values()), axis=1)
    if unscramble:
        data = _unscramble_regressor_columns(parent_data, data)
    return variables, data
Recursively parse a model formula by breaking it into additive atoms and tracking grouping symbol depth .
22,897
def mask(in_file, mask_file, new_name):
    """Zero out voxels of *in_file* where *mask_file* is 0; write the result
    to *new_name* and return its absolute path."""
    # Imports are function-scoped so this works as a standalone node function.
    import nibabel as nb
    import os
    in_img = nb.load(in_file)
    mask_img = nb.load(mask_file)
    masked = in_img.get_data()
    masked[mask_img.get_data() == 0] = 0
    out_img = nb.Nifti1Image(masked, in_img.affine, in_img.header)
    out_img.to_filename(new_name)
    return os.path.abspath(new_name)
Apply a binary mask to an image .
22,898
def create_cfm(in_file, lesion_mask=None, global_mask=True, out_path=None):
    """Create a constraint mask for registration.

    Starts from *in_file* (or an all-ones volume when *global_mask* is set),
    subtracts *lesion_mask* when given, and writes the uint8 result to
    *out_path*, which is returned.
    """
    # Imports are function-scoped so this works as a standalone node function.
    import os
    import numpy as np
    import nibabel as nb
    from nipype.utils.filemanip import fname_presuffix
    if out_path is None:
        out_path = fname_presuffix(in_file, suffix='_cfm', newpath=os.getcwd())
    else:
        out_path = os.path.abspath(out_path)
    if not global_mask and not lesion_mask:
        NIWORKFLOWS_LOG.warning(
            'No lesion mask was provided and global_mask not requested, '
            'therefore the original mask will not be modified.')
    in_img = nb.load(in_file)
    data = np.ones(in_img.shape, dtype=np.uint8) if global_mask else in_img.get_data()
    if set(np.unique(data)) - {0, 1}:
        raise ValueError("`global_mask` must be true if `in_file` is not a binary mask")
    if lesion_mask is not None:
        # Subtract the lesion, clamping at zero so values stay binary.
        lm_img = nb.as_closest_canonical(nb.load(lesion_mask))
        data = np.fmax(data - lm_img.get_data(), 0)
    cfm_img = nb.Nifti1Image(data, in_img.affine, in_img.header)
    cfm_img.set_data_dtype(np.uint8)
    cfm_img.to_filename(out_path)
    return out_path
Create a mask to constrain registration .
22,899
def _get_settings(self):
    """Return user-supplied settings when defined; otherwise the packaged
    settings files matching the moving image modality and flavor."""
    if isdefined(self.inputs.settings):
        NIWORKFLOWS_LOG.info('User-defined settings, overriding defaults')
        return self.inputs.settings
    prefix = '{}-mni_registration_{}_'.format(self.inputs.moving.lower(), self.inputs.flavor)
    candidates = sorted(
        name for name in pkgr.resource_listdir('niworkflows', 'data')
        if name.startswith(prefix) and name.endswith('.json'))
    return [pkgr.resource_filename('niworkflows.data', f) for f in candidates]
Return any settings defined by the user as well as any pre - defined settings files that exist for the image modalities to be registered .