diff --git a/.gitattributes b/.gitattributes index 0f6b77fecbd01e28c5bdecb7caf5c41677da8a5d..b39399287eadbff2251356f3ebdf2f8d5dfc3e82 100644 --- a/.gitattributes +++ b/.gitattributes @@ -496,3 +496,5 @@ material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space: material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/pillow.libs/libxcb-55eab65a.so.1.1.0 filter=lfs diff=lfs merge=lfs -text material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected/1908304_m.tif filter=lfs diff=lfs merge=lfs -text material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected_4Classes/1908276_cm.tif filter=lfs diff=lfs merge=lfs -text +material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libaec-7e9d22b8.so.0.1.3 filter=lfs diff=lfs merge=lfs -text +material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected_4Classes/1908318_cm.tif filter=lfs diff=lfs merge=lfs -text diff --git a/material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected_4Classes/1908318_cm.tif b/material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected_4Classes/1908318_cm.tif new file mode 100644 index 0000000000000000000000000000000000000000..5c6282554d0aac6449a9554d105476ba2822b370 --- /dev/null +++ b/material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected_4Classes/1908318_cm.tif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa9f3d1e45be0ff23b4dae0e6e90392ac26888234c1b71514099fc17ab9df69d +size 2162842 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libaec-7e9d22b8.so.0.1.3 b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libaec-7e9d22b8.so.0.1.3 new file mode 100644 index 
0000000000000000000000000000000000000000..04a0fcb757c7bd84c50fd687244d9ee7ed4809c3 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libaec-7e9d22b8.so.0.1.3 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0788adca6f4440b7f72f1520c90a9bf19fb3c0a28202a6a3fbeda3fe525dd9d8 +size 135913 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..435187361b661f70df0351599beb392b30b3e91a Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/__init__.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9af32e045406733f5522575b48224f4bdf58b69e Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_api.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90fba342ce34f538af10bbfb2cbe2dc6edbf2ada Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_client.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d642283f92a2ee29b9002e74e654b15fbe3c6a98 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_config.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe3e7add3bae09eb64c236bc312d8eac4eda8ed9 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_content.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee6db3031b8d5a860546efba88ad1110c99e999c Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_exceptions.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6203c2f84cf47128623082d87465fb344552aefb Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_models.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6052ff9f97dc7a1cc3449460f2d21a454ca43d23 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_status_codes.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb95e806130e5bf1c450155e8969bcf689b94bb7 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_types.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d72fc0f89ea34ed332858e1718e914aed6342deb Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/__pycache__/_urlparse.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/helpers.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..046d29037546826b980b09922595ed396aa361df Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/helpers.cpython-310.pyc differ diff --git 
a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/imports.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/imports.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7e403ac43994766d3f9399274cb8634e08f2c54 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/imports.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/param.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/param.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50eabaab87d7fb66f9c11c45e1df3299dff0017c Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/inference/__pycache__/param.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__init__.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8067676d03ee07725b5aa44e99303701381dc224 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__init__.py @@ -0,0 +1,47 @@ +from functools import wraps + + +class _PluginManager: + def __init__(self): + self._registered_plugins = [] + self._cached_base_callbacks = {} + self._built_functions = {} + + def register(self, *plugins): + """ + Makes it possible to register your plugin. 
+ """ + self._registered_plugins.extend(plugins) + self._build_functions() + + def decorate(self, name=None): + def decorator(callback): + @wraps(callback) + def wrapper(*args, **kwargs): + return built_functions[public_name](*args, **kwargs) + + public_name = name or callback.__name__ + + assert public_name not in self._built_functions + built_functions = self._built_functions + built_functions[public_name] = callback + self._cached_base_callbacks[public_name] = callback + + return wrapper + + return decorator + + def _build_functions(self): + for name, callback in self._cached_base_callbacks.items(): + for plugin in reversed(self._registered_plugins): + # Need to reverse so the first plugin is run first. + try: + func = getattr(plugin, name) + except AttributeError: + pass + else: + callback = func(callback) + self._built_functions[name] = callback + + +plugin_manager = _PluginManager() diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__pycache__/flask.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__pycache__/flask.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6112fccca62ee4272c543659c09d97d715a30d8 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/__pycache__/flask.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/django.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/django.py new file mode 100644 index 0000000000000000000000000000000000000000..cd443bbda59c466c693f89a33fae8c8b53594c24 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/django.py @@ -0,0 +1,296 @@ +""" +Module is used to infer Django model fields. 
+""" +from inspect import Parameter + +from jedi import debug +from jedi.inference.cache import inference_state_function_cache +from jedi.inference.base_value import ValueSet, iterator_to_value_set, ValueWrapper +from jedi.inference.filters import DictFilter, AttributeOverwrite +from jedi.inference.names import NameWrapper, BaseTreeParamName +from jedi.inference.compiled.value import EmptyCompiledName +from jedi.inference.value.instance import TreeInstance +from jedi.inference.value.klass import ClassMixin +from jedi.inference.gradual.base import GenericClass +from jedi.inference.gradual.generics import TupleGenericManager +from jedi.inference.signature import AbstractSignature + + +mapping = { + 'IntegerField': (None, 'int'), + 'BigIntegerField': (None, 'int'), + 'PositiveIntegerField': (None, 'int'), + 'SmallIntegerField': (None, 'int'), + 'CharField': (None, 'str'), + 'TextField': (None, 'str'), + 'EmailField': (None, 'str'), + 'GenericIPAddressField': (None, 'str'), + 'URLField': (None, 'str'), + 'FloatField': (None, 'float'), + 'BinaryField': (None, 'bytes'), + 'BooleanField': (None, 'bool'), + 'DecimalField': ('decimal', 'Decimal'), + 'TimeField': ('datetime', 'time'), + 'DurationField': ('datetime', 'timedelta'), + 'DateField': ('datetime', 'date'), + 'DateTimeField': ('datetime', 'datetime'), + 'UUIDField': ('uuid', 'UUID'), +} + +_FILTER_LIKE_METHODS = ('create', 'filter', 'exclude', 'update', 'get', + 'get_or_create', 'update_or_create') + + +@inference_state_function_cache() +def _get_deferred_attributes(inference_state): + return inference_state.import_module( + ('django', 'db', 'models', 'query_utils') + ).py__getattribute__('DeferredAttribute').execute_annotation() + + +def _infer_scalar_field(inference_state, field_name, field_tree_instance, is_instance): + try: + module_name, attribute_name = mapping[field_tree_instance.py__name__()] + except KeyError: + return None + + if not is_instance: + return _get_deferred_attributes(inference_state) + + if 
module_name is None: + module = inference_state.builtins_module + else: + module = inference_state.import_module((module_name,)) + + for attribute in module.py__getattribute__(attribute_name): + return attribute.execute_with_values() + + +@iterator_to_value_set +def _get_foreign_key_values(cls, field_tree_instance): + if isinstance(field_tree_instance, TreeInstance): + # TODO private access.. + argument_iterator = field_tree_instance._arguments.unpack() + key, lazy_values = next(argument_iterator, (None, None)) + if key is None and lazy_values is not None: + for value in lazy_values.infer(): + if value.py__name__() == 'str': + foreign_key_class_name = value.get_safe_value() + module = cls.get_root_context() + for v in module.py__getattribute__(foreign_key_class_name): + if v.is_class(): + yield v + elif value.is_class(): + yield value + + +def _infer_field(cls, field_name, is_instance): + inference_state = cls.inference_state + result = field_name.infer() + for field_tree_instance in result: + scalar_field = _infer_scalar_field( + inference_state, field_name, field_tree_instance, is_instance) + if scalar_field is not None: + return scalar_field + + name = field_tree_instance.py__name__() + is_many_to_many = name == 'ManyToManyField' + if name in ('ForeignKey', 'OneToOneField') or is_many_to_many: + if not is_instance: + return _get_deferred_attributes(inference_state) + + values = _get_foreign_key_values(cls, field_tree_instance) + if is_many_to_many: + return ValueSet(filter(None, [ + _create_manager_for(v, 'RelatedManager') for v in values + ])) + else: + return values.execute_with_values() + + debug.dbg('django plugin: fail to infer `%s` from class `%s`', + field_name.string_name, cls.py__name__()) + return result + + +class DjangoModelName(NameWrapper): + def __init__(self, cls, name, is_instance): + super().__init__(name) + self._cls = cls + self._is_instance = is_instance + + def infer(self): + return _infer_field(self._cls, self._wrapped_name, 
self._is_instance) + + +def _create_manager_for(cls, manager_cls='BaseManager'): + managers = cls.inference_state.import_module( + ('django', 'db', 'models', 'manager') + ).py__getattribute__(manager_cls) + for m in managers: + if m.is_class_mixin(): + generics_manager = TupleGenericManager((ValueSet([cls]),)) + for c in GenericClass(m, generics_manager).execute_annotation(): + return c + return None + + +def _new_dict_filter(cls, is_instance): + filters = list(cls.get_filters( + is_instance=is_instance, + include_metaclasses=False, + include_type_when_class=False) + ) + dct = { + name.string_name: DjangoModelName(cls, name, is_instance) + for filter_ in reversed(filters) + for name in filter_.values() + } + if is_instance: + # Replace the objects with a name that amounts to nothing when accessed + # in an instance. This is not perfect and still completes "objects" in + # that case, but it at least not inferes stuff like `.objects.filter`. + # It would be nicer to do that in a better way, so that it also doesn't + # show up in completions, but it's probably just not worth doing that + # for the extra amount of work. + dct['objects'] = EmptyCompiledName(cls.inference_state, 'objects') + + return DictFilter(dct) + + +def is_django_model_base(value): + return value.py__name__() == 'ModelBase' \ + and value.get_root_context().py__name__() == 'django.db.models.base' + + +def get_metaclass_filters(func): + def wrapper(cls, metaclasses, is_instance): + for metaclass in metaclasses: + if is_django_model_base(metaclass): + return [_new_dict_filter(cls, is_instance)] + + return func(cls, metaclasses, is_instance) + return wrapper + + +def tree_name_to_values(func): + def wrapper(inference_state, context, tree_name): + result = func(inference_state, context, tree_name) + if tree_name.value in _FILTER_LIKE_METHODS: + # Here we try to overwrite stuff like User.objects.filter. We need + # this to make sure that keyword param completion works on these + # kind of methods. 
+ for v in result: + if v.get_qualified_names() == ('_BaseQuerySet', tree_name.value) \ + and v.parent_context.is_module() \ + and v.parent_context.py__name__() == 'django.db.models.query': + qs = context.get_value() + generics = qs.get_generics() + if len(generics) >= 1: + return ValueSet(QuerySetMethodWrapper(v, model) + for model in generics[0]) + + elif tree_name.value == 'BaseManager' and context.is_module() \ + and context.py__name__() == 'django.db.models.manager': + return ValueSet(ManagerWrapper(r) for r in result) + + elif tree_name.value == 'Field' and context.is_module() \ + and context.py__name__() == 'django.db.models.fields': + return ValueSet(FieldWrapper(r) for r in result) + return result + return wrapper + + +def _find_fields(cls): + for name in _new_dict_filter(cls, is_instance=False).values(): + for value in name.infer(): + if value.name.get_qualified_names(include_module_names=True) \ + == ('django', 'db', 'models', 'query_utils', 'DeferredAttribute'): + yield name + + +def _get_signatures(cls): + return [DjangoModelSignature(cls, field_names=list(_find_fields(cls)))] + + +def get_metaclass_signatures(func): + def wrapper(cls, metaclasses): + for metaclass in metaclasses: + if is_django_model_base(metaclass): + return _get_signatures(cls) + return func(cls, metaclass) + return wrapper + + +class ManagerWrapper(ValueWrapper): + def py__getitem__(self, index_value_set, contextualized_node): + return ValueSet( + GenericManagerWrapper(generic) + for generic in self._wrapped_value.py__getitem__( + index_value_set, contextualized_node) + ) + + +class GenericManagerWrapper(AttributeOverwrite, ClassMixin): + def py__get__on_class(self, calling_instance, instance, class_value): + return calling_instance.class_value.with_generics( + (ValueSet({class_value}),) + ).py__call__(calling_instance._arguments) + + def with_generics(self, generics_tuple): + return self._wrapped_value.with_generics(generics_tuple) + + +class FieldWrapper(ValueWrapper): + def 
py__getitem__(self, index_value_set, contextualized_node): + return ValueSet( + GenericFieldWrapper(generic) + for generic in self._wrapped_value.py__getitem__( + index_value_set, contextualized_node) + ) + + +class GenericFieldWrapper(AttributeOverwrite, ClassMixin): + def py__get__on_class(self, calling_instance, instance, class_value): + # This is mostly an optimization to avoid Jedi aborting inference, + # because of too many function executions of Field.__get__. + return ValueSet({calling_instance}) + + +class DjangoModelSignature(AbstractSignature): + def __init__(self, value, field_names): + super().__init__(value) + self._field_names = field_names + + def get_param_names(self, resolve_stars=False): + return [DjangoParamName(name) for name in self._field_names] + + +class DjangoParamName(BaseTreeParamName): + def __init__(self, field_name): + super().__init__(field_name.parent_context, field_name.tree_name) + self._field_name = field_name + + def get_kind(self): + return Parameter.KEYWORD_ONLY + + def infer(self): + return self._field_name.infer() + + +class QuerySetMethodWrapper(ValueWrapper): + def __init__(self, method, model_cls): + super().__init__(method) + self._model_cls = model_cls + + def py__get__(self, instance, class_value): + return ValueSet({QuerySetBoundMethodWrapper(v, self._model_cls) + for v in self._wrapped_value.py__get__(instance, class_value)}) + + +class QuerySetBoundMethodWrapper(ValueWrapper): + def __init__(self, method, model_cls): + super().__init__(method) + self._model_cls = model_cls + + def get_signatures(self): + return _get_signatures(self._model_cls) diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/flask.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/flask.py new file mode 100644 index 0000000000000000000000000000000000000000..8d67b83959cfd6e760f2b6617849d5b08d3e156b --- /dev/null +++ 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/flask.py @@ -0,0 +1,21 @@ +def import_module(callback): + """ + Handle "magic" Flask extension imports: + ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``. + """ + def wrapper(inference_state, import_names, module_context, *args, **kwargs): + if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'): + # New style. + ipath = ('flask_' + import_names[2]), + value_set = callback(inference_state, ipath, None, *args, **kwargs) + if value_set: + return value_set + value_set = callback(inference_state, ('flaskext',), None, *args, **kwargs) + return callback( + inference_state, + ('flaskext', import_names[2]), + next(iter(value_set)), + *args, **kwargs + ) + return callback(inference_state, import_names, module_context, *args, **kwargs) + return wrapper diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/pytest.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/pytest.py new file mode 100644 index 0000000000000000000000000000000000000000..acf6ad38b3cd2d21b147722cd21745efd2029574 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/pytest.py @@ -0,0 +1,269 @@ +import sys +from typing import List +from pathlib import Path + +from parso.tree import search_ancestor +from jedi.inference.cache import inference_state_method_cache +from jedi.inference.imports import goto_import, load_module_from_path +from jedi.inference.filters import ParserTreeFilter +from jedi.inference.base_value import NO_VALUES, ValueSet +from jedi.inference.helpers import infer_call_of_leaf + +_PYTEST_FIXTURE_MODULES = [ + ('_pytest', 'monkeypatch'), + ('_pytest', 'capture'), + ('_pytest', 'logging'), + ('_pytest', 'tmpdir'), + ('_pytest', 'pytester'), +] + + +def execute(callback): + def wrapper(value, arguments): + # This might not be necessary anymore in pytest 4/5, definitely 
needed + # for pytest 3. + if value.py__name__() == 'fixture' \ + and value.parent_context.py__name__() == '_pytest.fixtures': + return NO_VALUES + + return callback(value, arguments) + return wrapper + + +def infer_anonymous_param(func): + def get_returns(value): + if value.tree_node.annotation is not None: + result = value.execute_with_values() + if any(v.name.get_qualified_names(include_module_names=True) + == ('typing', 'Generator') + for v in result): + return ValueSet.from_sets( + v.py__getattribute__('__next__').execute_annotation() + for v in result + ) + return result + + # In pytest we need to differentiate between generators and normal + # returns. + # Parameters still need to be anonymous, .as_context() ensures that. + function_context = value.as_context() + if function_context.is_generator(): + return function_context.merge_yield_values() + else: + return function_context.get_return_values() + + def wrapper(param_name): + # parameters with an annotation do not need special handling + if param_name.annotation_node: + return func(param_name) + is_pytest_param, param_name_is_function_name = \ + _is_a_pytest_param_and_inherited(param_name) + if is_pytest_param: + module = param_name.get_root_context() + fixtures = _goto_pytest_fixture( + module, + param_name.string_name, + # This skips the current module, because we are basically + # inheriting a fixture from somewhere else. 
+ skip_own_module=param_name_is_function_name, + ) + if fixtures: + return ValueSet.from_sets( + get_returns(value) + for fixture in fixtures + for value in fixture.infer() + ) + return func(param_name) + return wrapper + + +def goto_anonymous_param(func): + def wrapper(param_name): + is_pytest_param, param_name_is_function_name = \ + _is_a_pytest_param_and_inherited(param_name) + if is_pytest_param: + names = _goto_pytest_fixture( + param_name.get_root_context(), + param_name.string_name, + skip_own_module=param_name_is_function_name, + ) + if names: + return names + return func(param_name) + return wrapper + + +def complete_param_names(func): + def wrapper(context, func_name, decorator_nodes): + module_context = context.get_root_context() + if _is_pytest_func(func_name, decorator_nodes): + names = [] + for module_context in _iter_pytest_modules(module_context): + names += FixtureFilter(module_context).values() + if names: + return names + return func(context, func_name, decorator_nodes) + return wrapper + + +def _goto_pytest_fixture(module_context, name, skip_own_module): + for module_context in _iter_pytest_modules(module_context, skip_own_module=skip_own_module): + names = FixtureFilter(module_context).get(name) + if names: + return names + + +def _is_a_pytest_param_and_inherited(param_name): + """ + Pytest params are either in a `test_*` function or have a pytest fixture + with the decorator @pytest.fixture. + + This is a heuristic and will work in most cases. 
+ """ + funcdef = search_ancestor(param_name.tree_name, 'funcdef') + if funcdef is None: # A lambda + return False, False + decorators = funcdef.get_decorators() + return _is_pytest_func(funcdef.name.value, decorators), \ + funcdef.name.value == param_name.string_name + + +def _is_pytest_func(func_name, decorator_nodes): + return func_name.startswith('test') \ + or any('fixture' in n.get_code() for n in decorator_nodes) + + +def _find_pytest_plugin_modules() -> List[List[str]]: + """ + Finds pytest plugin modules hooked by setuptools entry points + + See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points + """ + if sys.version_info >= (3, 8): + from importlib.metadata import entry_points + + if sys.version_info >= (3, 10): + pytest_entry_points = entry_points(group="pytest11") + else: + pytest_entry_points = entry_points().get("pytest11", ()) + + if sys.version_info >= (3, 9): + return [ep.module.split(".") for ep in pytest_entry_points] + else: + # Python 3.8 doesn't have `EntryPoint.module`. 
Implement equivalent + # to what Python 3.9 does (with additional None check to placate `mypy`) + matches = [ + ep.pattern.match(ep.value) + for ep in pytest_entry_points + ] + return [x.group('module').split(".") for x in matches if x] + + else: + from pkg_resources import iter_entry_points + return [ep.module_name.split(".") for ep in iter_entry_points(group="pytest11")] + + +@inference_state_method_cache() +def _iter_pytest_modules(module_context, skip_own_module=False): + if not skip_own_module: + yield module_context + + file_io = module_context.get_value().file_io + if file_io is not None: + folder = file_io.get_parent_folder() + sys_path = module_context.inference_state.get_sys_path() + + # prevent an infinite loop when reaching the root of the current drive + last_folder = None + + while any(folder.path.startswith(p) for p in sys_path): + file_io = folder.get_file_io('conftest.py') + if Path(file_io.path) != module_context.py__file__(): + try: + m = load_module_from_path(module_context.inference_state, file_io) + conftest_module = m.as_context() + yield conftest_module + + plugins_list = m.tree_node.get_used_names().get("pytest_plugins") + if plugins_list: + name = conftest_module.create_name(plugins_list[0]) + yield from _load_pytest_plugins(module_context, name) + except FileNotFoundError: + pass + folder = folder.get_parent_folder() + + # prevent an infinite for loop if the same parent folder is return twice + if last_folder is not None and folder.path == last_folder.path: + break + last_folder = folder # keep track of the last found parent name + + for names in _PYTEST_FIXTURE_MODULES + _find_pytest_plugin_modules(): + for module_value in module_context.inference_state.import_module(names): + yield module_value.as_context() + + +def _load_pytest_plugins(module_context, name): + from jedi.inference.helpers import get_str_or_none + + for inferred in name.infer(): + for seq_value in inferred.py__iter__(): + for value in seq_value.infer(): + fq_name = 
get_str_or_none(value) + if fq_name: + names = fq_name.split(".") + for module_value in module_context.inference_state.import_module(names): + yield module_value.as_context() + + +class FixtureFilter(ParserTreeFilter): + def _filter(self, names): + for name in super()._filter(names): + # look for fixture definitions of imported names + if name.parent.type == "import_from": + imported_names = goto_import(self.parent_context, name) + if any( + self._is_fixture(iname.parent_context, iname.tree_name) + for iname in imported_names + # discard imports of whole modules, that have no tree_name + if iname.tree_name + ): + yield name + + elif self._is_fixture(self.parent_context, name): + yield name + + def _is_fixture(self, context, name): + funcdef = name.parent + # Class fixtures are not supported + if funcdef.type != "funcdef": + return False + decorated = funcdef.parent + if decorated.type != "decorated": + return False + decorators = decorated.children[0] + if decorators.type == 'decorators': + decorators = decorators.children + else: + decorators = [decorators] + for decorator in decorators: + dotted_name = decorator.children[1] + # A heuristic, this makes it faster. + if 'fixture' in dotted_name.get_code(): + if dotted_name.type == 'atom_expr': + # Since Python3.9 a decorator does not have dotted names + # anymore. 
+ last_trailer = dotted_name.children[-1] + last_leaf = last_trailer.get_last_leaf() + if last_leaf == ')': + values = infer_call_of_leaf( + context, last_leaf, cut_own_trailer=True + ) + else: + values = context.infer_node(dotted_name) + else: + values = context.infer_node(dotted_name) + for value in values: + if value.name.get_qualified_names(include_module_names=True) \ + == ('_pytest', 'fixtures', 'fixture'): + return True + return False diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/registry.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..c1a0b7496bfacba69d289f8a5383c9d4a8939653 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/registry.py @@ -0,0 +1,12 @@ +""" +This is not a plugin, this is just the place were plugins are registered. +""" + +from jedi.plugins import stdlib +from jedi.plugins import flask +from jedi.plugins import pytest +from jedi.plugins import django +from jedi.plugins import plugin_manager + + +plugin_manager.register(stdlib, flask, pytest, django) diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/stdlib.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/stdlib.py new file mode 100644 index 0000000000000000000000000000000000000000..e1004ec8171cd8b9ca474070497d79a32a51852a --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/plugins/stdlib.py @@ -0,0 +1,879 @@ +""" +Implementations of standard library functions, because it's not possible to +understand them with Jedi. + +To add a new implementation, create a function and add it to the +``_implemented`` dict at the bottom of this module. + +Note that this module exists only to implement very specific functionality in +the standard library. 
The usual way to understand the standard library is the +compiled module that returns the types for C-builtins. +""" +import parso +import os +from inspect import Parameter + +from jedi import debug +from jedi.inference.utils import safe_property +from jedi.inference.helpers import get_str_or_none +from jedi.inference.arguments import iterate_argument_clinic, ParamIssue, \ + repack_with_argument_clinic, AbstractArguments, TreeArgumentsWrapper +from jedi.inference import analysis +from jedi.inference import compiled +from jedi.inference.value.instance import \ + AnonymousMethodExecutionContext, MethodExecutionContext +from jedi.inference.base_value import ContextualizedNode, \ + NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper +from jedi.inference.value import ClassValue, ModuleValue +from jedi.inference.value.klass import ClassMixin +from jedi.inference.value.function import FunctionMixin +from jedi.inference.value import iterable +from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \ + LazyKnownValues +from jedi.inference.names import ValueName, BaseTreeParamName +from jedi.inference.filters import AttributeOverwrite, publish_method, \ + ParserTreeFilter, DictFilter +from jedi.inference.signature import AbstractSignature, SignatureWrapper + + +# Copied from Python 3.6's stdlib. 
+_NAMEDTUPLE_CLASS_TEMPLATE = """\ +_property = property +_tuple = tuple +from operator import itemgetter as _itemgetter +from collections import OrderedDict + +class {typename}(tuple): + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % list(kwds)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return self.__class__.__name__ + '({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values.' + return OrderedDict(zip(self._fields, self)) + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + + # These methods were added by Jedi. + # __new__ doesn't really work with Jedi. So adding this to nametuples seems + # like the easiest way. + def __init__(self, {arg_list}): + 'A helper function for namedtuple.' 
+ self.__iterable = ({arg_list}) + + def __iter__(self): + for i in self.__iterable: + yield i + + def __getitem__(self, y): + return self.__iterable[y] + +{field_defs} +""" + +_NAMEDTUPLE_FIELD_TEMPLATE = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + + +def execute(callback): + def wrapper(value, arguments): + def call(): + return callback(value, arguments=arguments) + + try: + obj_name = value.name.string_name + except AttributeError: + pass + else: + p = value.parent_context + if p is not None and p.is_builtins_module(): + module_name = 'builtins' + elif p is not None and p.is_module(): + module_name = p.py__name__() + else: + return call() + + if value.is_bound_method() or value.is_instance(): + # value can be an instance for example if it is a partial + # object. + return call() + + # for now we just support builtin functions. + try: + func = _implemented[module_name][obj_name] + except KeyError: + pass + else: + return func(value, arguments=arguments, callback=call) + return call() + + return wrapper + + +def _follow_param(inference_state, arguments, index): + try: + key, lazy_value = list(arguments.unpack())[index] + except IndexError: + return NO_VALUES + else: + return lazy_value.infer() + + +def argument_clinic(clinic_string, want_value=False, want_context=False, + want_arguments=False, want_inference_state=False, + want_callback=False): + """ + Works like Argument Clinic (PEP 436), to validate function params. 
+ """ + + def f(func): + def wrapper(value, arguments, callback): + try: + args = tuple(iterate_argument_clinic( + value.inference_state, arguments, clinic_string)) + except ParamIssue: + return NO_VALUES + + debug.dbg('builtin start %s' % value, color='MAGENTA') + kwargs = {} + if want_context: + kwargs['context'] = arguments.context + if want_value: + kwargs['value'] = value + if want_inference_state: + kwargs['inference_state'] = value.inference_state + if want_arguments: + kwargs['arguments'] = arguments + if want_callback: + kwargs['callback'] = callback + result = func(*args, **kwargs) + debug.dbg('builtin end: %s', result, color='MAGENTA') + return result + + return wrapper + return f + + +@argument_clinic('iterator[, default], /', want_inference_state=True) +def builtins_next(iterators, defaults, inference_state): + # TODO theoretically we have to check here if something is an iterator. + # That is probably done by checking if it's not a class. + return defaults | iterators.py__getattribute__('__next__').execute_with_values() + + +@argument_clinic('iterator[, default], /') +def builtins_iter(iterators_or_callables, defaults): + # TODO implement this if it's a callable. + return iterators_or_callables.py__getattribute__('__iter__').execute_with_values() + + +@argument_clinic('object, name[, default], /') +def builtins_getattr(objects, names, defaults=None): + # follow the first param + for value in objects: + for name in names: + string = get_str_or_none(name) + if string is None: + debug.warning('getattr called without str') + continue + else: + return value.py__getattribute__(string) + return NO_VALUES + + +@argument_clinic('object[, bases, dict], /') +def builtins_type(objects, bases, dicts): + if bases or dicts: + # It's a type creation... maybe someday... 
+ return NO_VALUES + else: + return objects.py__class__() + + +class SuperInstance(LazyValueWrapper): + """To be used like the object ``super`` returns.""" + def __init__(self, inference_state, instance): + self.inference_state = inference_state + self._instance = instance # Corresponds to super().__self__ + + def _get_bases(self): + return self._instance.py__class__().py__bases__() + + def _get_wrapped_value(self): + objs = self._get_bases()[0].infer().execute_with_values() + if not objs: + # This is just a fallback and will only be used, if it's not + # possible to find a class + return self._instance + return next(iter(objs)) + + def get_filters(self, origin_scope=None): + for b in self._get_bases(): + for value in b.infer().execute_with_values(): + for f in value.get_filters(): + yield f + + +@argument_clinic('[type[, value]], /', want_context=True) +def builtins_super(types, objects, context): + instance = None + if isinstance(context, AnonymousMethodExecutionContext): + instance = context.instance + elif isinstance(context, MethodExecutionContext): + instance = context.instance + if instance is None: + return NO_VALUES + return ValueSet({SuperInstance(instance.inference_state, instance)}) + + +class ReversedObject(AttributeOverwrite): + def __init__(self, reversed_obj, iter_list): + super().__init__(reversed_obj) + self._iter_list = iter_list + + def py__iter__(self, contextualized_node=None): + return self._iter_list + + @publish_method('__next__') + def _next(self, arguments): + return ValueSet.from_sets( + lazy_value.infer() for lazy_value in self._iter_list + ) + + +@argument_clinic('sequence, /', want_value=True, want_arguments=True) +def builtins_reversed(sequences, value, arguments): + # While we could do without this variable (just by using sequences), we + # want static analysis to work well. Therefore we need to generated the + # values again. 
+ key, lazy_value = next(arguments.unpack()) + cn = None + if isinstance(lazy_value, LazyTreeValue): + cn = ContextualizedNode(lazy_value.context, lazy_value.data) + ordered = list(sequences.iterate(cn)) + + # Repack iterator values and then run it the normal way. This is + # necessary, because `reversed` is a function and autocompletion + # would fail in certain cases like `reversed(x).__iter__` if we + # just returned the result directly. + seq, = value.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values() + return ValueSet([ReversedObject(seq, list(reversed(ordered)))]) + + +@argument_clinic('value, type, /', want_arguments=True, want_inference_state=True) +def builtins_isinstance(objects, types, arguments, inference_state): + bool_results = set() + for o in objects: + cls = o.py__class__() + try: + cls.py__bases__ + except AttributeError: + # This is temporary. Everything should have a class attribute in + # Python?! Maybe we'll leave it here, because some numpy objects or + # whatever might not. + bool_results = set([True, False]) + break + + mro = list(cls.py__mro__()) + + for cls_or_tup in types: + if cls_or_tup.is_class(): + bool_results.add(cls_or_tup in mro) + elif cls_or_tup.name.string_name == 'tuple' \ + and cls_or_tup.get_root_context().is_builtins_module(): + # Check for tuples. + classes = ValueSet.from_sets( + lazy_value.infer() + for lazy_value in cls_or_tup.iterate() + ) + bool_results.add(any(cls in mro for cls in classes)) + else: + _, lazy_value = list(arguments.unpack())[1] + if isinstance(lazy_value, LazyTreeValue): + node = lazy_value.data + message = 'TypeError: isinstance() arg 2 must be a ' \ + 'class, type, or tuple of classes and types, ' \ + 'not %s.' 
% cls_or_tup + analysis.add(lazy_value.context, 'type-error-isinstance', node, message) + + return ValueSet( + compiled.builtin_from_name(inference_state, str(b)) + for b in bool_results + ) + + +class StaticMethodObject(ValueWrapper): + def py__get__(self, instance, class_value): + return ValueSet([self._wrapped_value]) + + +@argument_clinic('sequence, /') +def builtins_staticmethod(functions): + return ValueSet(StaticMethodObject(f) for f in functions) + + +class ClassMethodObject(ValueWrapper): + def __init__(self, class_method_obj, function): + super().__init__(class_method_obj) + self._function = function + + def py__get__(self, instance, class_value): + return ValueSet([ + ClassMethodGet(__get__, class_value, self._function) + for __get__ in self._wrapped_value.py__getattribute__('__get__') + ]) + + +class ClassMethodGet(ValueWrapper): + def __init__(self, get_method, klass, function): + super().__init__(get_method) + self._class = klass + self._function = function + + def get_signatures(self): + return [sig.bind(self._function) for sig in self._function.get_signatures()] + + def py__call__(self, arguments): + return self._function.execute(ClassMethodArguments(self._class, arguments)) + + +class ClassMethodArguments(TreeArgumentsWrapper): + def __init__(self, klass, arguments): + super().__init__(arguments) + self._class = klass + + def unpack(self, func=None): + yield None, LazyKnownValue(self._class) + for values in self._wrapped_arguments.unpack(func): + yield values + + +@argument_clinic('sequence, /', want_value=True, want_arguments=True) +def builtins_classmethod(functions, value, arguments): + return ValueSet( + ClassMethodObject(class_method_object, function) + for class_method_object in value.py__call__(arguments=arguments) + for function in functions + ) + + +class PropertyObject(AttributeOverwrite, ValueWrapper): + api_type = 'property' + + def __init__(self, property_obj, function): + super().__init__(property_obj) + self._function = function + + 
def py__get__(self, instance, class_value): + if instance is None: + return ValueSet([self]) + return self._function.execute_with_values(instance) + + @publish_method('deleter') + @publish_method('getter') + @publish_method('setter') + def _return_self(self, arguments): + return ValueSet({self}) + + +@argument_clinic('func, /', want_callback=True) +def builtins_property(functions, callback): + return ValueSet( + PropertyObject(property_value, function) + for property_value in callback() + for function in functions + ) + + +def collections_namedtuple(value, arguments, callback): + """ + Implementation of the namedtuple function. + + This has to be done by processing the namedtuple class template and + inferring the result. + + """ + inference_state = value.inference_state + + # Process arguments + name = 'jedi_unknown_namedtuple' + for c in _follow_param(inference_state, arguments, 0): + x = get_str_or_none(c) + if x is not None: + name = x + break + + # TODO here we only use one of the types, we should use all. 
+ param_values = _follow_param(inference_state, arguments, 1) + if not param_values: + return NO_VALUES + _fields = list(param_values)[0] + string = get_str_or_none(_fields) + if string is not None: + fields = string.replace(',', ' ').split() + elif isinstance(_fields, iterable.Sequence): + fields = [ + get_str_or_none(v) + for lazy_value in _fields.py__iter__() + for v in lazy_value.infer() + ] + fields = [f for f in fields if f is not None] + else: + return NO_VALUES + + # Build source code + code = _NAMEDTUPLE_CLASS_TEMPLATE.format( + typename=name, + field_names=tuple(fields), + num_fields=len(fields), + arg_list=repr(tuple(fields)).replace("'", "")[1:-1], + repr_fmt='', + field_defs='\n'.join(_NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name) + for index, name in enumerate(fields)) + ) + + # Parse source code + module = inference_state.grammar.parse(code) + generated_class = next(module.iter_classdefs()) + parent_context = ModuleValue( + inference_state, module, + code_lines=parso.split_lines(code, keepends=True), + ).as_context() + + return ValueSet([ClassValue(inference_state, parent_context, generated_class)]) + + +class PartialObject(ValueWrapper): + def __init__(self, actual_value, arguments, instance=None): + super().__init__(actual_value) + self._arguments = arguments + self._instance = instance + + def _get_functions(self, unpacked_arguments): + key, lazy_value = next(unpacked_arguments, (None, None)) + if key is not None or lazy_value is None: + debug.warning("Partial should have a proper function %s", self._arguments) + return None + return lazy_value.infer() + + def get_signatures(self): + unpacked_arguments = self._arguments.unpack() + funcs = self._get_functions(unpacked_arguments) + if funcs is None: + return [] + + arg_count = 0 + if self._instance is not None: + arg_count = 1 + keys = set() + for key, _ in unpacked_arguments: + if key is None: + arg_count += 1 + else: + keys.add(key) + return [PartialSignature(s, arg_count, keys) for s 
in funcs.get_signatures()] + + def py__call__(self, arguments): + funcs = self._get_functions(self._arguments.unpack()) + if funcs is None: + return NO_VALUES + + return funcs.execute( + MergedPartialArguments(self._arguments, arguments, self._instance) + ) + + def py__doc__(self): + """ + In CPython partial does not replace the docstring. However we are still + imitating it here, because we want this docstring to be worth something + for the user. + """ + callables = self._get_functions(self._arguments.unpack()) + if callables is None: + return '' + for callable_ in callables: + return callable_.py__doc__() + return '' + + def py__get__(self, instance, class_value): + return ValueSet([self]) + + +class PartialMethodObject(PartialObject): + def py__get__(self, instance, class_value): + if instance is None: + return ValueSet([self]) + return ValueSet([PartialObject(self._wrapped_value, self._arguments, instance)]) + + +class PartialSignature(SignatureWrapper): + def __init__(self, wrapped_signature, skipped_arg_count, skipped_arg_set): + super().__init__(wrapped_signature) + self._skipped_arg_count = skipped_arg_count + self._skipped_arg_set = skipped_arg_set + + def get_param_names(self, resolve_stars=False): + names = self._wrapped_signature.get_param_names()[self._skipped_arg_count:] + return [n for n in names if n.string_name not in self._skipped_arg_set] + + +class MergedPartialArguments(AbstractArguments): + def __init__(self, partial_arguments, call_arguments, instance=None): + self._partial_arguments = partial_arguments + self._call_arguments = call_arguments + self._instance = instance + + def unpack(self, funcdef=None): + unpacked = self._partial_arguments.unpack(funcdef) + # Ignore this one, it's the function. It was checked before that it's + # there. 
+ next(unpacked, None) + if self._instance is not None: + yield None, LazyKnownValue(self._instance) + for key_lazy_value in unpacked: + yield key_lazy_value + for key_lazy_value in self._call_arguments.unpack(funcdef): + yield key_lazy_value + + +def functools_partial(value, arguments, callback): + return ValueSet( + PartialObject(instance, arguments) + for instance in value.py__call__(arguments) + ) + + +def functools_partialmethod(value, arguments, callback): + return ValueSet( + PartialMethodObject(instance, arguments) + for instance in value.py__call__(arguments) + ) + + +@argument_clinic('first, /') +def _return_first_param(firsts): + return firsts + + +@argument_clinic('seq') +def _random_choice(sequences): + return ValueSet.from_sets( + lazy_value.infer() + for sequence in sequences + for lazy_value in sequence.py__iter__() + ) + + +def _dataclass(value, arguments, callback): + for c in _follow_param(value.inference_state, arguments, 0): + if c.is_class(): + return ValueSet([DataclassWrapper(c)]) + else: + return ValueSet([value]) + return NO_VALUES + + +class DataclassWrapper(ValueWrapper, ClassMixin): + def get_signatures(self): + param_names = [] + for cls in reversed(list(self.py__mro__())): + if isinstance(cls, DataclassWrapper): + filter_ = cls.as_context().get_global_filter() + # .values ordering is not guaranteed, at least not in + # Python < 3.6, when dicts where not ordered, which is an + # implementation detail anyway. 
+ for name in sorted(filter_.values(), key=lambda name: name.start_pos): + d = name.tree_name.get_definition() + annassign = d.children[1] + if d.type == 'expr_stmt' and annassign.type == 'annassign': + if len(annassign.children) < 4: + default = None + else: + default = annassign.children[3] + param_names.append(DataclassParamName( + parent_context=cls.parent_context, + tree_name=name.tree_name, + annotation_node=annassign.children[1], + default_node=default, + )) + return [DataclassSignature(cls, param_names)] + + +class DataclassSignature(AbstractSignature): + def __init__(self, value, param_names): + super().__init__(value) + self._param_names = param_names + + def get_param_names(self, resolve_stars=False): + return self._param_names + + +class DataclassParamName(BaseTreeParamName): + def __init__(self, parent_context, tree_name, annotation_node, default_node): + super().__init__(parent_context, tree_name) + self.annotation_node = annotation_node + self.default_node = default_node + + def get_kind(self): + return Parameter.POSITIONAL_OR_KEYWORD + + def infer(self): + if self.annotation_node is None: + return NO_VALUES + else: + return self.parent_context.infer_node(self.annotation_node) + + +class ItemGetterCallable(ValueWrapper): + def __init__(self, instance, args_value_set): + super().__init__(instance) + self._args_value_set = args_value_set + + @repack_with_argument_clinic('item, /') + def py__call__(self, item_value_set): + value_set = NO_VALUES + for args_value in self._args_value_set: + lazy_values = list(args_value.py__iter__()) + if len(lazy_values) == 1: + # TODO we need to add the contextualized value. 
+ value_set |= item_value_set.get_item(lazy_values[0].infer(), None) + else: + value_set |= ValueSet([iterable.FakeList( + self._wrapped_value.inference_state, + [ + LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None)) + for lazy_value in lazy_values + ], + )]) + return value_set + + +@argument_clinic('func, /') +def _functools_wraps(funcs): + return ValueSet(WrapsCallable(func) for func in funcs) + + +class WrapsCallable(ValueWrapper): + # XXX this is not the correct wrapped value, it should be a weird + # partials object, but it doesn't matter, because it's always used as a + # decorator anyway. + @repack_with_argument_clinic('func, /') + def py__call__(self, funcs): + return ValueSet({Wrapped(func, self._wrapped_value) for func in funcs}) + + +class Wrapped(ValueWrapper, FunctionMixin): + def __init__(self, func, original_function): + super().__init__(func) + self._original_function = original_function + + @property + def name(self): + return self._original_function.name + + def get_signature_functions(self): + return [self] + + +@argument_clinic('*args, /', want_value=True, want_arguments=True) +def _operator_itemgetter(args_value_set, value, arguments): + return ValueSet([ + ItemGetterCallable(instance, args_value_set) + for instance in value.py__call__(arguments) + ]) + + +def _create_string_input_function(func): + @argument_clinic('string, /', want_value=True, want_arguments=True) + def wrapper(strings, value, arguments): + def iterate(): + for value in strings: + s = get_str_or_none(value) + if s is not None: + s = func(s) + yield compiled.create_simple_object(value.inference_state, s) + values = ValueSet(iterate()) + if values: + return values + return value.py__call__(arguments) + return wrapper + + +@argument_clinic('*args, /', want_callback=True) +def _os_path_join(args_set, callback): + if len(args_set) == 1: + string = '' + sequence, = args_set + is_first = True + for lazy_value in sequence.py__iter__(): + string_values = 
lazy_value.infer() + if len(string_values) != 1: + break + s = get_str_or_none(next(iter(string_values))) + if s is None: + break + if not is_first: + string += os.path.sep + string += s + is_first = False + else: + return ValueSet([compiled.create_simple_object(sequence.inference_state, string)]) + return callback() + + +_implemented = { + 'builtins': { + 'getattr': builtins_getattr, + 'type': builtins_type, + 'super': builtins_super, + 'reversed': builtins_reversed, + 'isinstance': builtins_isinstance, + 'next': builtins_next, + 'iter': builtins_iter, + 'staticmethod': builtins_staticmethod, + 'classmethod': builtins_classmethod, + 'property': builtins_property, + }, + 'copy': { + 'copy': _return_first_param, + 'deepcopy': _return_first_param, + }, + 'json': { + 'load': lambda value, arguments, callback: NO_VALUES, + 'loads': lambda value, arguments, callback: NO_VALUES, + }, + 'collections': { + 'namedtuple': collections_namedtuple, + }, + 'functools': { + 'partial': functools_partial, + 'partialmethod': functools_partialmethod, + 'wraps': _functools_wraps, + }, + '_weakref': { + 'proxy': _return_first_param, + }, + 'random': { + 'choice': _random_choice, + }, + 'operator': { + 'itemgetter': _operator_itemgetter, + }, + 'abc': { + # Not sure if this is necessary, but it's used a lot in typeshed and + # it's for now easier to just pass the function. + 'abstractmethod': _return_first_param, + }, + 'typing': { + # The _alias function just leads to some annoying type inference. + # Therefore, just make it return nothing, which leads to the stubs + # being used instead. This only matters for 3.7+. + '_alias': lambda value, arguments, callback: NO_VALUES, + # runtime_checkable doesn't really change anything and is just + # adding logs for infering stuff, so we can safely ignore it. + 'runtime_checkable': lambda value, arguments, callback: NO_VALUES, + }, + 'dataclasses': { + # For now this works at least better than Jedi trying to understand it. 
+ 'dataclass': _dataclass + }, + # attrs exposes declaration interface roughly compatible with dataclasses + # via attrs.define, attrs.frozen and attrs.mutable + # https://www.attrs.org/en/stable/names.html + 'attr': { + 'define': _dataclass, + 'frozen': _dataclass, + }, + 'attrs': { + 'define': _dataclass, + 'frozen': _dataclass, + }, + 'os.path': { + 'dirname': _create_string_input_function(os.path.dirname), + 'abspath': _create_string_input_function(os.path.abspath), + 'relpath': _create_string_input_function(os.path.relpath), + 'join': _os_path_join, + } +} + + +def get_metaclass_filters(func): + def wrapper(cls, metaclasses, is_instance): + for metaclass in metaclasses: + if metaclass.py__name__() == 'EnumMeta' \ + and metaclass.get_root_context().py__name__() == 'enum': + filter_ = ParserTreeFilter(parent_context=cls.as_context()) + return [DictFilter({ + name.string_name: EnumInstance(cls, name).name + for name in filter_.values() + })] + return func(cls, metaclasses, is_instance) + return wrapper + + +class EnumInstance(LazyValueWrapper): + def __init__(self, cls, name): + self.inference_state = cls.inference_state + self._cls = cls # Corresponds to super().__self__ + self._name = name + self.tree_node = self._name.tree_name + + @safe_property + def name(self): + return ValueName(self, self._name.tree_name) + + def _get_wrapped_value(self): + n = self._name.string_name + if n.startswith('__') and n.endswith('__') or self._name.api_type == 'function': + inferred = self._name.infer() + if inferred: + return next(iter(inferred)) + o, = self.inference_state.builtins_module.py__getattribute__('object') + return o + + value, = self._cls.execute_with_values() + return value + + def get_filters(self, origin_scope=None): + yield DictFilter(dict( + name=compiled.create_simple_object(self.inference_state, self._name.string_name).name, + value=self._name, + )) + for f in self._get_wrapped_value().get_filters(): + yield f + + +def tree_name_to_values(func): + def 
wrapper(inference_state, context, tree_name): + if tree_name.value == 'sep' and context.is_module() and context.py__name__() == 'os.path': + return ValueSet({ + compiled.create_simple_object(inference_state, os.path.sep), + }) + return func(inference_state, context, tree_name) + return wrapper diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/base.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/base.pyi new file mode 100644 index 0000000000000000000000000000000000000000..eafdad55061f6c00462f9281aabdabcec3804ec9 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/base.pyi @@ -0,0 +1,102 @@ +from typing import Any, Callable, Dict, Iterator, List, Optional + +from django.db.backends.base.client import BaseDatabaseClient +from django.db.backends.base.creation import BaseDatabaseCreation +from django.db.backends.base.validation import BaseDatabaseValidation +from django.db.backends.utils import CursorDebugWrapper, CursorWrapper + +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + +from django.db.backends.base.features import BaseDatabaseFeatures + +from django.db.backends.base.introspection import BaseDatabaseIntrospection + +NO_DB_ALIAS: str + +class BaseDatabaseWrapper: + data_types: Any = ... + data_types_suffix: Any = ... + data_type_check_constraints: Any = ... + ops: Any = ... + vendor: str = ... + display_name: str = ... + SchemaEditorClass: Optional[BaseDatabaseSchemaEditor] = ... + client_class: Any = ... + creation_class: Any = ... + features_class: Any = ... + introspection_class: Any = ... + ops_class: Any = ... + validation_class: Any = ... + queries_limit: int = ... + connection: Any = ... + settings_dict: Any = ... + alias: str = ... + queries_log: Any = ... 
+ force_debug_cursor: bool = ... + autocommit: bool = ... + in_atomic_block: bool = ... + savepoint_state: int = ... + savepoint_ids: Any = ... + commit_on_exit: bool = ... + needs_rollback: bool = ... + close_at: Optional[Any] = ... + closed_in_transaction: bool = ... + errors_occurred: bool = ... + allow_thread_sharing: bool = ... + run_on_commit: List[Any] = ... + run_commit_hooks_on_set_autocommit_on: bool = ... + execute_wrappers: List[Any] = ... + client: BaseDatabaseClient = ... + creation: BaseDatabaseCreation = ... + features: BaseDatabaseFeatures = ... + introspection: BaseDatabaseIntrospection = ... + validation: BaseDatabaseValidation = ... + def __init__( + self, settings_dict: Dict[str, Dict[str, str]], alias: str = ..., allow_thread_sharing: bool = ... + ) -> None: ... + def ensure_timezone(self) -> bool: ... + def timezone(self): ... + def timezone_name(self): ... + @property + def queries_logged(self) -> bool: ... + @property + def queries(self) -> List[Dict[str, str]]: ... + def get_connection_params(self) -> None: ... + def get_new_connection(self, conn_params: Any) -> None: ... + def init_connection_state(self) -> None: ... + def create_cursor(self, name: Optional[Any] = ...) -> None: ... + def connect(self) -> None: ... + def check_settings(self) -> None: ... + def ensure_connection(self) -> None: ... + def cursor(self) -> CursorWrapper: ... + def commit(self) -> None: ... + def rollback(self) -> None: ... + def close(self) -> None: ... + def savepoint(self) -> str: ... + def savepoint_rollback(self, sid: str) -> None: ... + def savepoint_commit(self, sid: str) -> None: ... + def clean_savepoints(self) -> None: ... + def get_autocommit(self) -> bool: ... + def set_autocommit(self, autocommit: bool, force_begin_transaction_with_broken_autocommit: bool = ...) -> None: ... + def get_rollback(self) -> bool: ... + def set_rollback(self, rollback: bool) -> None: ... + def validate_no_atomic_block(self) -> None: ... 
+ def validate_no_broken_transaction(self) -> None: ... + def constraint_checks_disabled(self) -> Iterator[None]: ... + def disable_constraint_checking(self): ... + def enable_constraint_checking(self) -> None: ... + def check_constraints(self, table_names: Optional[Any] = ...) -> None: ... + def is_usable(self) -> None: ... + def close_if_unusable_or_obsolete(self) -> None: ... + def validate_thread_sharing(self) -> None: ... + def prepare_database(self) -> None: ... + def wrap_database_errors(self) -> Any: ... + def chunked_cursor(self) -> CursorWrapper: ... + def make_debug_cursor(self, cursor: CursorWrapper) -> CursorDebugWrapper: ... + def make_cursor(self, cursor: CursorWrapper) -> CursorWrapper: ... + def temporary_connection(self) -> None: ... + def schema_editor(self, *args: Any, **kwargs: Any) -> BaseDatabaseSchemaEditor: ... + def on_commit(self, func: Callable) -> None: ... + def run_and_clear_commit_hooks(self) -> None: ... + def execute_wrapper(self, wrapper: Callable) -> Iterator[None]: ... + def copy(self, alias: None = ..., allow_thread_sharing: None = ...) -> Any: ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/client.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/client.pyi new file mode 100644 index 0000000000000000000000000000000000000000..6fdf5cea80946591d2e3c40b31285f3191cacac2 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/client.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from django.db.backends.base.base import BaseDatabaseWrapper + +class BaseDatabaseClient: + executable_name: Any = ... + connection: Any = ... + def __init__(self, connection: BaseDatabaseWrapper) -> None: ... + def runshell(self) -> None: ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/creation.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/creation.pyi new file mode 100644 index 0000000000000000000000000000000000000000..8904232ff14f549f3df7e68c9f56a3d21636d82c --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/creation.pyi @@ -0,0 +1,24 @@ +from typing import Any, Dict, Optional, Tuple, Union + +from django.db.backends.base.base import BaseDatabaseWrapper + +TEST_DATABASE_PREFIX: str + +class BaseDatabaseCreation: + connection: Any = ... + def __init__(self, connection: BaseDatabaseWrapper) -> None: ... + def create_test_db( + self, verbosity: int = ..., autoclobber: bool = ..., serialize: bool = ..., keepdb: bool = ... + ) -> str: ... + def set_as_test_mirror( + self, primary_settings_dict: Dict[str, Optional[Union[Dict[str, None], int, str]]] + ) -> None: ... + def serialize_db_to_string(self) -> str: ... + def deserialize_db_from_string(self, data: str) -> None: ... + def clone_test_db(self, suffix: Any, verbosity: int = ..., autoclobber: bool = ..., keepdb: bool = ...) -> None: ... + def get_test_db_clone_settings(self, suffix: Any): ... + def destroy_test_db( + self, old_database_name: str = ..., verbosity: int = ..., keepdb: bool = ..., suffix: None = ... + ) -> None: ... + def sql_table_creation_suffix(self): ... + def test_db_signature(self) -> Tuple[str, str, str, str]: ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/introspection.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/introspection.pyi new file mode 100644 index 0000000000000000000000000000000000000000..33d128fee6d916e8f2eb46f5a77e0ef0baaaf547 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/introspection.pyi @@ -0,0 +1,27 @@ +from collections import namedtuple +from typing import Any, Dict, List, Optional, Set, Type + +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.utils import CursorWrapper +from django.db.models.base import Model + +TableInfo = namedtuple("TableInfo", ["name", "type"]) + +FieldInfo = namedtuple("FieldInfo", "name type_code display_size internal_size precision scale null_ok default") + +class BaseDatabaseIntrospection: + data_types_reverse: Any = ... + connection: Any = ... + def __init__(self, connection: BaseDatabaseWrapper) -> None: ... + def get_field_type(self, data_type: str, description: FieldInfo) -> str: ... + def table_name_converter(self, name: str) -> str: ... + def column_name_converter(self, name: str) -> str: ... + def table_names(self, cursor: Optional[CursorWrapper] = ..., include_views: bool = ...) -> List[str]: ... + def get_table_list(self, cursor: Any) -> None: ... + def django_table_names(self, only_existing: bool = ..., include_views: bool = ...) -> List[str]: ... + def installed_models(self, tables: List[str]) -> Set[Type[Model]]: ... + def sequence_list(self) -> List[Dict[str, str]]: ... + def get_sequences(self, cursor: Any, table_name: Any, table_fields: Any = ...) -> None: ... + def get_key_columns(self, cursor: Any, table_name: Any) -> None: ... 
+ def get_primary_key_column(self, cursor: Any, table_name: Any): ... + def get_constraints(self, cursor: Any, table_name: Any) -> None: ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/operations.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/operations.pyi new file mode 100644 index 0000000000000000000000000000000000000000..fa9905c43ef3567029bd5475f58b5f2fe07e80a0 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/base/operations.pyi @@ -0,0 +1,104 @@ +from datetime import date, datetime, timedelta, time +from decimal import Decimal +from typing import Any, List, Optional, Sequence, Tuple, Type, Union + +from django.core.management.color import Style +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.utils import CursorWrapper +from django.db.models.base import Model +from django.db.models.expressions import Case, Expression +from django.db.models.sql.compiler import SQLCompiler + +from django.db import DefaultConnectionProxy +from django.db.models.fields import Field + +_Connection = Union[DefaultConnectionProxy, BaseDatabaseWrapper] + +class BaseDatabaseOperations: + compiler_module: str = ... + integer_field_ranges: Any = ... + set_operators: Any = ... + cast_data_types: Any = ... + cast_char_field_without_max_length: Any = ... + PRECEDING: str = ... + FOLLOWING: str = ... + UNBOUNDED_PRECEDING: Any = ... + UNBOUNDED_FOLLOWING: Any = ... + CURRENT_ROW: str = ... + explain_prefix: Any = ... + connection: _Connection = ... + def __init__(self, connection: Optional[_Connection]) -> None: ... + def autoinc_sql(self, table: str, column: str) -> None: ... + def bulk_batch_size(self, fields: Any, objs: Any): ... + def cache_key_culling_sql(self) -> str: ... 
+ def unification_cast_sql(self, output_field: Field) -> str: ... + def date_extract_sql(self, lookup_type: None, field_name: None) -> Any: ... + def date_interval_sql(self, timedelta: None) -> Any: ... + def date_trunc_sql(self, lookup_type: None, field_name: None) -> Any: ... + def datetime_cast_date_sql(self, field_name: None, tzname: None) -> Any: ... + def datetime_cast_time_sql(self, field_name: None, tzname: None) -> Any: ... + def datetime_extract_sql(self, lookup_type: None, field_name: None, tzname: None) -> Any: ... + def datetime_trunc_sql(self, lookup_type: None, field_name: None, tzname: None) -> Any: ... + def time_trunc_sql(self, lookup_type: None, field_name: None) -> Any: ... + def time_extract_sql(self, lookup_type: None, field_name: None) -> Any: ... + def deferrable_sql(self) -> str: ... + def distinct_sql(self, fields: List[str], params: Optional[List[Any]]) -> Tuple[List[str], List[Any]]: ... + def fetch_returned_insert_id(self, cursor: Any): ... + def field_cast_sql(self, db_type: Optional[str], internal_type: str) -> str: ... + def force_no_ordering(self) -> List[Any]: ... + def for_update_sql(self, nowait: bool = ..., skip_locked: bool = ..., of: Any = ...): ... + def limit_offset_sql(self, low_mark: int, high_mark: Optional[int]) -> str: ... + def last_executed_query(self, cursor: Any, sql: Any, params: Any): ... + def last_insert_id(self, cursor: CursorWrapper, table_name: str, pk_name: str) -> int: ... + def lookup_cast(self, lookup_type: str, internal_type: str = ...) -> str: ... + def max_in_list_size(self) -> None: ... + def max_name_length(self) -> None: ... + def no_limit_value(self) -> Any: ... + def pk_default_value(self) -> str: ... + def prepare_sql_script(self, sql: Any): ... + def process_clob(self, value: str) -> str: ... + def return_insert_id(self) -> None: ... + def compiler(self, compiler_name: str) -> Type[SQLCompiler]: ... + def quote_name(self, name: str) -> Any: ... + def random_function_sql(self): ... 
+ def regex_lookup(self, lookup_type: str) -> Any: ... + def savepoint_create_sql(self, sid: str) -> str: ... + def savepoint_commit_sql(self, sid: str) -> str: ... + def savepoint_rollback_sql(self, sid: str) -> str: ... + def set_time_zone_sql(self) -> str: ... + def sql_flush(self, style: None, tables: None, sequences: None, allow_cascade: bool = ...) -> Any: ... + def execute_sql_flush(self, using: str, sql_list: List[str]) -> None: ... + def sequence_reset_by_name_sql(self, style: None, sequences: List[Any]) -> List[Any]: ... + def sequence_reset_sql(self, style: Style, model_list: Sequence[Type[Model]]) -> List[Any]: ... + def start_transaction_sql(self) -> str: ... + def end_transaction_sql(self, success: bool = ...) -> str: ... + def tablespace_sql(self, tablespace: Optional[str], inline: bool = ...) -> str: ... + def prep_for_like_query(self, x: str) -> str: ... + prep_for_iexact_query: Any = ... + def validate_autopk_value(self, value: int) -> int: ... + def adapt_unknown_value(self, value: Any) -> Any: ... + def adapt_datefield_value(self, value: Optional[date]) -> Optional[str]: ... + def adapt_datetimefield_value(self, value: Optional[datetime]) -> Optional[str]: ... + def adapt_timefield_value(self, value: Optional[Union[datetime, time]]) -> Optional[str]: ... + def adapt_decimalfield_value( + self, value: Optional[Decimal], max_digits: Optional[int] = ..., decimal_places: Optional[int] = ... + ) -> Optional[str]: ... + def adapt_ipaddressfield_value(self, value: Optional[str]) -> Optional[str]: ... + def year_lookup_bounds_for_date_field(self, value: int) -> List[str]: ... + def year_lookup_bounds_for_datetime_field(self, value: int) -> List[str]: ... + def get_db_converters(self, expression: Expression) -> List[Any]: ... + def convert_durationfield_value( + self, value: Optional[float], expression: Expression, connection: _Connection + ) -> Optional[timedelta]: ... + def check_expression_support(self, expression: Any) -> None: ... 
+ def combine_expression(self, connector: str, sub_expressions: List[str]) -> str: ... + def combine_duration_expression(self, connector: Any, sub_expressions: Any): ... + def binary_placeholder_sql(self, value: Optional[Case]) -> str: ... + def modify_insert_params(self, placeholder: str, params: Any) -> Any: ... + def integer_field_range(self, internal_type: Any): ... + def subtract_temporals(self, internal_type: Any, lhs: Any, rhs: Any): ... + def window_frame_start(self, start: Any): ... + def window_frame_end(self, end: Any): ... + def window_frame_rows_start_end(self, start: Optional[int] = ..., end: Optional[int] = ...) -> Any: ... + def window_frame_range_start_end(self, start: Optional[int] = ..., end: Optional[int] = ...) -> Any: ... + def explain_query_prefix(self, format: Optional[str] = ..., **options: Any) -> str: ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/dummy/__init__.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/dummy/__init__.pyi new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/dummy/base.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/dummy/base.pyi new file mode 100644 index 0000000000000000000000000000000000000000..93dd162423a4a55b36e6dbaaf48391c71fdfd7f5 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/dummy/base.pyi @@ -0,0 +1,31 @@ +from typing import Any + +from django.db.backends.base.base import BaseDatabaseWrapper +from django.db.backends.base.client import BaseDatabaseClient +from 
django.db.backends.base.creation import BaseDatabaseCreation +from django.db.backends.base.introspection import BaseDatabaseIntrospection +from django.db.backends.base.operations import BaseDatabaseOperations + +def complain(*args: Any, **kwargs: Any) -> Any: ... +def ignore(*args: Any, **kwargs: Any) -> None: ... + +class DatabaseOperations(BaseDatabaseOperations): + quote_name: Any = ... + +class DatabaseClient(BaseDatabaseClient): + runshell: Any = ... + +class DatabaseCreation(BaseDatabaseCreation): + create_test_db: Any = ... + destroy_test_db: Any = ... + +class DatabaseIntrospection(BaseDatabaseIntrospection): + get_table_list: Any = ... + get_table_description: Any = ... + get_relations: Any = ... + get_indexes: Any = ... + get_key_columns: Any = ... + +class DatabaseWrapper(BaseDatabaseWrapper): + operators: Any = ... + ensure_connection: Any = ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/features.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/features.pyi new file mode 100644 index 0000000000000000000000000000000000000000..bb77363578c98763964b35dde345ee8e354e7e00 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/features.pyi @@ -0,0 +1,3 @@ +from django.db.backends.base.features import BaseDatabaseFeatures + +class DatabaseFeatures(BaseDatabaseFeatures): ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/operations.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/operations.pyi new file mode 100644 index 0000000000000000000000000000000000000000..359719cf5dcdd4d9fa5e0a1f4b602f3070d05e45 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/django-stubs/django-stubs/db/backends/sqlite3/operations.pyi @@ -0,0 +1,3 @@ +from django.db.backends.base.operations import BaseDatabaseOperations + +class DatabaseOperations(BaseDatabaseOperations): ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/base_futures.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/base_futures.pyi new file mode 100644 index 0000000000000000000000000000000000000000..270a69685c2445ae6eb1b9819a7541fd5645dcac --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/base_futures.pyi @@ -0,0 +1,22 @@ +import sys +from typing import Any, Callable, List, Sequence, Tuple +from typing_extensions import Literal + +if sys.version_info >= (3, 7): + from contextvars import Context + +from . import futures + +_PENDING: Literal["PENDING"] # undocumented +_CANCELLED: Literal["CANCELLED"] # undocumented +_FINISHED: Literal["FINISHED"] # undocumented + +def isfuture(obj: object) -> bool: ... + +if sys.version_info >= (3, 7): + def _format_callbacks(cb: Sequence[Tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented + +else: + def _format_callbacks(cb: Sequence[Callable[[futures.Future[Any]], None]]) -> str: ... # undocumented + +def _future_repr_info(future: futures.Future[Any]) -> List[str]: ... 
# undocumented diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/events.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/events.pyi new file mode 100644 index 0000000000000000000000000000000000000000..c61a96eea0e1e625998c4d2233fc976d45a9f2c0 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/events.pyi @@ -0,0 +1,502 @@ +import ssl +import sys +from _typeshed import FileDescriptorLike +from abc import ABCMeta, abstractmethod +from asyncio.futures import Future +from asyncio.protocols import BaseProtocol +from asyncio.tasks import Task +from asyncio.transports import BaseTransport +from asyncio.unix_events import AbstractChildWatcher +from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket +from typing import IO, Any, Awaitable, Callable, Dict, Generator, List, Optional, Sequence, Tuple, TypeVar, Union, overload + +if sys.version_info >= (3, 7): + from contextvars import Context + +_T = TypeVar("_T") +_Context = Dict[str, Any] +_ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] +_ProtocolFactory = Callable[[], BaseProtocol] +_SSLContext = Union[bool, None, ssl.SSLContext] +_TransProtPair = Tuple[BaseTransport, BaseProtocol] + +class Handle: + _cancelled = False + _args: Sequence[Any] + if sys.version_info >= (3, 7): + def __init__( + self, callback: Callable[..., Any], args: Sequence[Any], loop: AbstractEventLoop, context: Optional[Context] = ... + ) -> None: ... + else: + def __init__(self, callback: Callable[..., Any], args: Sequence[Any], loop: AbstractEventLoop) -> None: ... + def __repr__(self) -> str: ... + def cancel(self) -> None: ... + def _run(self) -> None: ... + if sys.version_info >= (3, 7): + def cancelled(self) -> bool: ... 
+ +class TimerHandle(Handle): + if sys.version_info >= (3, 7): + def __init__( + self, + when: float, + callback: Callable[..., Any], + args: Sequence[Any], + loop: AbstractEventLoop, + context: Optional[Context] = ..., + ) -> None: ... + else: + def __init__(self, when: float, callback: Callable[..., Any], args: Sequence[Any], loop: AbstractEventLoop) -> None: ... + def __hash__(self) -> int: ... + if sys.version_info >= (3, 7): + def when(self) -> float: ... + +class AbstractServer: + sockets: Optional[List[socket]] + def close(self) -> None: ... + if sys.version_info >= (3, 7): + async def __aenter__(self: _T) -> _T: ... + async def __aexit__(self, *exc: Any) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def is_serving(self) -> bool: ... + async def start_serving(self) -> None: ... + async def serve_forever(self) -> None: ... + async def wait_closed(self) -> None: ... + +class AbstractEventLoop(metaclass=ABCMeta): + slow_callback_duration: float = ... + @abstractmethod + def run_forever(self) -> None: ... + # Can't use a union, see mypy issue # 1873. + @overload + @abstractmethod + def run_until_complete(self, future: Generator[Any, None, _T]) -> _T: ... + @overload + @abstractmethod + def run_until_complete(self, future: Awaitable[_T]) -> _T: ... + @abstractmethod + def stop(self) -> None: ... + @abstractmethod + def is_running(self) -> bool: ... + @abstractmethod + def is_closed(self) -> bool: ... + @abstractmethod + def close(self) -> None: ... + if sys.version_info >= (3, 6): + @abstractmethod + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + @abstractmethod + def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... + @abstractmethod + def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... + @abstractmethod + def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... 
+ @abstractmethod + def time(self) -> float: ... + # Future methods + @abstractmethod + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 8): + @abstractmethod + def create_task(self, coro: Union[Awaitable[_T], Generator[Any, None, _T]], *, name: Optional[str] = ...) -> Task[_T]: ... + else: + @abstractmethod + def create_task(self, coro: Union[Awaitable[_T], Generator[Any, None, _T]]) -> Task[_T]: ... + @abstractmethod + def set_task_factory( + self, factory: Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]] + ) -> None: ... + @abstractmethod + def get_task_factory(self) -> Optional[Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]]]: ... + # Methods for interacting with threads + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... + @abstractmethod + def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Awaitable[_T]: ... + @abstractmethod + def set_default_executor(self, executor: Any) -> None: ... + # Network I/O methods returning Futures. + @abstractmethod + async def getaddrinfo( + self, + host: Optional[str], + port: Union[str, int, None], + *, + family: int = ..., + type: int = ..., + proto: int = ..., + flags: int = ..., + ) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]: ... + @abstractmethod + async def getnameinfo( + self, sockaddr: Union[Tuple[str, int], Tuple[str, int, int, int]], flags: int = ... + ) -> Tuple[str, str]: ... 
+ if sys.version_info >= (3, 8): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: None = ..., + local_addr: Optional[Tuple[str, int]] = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + happy_eyeballs_delay: Optional[float] = ..., + interleave: Optional[int] = ..., + ) -> _TransProtPair: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: socket, + local_addr: None = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + happy_eyeballs_delay: Optional[float] = ..., + interleave: Optional[int] = ..., + ) -> _TransProtPair: ... + elif sys.version_info >= (3, 7): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: None = ..., + local_addr: Optional[Tuple[str, int]] = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + ) -> _TransProtPair: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: socket, + local_addr: None = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + ) -> _TransProtPair: ... 
+ else: + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: None = ..., + local_addr: Optional[Tuple[str, int]] = ..., + server_hostname: Optional[str] = ..., + ) -> _TransProtPair: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + ssl: _SSLContext = ..., + family: int = ..., + proto: int = ..., + flags: int = ..., + sock: socket, + local_addr: None = ..., + server_hostname: Optional[str] = ..., + ) -> _TransProtPair: ... + if sys.version_info >= (3, 7): + @abstractmethod + async def sock_sendfile( + self, sock: socket, file: IO[bytes], offset: int = ..., count: Optional[int] = ..., *, fallback: bool = ... + ) -> int: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: Optional[Union[str, Sequence[str]]] = ..., + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: Optional[bool] = ..., + reuse_port: Optional[bool] = ..., + ssl_handshake_timeout: Optional[float] = ..., + start_serving: bool = ..., + ) -> AbstractServer: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: Optional[bool] = ..., + reuse_port: Optional[bool] = ..., + ssl_handshake_timeout: Optional[float] = ..., + start_serving: bool = ..., + ) -> AbstractServer: ... 
+ async def create_unix_connection( + self, + protocol_factory: _ProtocolFactory, + path: Optional[str] = ..., + *, + ssl: _SSLContext = ..., + sock: Optional[socket] = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + ) -> _TransProtPair: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: Optional[str] = ..., + *, + sock: Optional[socket] = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + ssl_handshake_timeout: Optional[float] = ..., + start_serving: bool = ..., + ) -> AbstractServer: ... + @abstractmethod + async def sendfile( + self, + transport: BaseTransport, + file: IO[bytes], + offset: int = ..., + count: Optional[int] = ..., + *, + fallback: bool = ..., + ) -> int: ... + @abstractmethod + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = ..., + server_hostname: Optional[str] = ..., + ssl_handshake_timeout: Optional[float] = ..., + ) -> BaseTransport: ... + else: + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: Optional[Union[str, Sequence[str]]] = ..., + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: Optional[bool] = ..., + reuse_port: Optional[bool] = ..., + ) -> AbstractServer: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = ..., + port: None = ..., + *, + family: int = ..., + flags: int = ..., + sock: socket, + backlog: int = ..., + ssl: _SSLContext = ..., + reuse_address: Optional[bool] = ..., + reuse_port: Optional[bool] = ..., + ) -> AbstractServer: ... 
+ async def create_unix_connection( + self, + protocol_factory: _ProtocolFactory, + path: str, + *, + ssl: _SSLContext = ..., + sock: Optional[socket] = ..., + server_hostname: Optional[str] = ..., + ) -> _TransProtPair: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: str, + *, + sock: Optional[socket] = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + ) -> AbstractServer: ... + @abstractmethod + async def create_datagram_endpoint( + self, + protocol_factory: _ProtocolFactory, + local_addr: Optional[Tuple[str, int]] = ..., + remote_addr: Optional[Tuple[str, int]] = ..., + *, + family: int = ..., + proto: int = ..., + flags: int = ..., + reuse_address: Optional[bool] = ..., + reuse_port: Optional[bool] = ..., + allow_broadcast: Optional[bool] = ..., + sock: Optional[socket] = ..., + ) -> _TransProtPair: ... + # Pipes and subprocesses. + @abstractmethod + async def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... + @abstractmethod + async def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... + @abstractmethod + async def subprocess_shell( + self, + protocol_factory: _ProtocolFactory, + cmd: Union[bytes, str], + *, + stdin: Any = ..., + stdout: Any = ..., + stderr: Any = ..., + **kwargs: Any, + ) -> _TransProtPair: ... + @abstractmethod + async def subprocess_exec( + self, + protocol_factory: _ProtocolFactory, + *args: Any, + stdin: Any = ..., + stdout: Any = ..., + stderr: Any = ..., + **kwargs: Any, + ) -> _TransProtPair: ... + @abstractmethod + def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + @abstractmethod + def remove_reader(self, fd: FileDescriptorLike) -> None: ... + @abstractmethod + def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... + @abstractmethod + def remove_writer(self, fd: FileDescriptorLike) -> None: ... 
+ # Completion based I/O methods returning Futures prior to 3.7 + if sys.version_info >= (3, 7): + @abstractmethod + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + @abstractmethod + async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... + @abstractmethod + async def sock_sendall(self, sock: socket, data: bytes) -> None: ... + @abstractmethod + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + @abstractmethod + async def sock_accept(self, sock: socket) -> Tuple[socket, _RetAddress]: ... + else: + @abstractmethod + def sock_recv(self, sock: socket, nbytes: int) -> Future[bytes]: ... + @abstractmethod + def sock_sendall(self, sock: socket, data: bytes) -> Future[None]: ... + @abstractmethod + def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... + @abstractmethod + def sock_accept(self, sock: socket) -> Future[Tuple[socket, _RetAddress]]: ... + # Signal handling. + @abstractmethod + def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... + @abstractmethod + def remove_signal_handler(self, sig: int) -> None: ... + # Error handlers. + @abstractmethod + def set_exception_handler(self, handler: Optional[_ExceptionHandler]) -> None: ... + @abstractmethod + def get_exception_handler(self) -> Optional[_ExceptionHandler]: ... + @abstractmethod + def default_exception_handler(self, context: _Context) -> None: ... + @abstractmethod + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + @abstractmethod + def get_debug(self) -> bool: ... + @abstractmethod + def set_debug(self, enabled: bool) -> None: ... + if sys.version_info >= (3, 9): + @abstractmethod + async def shutdown_default_executor(self) -> None: ... + +class AbstractEventLoopPolicy(metaclass=ABCMeta): + @abstractmethod + def get_event_loop(self) -> AbstractEventLoop: ... 
+ @abstractmethod + def set_event_loop(self, loop: Optional[AbstractEventLoop]) -> None: ... + @abstractmethod + def new_event_loop(self) -> AbstractEventLoop: ... + # Child processes handling (Unix only). + @abstractmethod + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... + +class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): + def __init__(self) -> None: ... + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: Optional[AbstractEventLoop]) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... + +def get_event_loop_policy() -> AbstractEventLoopPolicy: ... +def set_event_loop_policy(policy: Optional[AbstractEventLoopPolicy]) -> None: ... +def get_event_loop() -> AbstractEventLoop: ... +def set_event_loop(loop: Optional[AbstractEventLoop]) -> None: ... +def new_event_loop() -> AbstractEventLoop: ... +def get_child_watcher() -> AbstractChildWatcher: ... +def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... +def _set_running_loop(__loop: Optional[AbstractEventLoop]) -> None: ... +def _get_running_loop() -> AbstractEventLoop: ... + +if sys.version_info >= (3, 7): + def get_running_loop() -> AbstractEventLoop: ... + if sys.version_info < (3, 8): + class SendfileNotAvailableError(RuntimeError): ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/format_helpers.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/format_helpers.pyi new file mode 100644 index 0000000000000000000000000000000000000000..5f2baf7b6c59977fde40eb9e782f62c478e8e88f --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/format_helpers.pyi @@ -0,0 +1,20 @@ +import functools +import sys +import traceback +from types import FrameType, FunctionType +from typing import Any, Dict, Iterable, Optional, Tuple, Union, overload + +class _HasWrapper: + __wrapper__: Union[_HasWrapper, FunctionType] + +_FuncType = Union[FunctionType, _HasWrapper, functools.partial, functools.partialmethod] + +if sys.version_info >= (3, 7): + @overload + def _get_function_source(func: _FuncType) -> Tuple[str, int]: ... + @overload + def _get_function_source(func: object) -> Optional[Tuple[str, int]]: ... + def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... + def _format_args_and_kwargs(args: Iterable[Any], kwargs: Dict[str, Any]) -> str: ... + def _format_callback(func: object, args: Iterable[Any], kwargs: Dict[str, Any], suffix: str = ...) -> str: ... + def extract_stack(f: Optional[FrameType] = ..., limit: Optional[int] = ...) -> traceback.StackSummary: ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/log.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/log.pyi new file mode 100644 index 0000000000000000000000000000000000000000..e1de0b3bb845e76087dbf3b3537299a409384c1e --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/log.pyi @@ -0,0 +1,3 @@ +import logging + +logger: logging.Logger diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/queues.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/queues.pyi new file mode 100644 index 0000000000000000000000000000000000000000..2d4bada035b39e917ca57d033343481b76d4a60f --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/asyncio/queues.pyi @@ -0,0 +1,36 @@ +import sys +from asyncio.events import AbstractEventLoop +from typing import Any, Generic, Optional, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +class QueueEmpty(Exception): ... +class QueueFull(Exception): ... + +_T = TypeVar("_T") + +class Queue(Generic[_T]): + def __init__(self, maxsize: int = ..., *, loop: Optional[AbstractEventLoop] = ...) -> None: ... + def _init(self, maxsize: int) -> None: ... + def _get(self) -> _T: ... + def _put(self, item: _T) -> None: ... + def __repr__(self) -> str: ... + def __str__(self) -> str: ... + def _format(self) -> str: ... + def qsize(self) -> int: ... + @property + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + async def put(self, item: _T) -> None: ... + def put_nowait(self, item: _T) -> None: ... + async def get(self) -> _T: ... + def get_nowait(self) -> _T: ... 
+ async def join(self) -> None: ... + def task_done(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, type: Any) -> GenericAlias: ... + +class PriorityQueue(Queue[_T]): ... +class LifoQueue(Queue[_T]): ... diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/html/parser.pyi b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/html/parser.pyi new file mode 100644 index 0000000000000000000000000000000000000000..31240f78c58e663323c364455a47602528b7f0ec --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/third_party/typeshed/stdlib/3/html/parser.pyi @@ -0,0 +1,20 @@ +from _markupbase import ParserBase +from typing import List, Optional, Tuple + +class HTMLParser(ParserBase): + def __init__(self, *, convert_charrefs: bool = ...) -> None: ... + def feed(self, feed: str) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + def getpos(self) -> Tuple[int, int]: ... + def get_starttag_text(self) -> Optional[str]: ... + def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: ... + def handle_endtag(self, tag: str) -> None: ... + def handle_startendtag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None: ... + def handle_data(self, data: str) -> None: ... + def handle_entityref(self, name: str) -> None: ... + def handle_charref(self, name: str) -> None: ... + def handle_comment(self, data: str) -> None: ... + def handle_decl(self, decl: str) -> None: ... + def handle_pi(self, data: str) -> None: ... + def unknown_decl(self, data: str) -> None: ... 
diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/__init__.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0519860a3de21674d29433b9162025b8c1bf0d95 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/__init__.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/_version.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/_version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10fa2b19dc1a3886893da59377fb96bde9a43a36 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/_version.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/cli.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/cli.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d31f24083928c6c0596a60f6baa4a37b52fcfca4 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/cli.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/logger.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/logger.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..502798cfc3ce102cb891a8914f01a630ca8bf577 Binary files /dev/null and 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/logger.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/pytest_plugin.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d8927bd73798dc2867f27b9e19f07466d2a87628 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/pytest_plugin.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..755be59ccae200fd196f38058f58b0ec46bce1e3 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema_registry.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema_registry.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78a1a136a243ca248d7517ff92e74c9a6dbfd053 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/schema_registry.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/traits.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/traits.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..fca924cea389e88d7b304f675ddd53ad25cb2fc8 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/traits.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/utils.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef8f5e21355bac1414d0dd32f201c006bd0e0186 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/utils.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/validators.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/validators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..022776f1146f490a171b287b078bbe83f0b897bd Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/validators.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/yaml.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/yaml.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88e15f1f0a63c134bad429e7165af1dc5af313cd Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/__pycache__/yaml.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/schemas/event-core-schema.yml 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/schemas/event-core-schema.yml new file mode 100644 index 0000000000000000000000000000000000000000..b5ef5007a63736d2a9c3bdc4b0d0ab9260ec08ef --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jupyter_events/schemas/event-core-schema.yml @@ -0,0 +1,27 @@ +$schema: http://json-schema.org/draft-07/schema +$id: http://event.jupyter.org/event-schema +version: "1" +title: Event Schema +description: | + A schema for validating any Jupyter Event. +type: object +properties: + __metadata_version__: + title: Metadata Version + type: number + const: 1 + __schema_version__: + title: Schema Version + type: string + __schema__: + title: Schema ID + type: string + __timestamp__: + title: Event Timestamp + type: string + format: datetime +required: + - __metadata_version__ + - __schema__ + - __schema_version__ + - __timestamp__ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/__init__.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/README.md b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/README.md new file mode 100644 index 0000000000000000000000000000000000000000..16735aa5ca9ec2f7c96a91233ac2632b023b45a4 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/README.md @@ -0,0 +1,21 @@ +# Data set simulation + +This tool contains tools for creating large data sets of synthetic (yet realistic) transition-metal XP spectra base on reference data. 
Exemplary JSON files containing the parameters used during simulation available in the [`params`](https://github.com/surfaceanalytics/xpsdeeplearning/tree/main/simulation/params) subfolder. The reference data that is used as input for the simulation is available inside [`data`](https://github.com/surfaceanalytics/xpsdeeplearning/tree/main/data). + +# Usage +## Data simulation +```console +user@box:~$ simulate --help +Usage: simulate [OPTIONS] + + The CLI entrypoint for the convert function + +Options: + --param-file TEXT The path to the input parameter file to + read. [required] + --reload-from-previous-folder TEXT + The path to a previous run which is to be + continued. + --help Show this message and exit. + + diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/__init__.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/__init__.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f2a21095d6fd34ab7aca6a1de783eafe2667a81 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/__init__.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/figures.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/figures.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..9d759878826eb4acce65241aa797c8c2d16e393e Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/__pycache__/figures.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/__init__.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c0246a23f517f2adfbba80bfb5ee5e0e792d879 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/__init__.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/data_converter.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/data_converter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..504f65eee00b8b43df1d630bb781071b212e5743 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/data_converter.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/text_parser.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/text_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04f501a934bfeb6578ed18895b97c599db865b57 Binary files /dev/null and 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/text_parser.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da6d34e17d99b9204ad167ef01aa1201ec12946a Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas_parser.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas_parser.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28e0318b0140efd8033c9d81a8367c39c3644ee6 Binary files /dev/null and b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/vamas_parser.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/writers.cpython-310.pyc b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/writers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8dbd90ac78843c7a704ac42530d14fccd2f6624 Binary files /dev/null and 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/__pycache__/writers.cpython-310.pyc differ diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/vamas_parser.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/vamas_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..fe11e57673bac72ef50f2bcb49102cee373c76e9 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/vamas_parser.py @@ -0,0 +1,606 @@ +# +# Copyright the xpsdeeplearning authors. +# +# This file is part of xpsdeeplearning. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +Dataconverter for XPS data in vms format. +""" + +import numpy as np + +from xpsdeeplearning.simulation.base_model.converters.vamas import Block, VamasHeader + + +class VamasParser: + """Parser for XPS data stored in Vamas files.""" + + def __init__(self): + """ + Initialize VamasHeader and blocks list. + + Returns + ------- + None. 
+ + """ + self.header = VamasHeader() + self.blocks = [] + self.common_header_attr = [ + "formatID", + "instituteID", + "instrumentModelID", + "operatorID", + "experimentID", + "noCommentLines", + ] + + self.exp_var_attributes = ["expVarLabel", "expVarUnit"] + + self.norm_header_attr = [ + "scanMode", + "nrRegions", + "nrExpVar", + "unknown3", + "unknown4", + "unknown5", + "unknown6", + "noBlocks", + ] + + self.map_header_attr = [ + "scanMode", + "nrRegions", + "nr_positions", + "nr_x_coords", + "nr_y_coords", + "nrExpVar", + "unknown3", + "unknown4", + "unknown5", + "unknown6", + "noBlocks", + ] + + self.norm_block_attr = [ + "blockID", + "sampleID", + "year", + "month", + "day", + "hour", + "minute", + "second", + "noHrsInAdvanceOfGMT", + "noCommentLines", + "commentLines", + "technique", + "expVarValue", + "sourceLabel", + "sourceEnergy", + "unknown1", + "unknown2", + "unknown3", + "sourceAnalyzerAngle", + "unknown4", + "analyzerMode", + "resolution", + "magnification", + "workFunction", + "targetBias", + "analyzerWidthX", + "analyzerWidthY", + "analyzerTakeOffPolarAngle", + "analyzerAzimuth", + "speciesLabel", + "transitionLabel", + "particleCharge", + "abscissaLabel", + "abscissaUnits", + "abscissaStart", + "abscissaStep", + "noVariables", + "variableLabel1", + "variableUnits1", + "variableLabel2", + "variableUnits2", + "signalMode", + "dwellTime", + "noScans", + "timeCorrection", + "sampleAngleTilt", + "sampleTiltAzimuth", + "sampleRotation", + "noAdditionalParams", + "paramLabel1", + "paramUnit1", + "paramValue1", + "paramLabel2", + "paramUnit2", + "paramValue2", + "numOrdValues", + "minOrdValue1", + "maxOrdValue1", + "minOrdValue2", + "maxOrdValue2", + "dataString", + ] + + self.map_block_attr = [ + "blockID", + "sampleID", + "year", + "month", + "day", + "hour", + "minute", + "second", + "noHrsInAdvanceOfGMT", + "noCommentLines", + "commentLines", + "technique", + "x_coord", + "y_coord", + "expVarValue", + "sourceLabel", + "sourceEnergy", + "unknown1", + 
"unknown2", + "unknown3", + "fov_x", + "fovy", + "sourceAnalyzerAngle", + "unknown4", + "analyzerMode", + "resolution", + "magnification", + "workFunction", + "targetBias", + "analyzerWidthX", + "analyzerWidthY", + "analyzerTakeOffPolarAngle", + "analyzerAzimuth", + "speciesLabel", + "transitionLabel", + "particleCharge", + "abscissaLabel", + "abscissaUnits", + "abscissaStart", + "abscissaStep", + "noVariables", + "variableLabel1", + "variableUnits1", + "variableLabel2", + "variableUnits2", + "signalMode", + "dwellTime", + "noScans", + "timeCorrection", + "sampleAngleTilt", + "sampleTiltAzimuth", + "sampleRotation", + "noAdditionalParams", + "paramLabel1", + "paramUnit1", + "paramValue1", + "paramLabel2", + "paramUnit2", + "paramValue2", + "numOrdValues", + "minOrdValue1", + "maxOrdValue1", + "minOrdValue2", + "maxOrdValue2", + "dataString", + ] + + def parse_file(self, filepath): + """ + Parse .xy into a list of dictionaries caleld "self.data". + + Each dictionary is a grouping of related attributes. + These are later put into a hierarchical nested dictionary that + represents the native data structure of the export, and is + well represented by JSON. + """ + self._read_lines(filepath) + self._parse_header() + self._parse_blocks() + return self._build_dict() + + def _read_lines(self, filepath): + """ + Read all lines in the Vamas file. + + Parameters + ---------- + filepath : str + Has to be a .vms file. + + Returns + ------- + None. + + """ + self.data = [] + self.filepath = filepath + + with open(filepath, "rb") as vms_file: + for line in vms_file: + if line.endswith(b"\r\n") or line.endswith(b"\n"): + self.data += [line.decode("utf-8").strip()] + + def _parse_header(self): + """ + Parse the vamas header into a VamasHeader object. + + The common_header_attr are the header attributes that are common + to both types of Vamas format (NORM and MAP). + + Returns + ------- + None. 
+ + """ + for attr in self.common_header_attr: + setattr(self.header, attr, self.data.pop(0).strip()) + n = int(self.header.noCommentLines) + comments = "" + for _ in range(n): + comments += self.data.pop(0) + self.header.commentLines = comments + self.header.expMode = self.data.pop(0).strip() + if self.header.expMode == "NORM": + for attr in self.norm_header_attr: + setattr(self.header, attr, self.data.pop(0).strip()) + if attr == "nrExpVar": + self._add_exp_var() + + elif self.header.expMode == "MAP": + for attr in self.map_header_attr: + setattr(self.header, attr, self.data.pop(0).strip()) + if attr == "nrExpVar": + self._add_exp_var() + + def _add_exp_var(self): + """ + Add the attribute exp_var to the VamasHeader. + + Returns + ------- + None. + + """ + for _ in range(int(self.header.nrExpVar)): + for attr in self.exp_var_attributes: + setattr(self.header, attr, self.data.pop(0).strip()) + + def _parse_blocks(self): + """ + Parse all blocks in the vamas data. + + Returns + ------- + None. + + """ + for _ in range(int(self.header.noBlocks)): + self._parseOneBlock() + + def _parseOneBlock(self): + """ + Parse one block of vamas data. + + Depending on the experimental mode, a differnt method is used. + + Returns + ------- + None. + + """ + if self.header.expMode == "NORM": + self.blocks += [self._parse_NORM_Block()] + elif self.header.expMode == "MAP": + self.blocks += [self._parse_MAP_block()] + + def _parse_NORM_Block(self): + """ + Parse a NORM block from Vamas. + + Returns + ------- + block : vamas.BLOCK + A Block object containing all data from one VAMAS block. 
+ + """ + # start = time.time() + block = Block() + # stop = time.time() + # print("Block instantiated in time: " + str(stop-start)) + + # start = time.time() + + block.blockID = self.data.pop(0).strip() + block.sampleID = self.data.pop(0).strip() + block.year = int(self.data.pop(0).strip()) + block.month = int(self.data.pop(0).strip()) + block.day = int(self.data.pop(0).strip()) + block.hour = int(self.data.pop(0).strip()) + block.minute = int(self.data.pop(0).strip()) + block.second = int(self.data.pop(0).strip()) + block.noHrsInAdvanceOfGMT = int(self.data.pop(0).strip()) + block.noCommentLines = int(self.data.pop(0).strip()) + for _ in range(block.noCommentLines): + block.commentLines += self.data.pop(0) + block.technique = self.data.pop(0).strip() + for _ in range(int(self.header.nrExpVar)): + block.expVarValue = self.data.pop(0).strip() + block.sourceLabel = self.data.pop(0).strip() + block.sourceEnergy = float(self.data.pop(0).strip()) + block.unknown1 = self.data.pop(0).strip() + block.unknown2 = self.data.pop(0).strip() + block.unknown3 = self.data.pop(0).strip() + block.sourceAnalyzerAngle = self.data.pop(0).strip() + block.unknown4 = self.data.pop(0).strip() + block.analyzerMode = self.data.pop(0).strip() + block.resolution = float(self.data.pop(0).strip()) + block.magnification = self.data.pop(0).strip() + block.workFunction = float(self.data.pop(0).strip()) + block.targetBias = float(self.data.pop(0).strip()) + block.analyzerWidthX = self.data.pop(0).strip() + block.analyzerWidthY = self.data.pop(0).strip() + block.analyzerTakeOffPolarAngle = self.data.pop(0).strip() + block.analyzerAzimuth = self.data.pop(0).strip() + block.speciesLabel = self.data.pop(0).strip() + block.transitionLabel = self.data.pop(0).strip() + block.particleCharge = self.data.pop(0).strip() + block.abscissaLabel = self.data.pop(0).strip() + block.abscissaUnits = self.data.pop(0).strip() + block.abscissaStart = float(self.data.pop(0).strip()) + block.abscissaStep = 
float(self.data.pop(0).strip()) + block.noVariables = int(self.data.pop(0).strip()) + for param in range(block.noVariables): + name = "variableLabel" + str(param + 1) + setattr(block, name, self.data.pop(0).strip()) + name = "variableUnits" + str(param + 1) + setattr(block, name, self.data.pop(0).strip()) + block.signalMode = self.data.pop(0).strip() + block.dwellTime = float(self.data.pop(0).strip()) + block.noScans = int(self.data.pop(0).strip()) + block.timeCorrection = self.data.pop(0).strip() + block.sampleAngleTilt = float(self.data.pop(0).strip()) + block.sampleTiltAzimuth = float(self.data.pop(0).strip()) + block.sampleRotation = float(self.data.pop(0).strip()) + block.noAdditionalParams = int(self.data.pop(0).strip()) + for param in range(block.noAdditionalParams): + name = "paramLabel" + str(param + 1) + setattr(block, name, self.data.pop(0)) + name = "paramUnit" + str(param + 1) + setattr(block, name, self.data.pop(0)) + name = "paramValue" + str(param + 1) + setattr(block, name, self.data.pop(0)) + block.numOrdValues = int(self.data.pop(0).strip()) + for param in range(block.noVariables): + name = "minOrdValue" + str(param + 1) + setattr(block, name, float(self.data.pop(0).strip())) + name = "maxOrdValue" + str(param + 1) + setattr(block, name, float(self.data.pop(0).strip())) + + # stop = time.time() + # print("Block metadata added in time: " + str(stop-start)) + + # start = time.time() + self._add_data_values(block) + # stop = time.time() + # print("Block data added in time: " + str(stop-start)) + + return block + + def _parse_MAP_block(self): + """ + Parse a MAP block from Vamas. + + Returns + ------- + block : vamas.BLOCK + A Block object containing all data from one VAMAS block. 
+ + """ + block = Block() + block.blockID = self.data.pop(0).strip() + block.sampleID = self.data.pop(0).strip() + block.year = int(self.data.pop(0).strip()) + block.month = int(self.data.pop(0).strip()) + block.day = int(self.data.pop(0).strip()) + block.hour = int(self.data.pop(0).strip()) + block.minute = int(self.data.pop(0).strip()) + block.second = int(self.data.pop(0).strip()) + block.noHrsInAdvanceOfGMT = int(self.data.pop(0).strip()) + block.noCommentLines = int(self.data.pop(0).strip()) + for _ in range(block.noCommentLines): + self.data.pop(0) + block.commentLines += self.data.pop(0) + block.technique = self.data.pop(0).strip() + block.x_coord = self.data.pop(0).strip() + block.y_coord = self.data.pop(0).strip() + block.expVarValue = self.data.pop(0).strip() + block.sourceLabel = self.data.pop(0).strip() + block.sourceEnergy = float(self.data.pop(0).strip()) + block.unknown1 = self.data.pop(0).strip() + block.unknown2 = self.data.pop(0).strip() + block.unknown3 = self.data.pop(0).strip() + block.fov_x = self.data.pop(0).strip() + block.fov_y = self.data.pop(0).strip() + block.sourceAnalyzerAngle = self.data.pop(0).strip() + block.unknown4 = self.data.pop(0).strip() + block.analyzerMode = self.data.pop(0).strip() + block.resolution = float(self.data.pop(0).strip()) + block.magnification = self.data.pop(0).strip() + block.workFunction = float(self.data.pop(0).strip()) + block.targetBias = float(self.data.pop(0).strip()) + block.analyzerWidthX = self.data.pop(0).strip() + block.analyzerWidthY = self.data.pop(0).strip() + block.analyzerTakeOffPolarAngle = self.data.pop(0).strip() + block.analyzerAzimuth = self.data.pop(0).strip() + block.speciesLabel = self.data.pop(0).strip() + block.transitionLabel = self.data.pop(0).strip() + block.particleCharge = self.data.pop(0).strip() + block.abscissaLabel = self.data.pop(0).strip() + block.abscissaUnits = self.data.pop(0).strip() + block.abscissaStart = float(self.data.pop(0).strip()) + block.abscissaStep = 
float(self.data.pop(0).strip()) + block.noVariables = int(self.data.pop(0).strip()) + for p in range(block.noVariables): + name = "variableLabel" + str(p + 1) + setattr(block, name, self.data.pop(0).strip()) + name = "variableUnits" + str(p + 1) + setattr(block, name, self.data.pop(0).strip()) + block.signalMode = self.data.pop(0).strip() + block.dwellTime = float(self.data.pop(0).strip()) + block.noScans = int(self.data.pop(0).strip()) + block.timeCorrection = self.data.pop(0).strip() + block.sampleAngleTilt = float(self.data.pop(0).strip()) + block.sampleTiltAzimuth = float(self.data.pop(0).strip()) + block.sampleRotation = float(self.data.pop(0).strip()) + block.noAdditionalParams = int(self.data.pop(0).strip()) + for p in range(block.noAdditionalParams): + name = "paramLabel" + str(p + 1) + setattr(block, name, self.data.pop(0)) + name = "paramUnit" + str(p + 1) + setattr(block, name, self.data.pop(0)) + name = "paramValue" + str(p + 1) + setattr(block, name, self.data.pop(0)) + block.numOrdValues = int(self.data.pop(0).strip()) + for p in range(block.noVariables): + name = "minOrdValue" + str(p + 1) + setattr(block, name, float(self.data.pop(0).strip())) + name = "maxOrdValue" + str(p + 1) + setattr(block, name, float(self.data.pop(0).strip())) + + self._add_data_values(block) + + return block + + def _add_data_values(self, block): + """ + Add the data from one block to a dictionary. + + Parameters + ---------- + block : vamas.Block + Block object with the dictionary as an attribute. + + Returns + ------- + None. 
+ + """ + data_dict = {} + start = float(block.abscissaStart) + step = float(block.abscissaStep) + num = int(block.numOrdValues / block.noVariables) + x = [round(start + i * step, 2) for i in range(num)] + + if block.abscissaLabel == "binding energy": + x.reverse() + + setattr(block, "x", x) + + for var in range(block.noVariables): + name = "y" + str(var) + data_dict[name] = [] + + data_list = list(np.array(self.data[: block.numOrdValues], dtype=np.float32)) + + self.data = self.data[block.numOrdValues :] + + for var in range(block.noVariables): + max_var = block.noVariables + name = "y" + str(var) + data_one = data_list[var::max_var] + data_dict[name] = data_one + setattr(block, name, data_dict[name]) + + def _build_dict(self): + """ + Construct a list of dictionaries. + + Each dictionary contains all the data and metadata of a spectrum. + vamas.sampleID -> group["name"] + vamas. + """ + group_id = -1 + temp_group_name = "" + spectra = [] + + for idx, block in enumerate(self.blocks): + group_name = block.sampleID + + # This set of conditions detects if the group name has + # changed. If it has, then it increments the group_idx. 
+ if group_name != temp_group_name: + temp_group_name = group_name + group_id += 1 + + spectrum_type = str(block.speciesLabel + block.transitionLabel) + spectrum_id = idx + + settings = { + "analysis_method": block.technique, + "dwell_time": block.dwellTime, + "workfunction": block.workFunction, + "excitation_energy": block.sourceEnergy, + "pass_energy": block.resolution, + "scan_mode": block.analyzerMode, + "source_label": block.sourceLabel, + "nr_values": int(block.numOrdValues / block.noVariables), + "x_units": block.abscissaLabel, + "y_units": block.variableLabel1, + } + + date = ( + str(block.year) + + "-" + + str(block.month) + + "-" + + str(block.day) + + " " + + str(block.hour) + + ":" + + str(block.minute) + + ":" + + str(block.second) + ) + + data = {"x": block.x} + for n in range(int(block.noVariables)): + key = "y" + str(n) + data[key] = getattr(block, key) + + spec_dict = { + "date": date, + "group_name": group_name, + "group_id": group_id, + "spectrum_type": spectrum_type, + "spectrum_id": spectrum_id, + "scans": block.noScans, + "settings": settings, + "data": data, + } + + spectra += [spec_dict] + + self.data_dict = spectra + return self.data_dict diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/writers.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/writers.py new file mode 100644 index 0000000000000000000000000000000000000000..1c8e8495199256a9e6b3060110bd71ec77640812 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/base_model/converters/writers.py @@ -0,0 +1,183 @@ +# Copyright the xpsdeeplearning authors. +# +# This file is part of xpsdeeplearning. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
class TextWriter:
    """Writer for txt format."""

    def __init__(self):
        pass

    def write(self, data, filename):
        """Build header and data and write to txt file."""
        lines = self.build_lines(data)

        with open(str(filename), "w") as out_file:
            for entry in lines:
                out_file.writelines(entry["header_line"] + "\n")
                for row in entry["data_lines"]:
                    out_file.writelines(str(row) + "\n")

    def build_lines(self, data):
        """Build header and data lines for each spectrum dict."""
        return [
            {
                "header_line": spec["spectrum_type"] + " " + spec["group_name"],
                "data_lines": [
                    str(np.round(x_val, 3)) + " " + str(y_val)
                    for x_val, y_val in zip(spec["data"]["x"], spec["data"]["y0"])
                ],
            }
            for spec in data
        ]
vamas file. + """ + self.filename = "" + + for spec in data: + block = Block() + block.sampleID = spec["group_name"] + + block.blockID = spec["spectrum_type"] + block.noCommentLines = 10 + block.commentLines = ( + "Casa Info Follows\n0\n0\n0\n0\n" + + "none" + + "\nGroup: " + + "none" + + "\nAnalyzer Lens: " + + "none" + + "\nAnalyzer Slit: " + + "none" + + "\nScan Mode: " + + "none" + ) + block.expVarValue = 0 + split_string = re.split(r"(\d)", spec["spectrum_type"]) + species = split_string[0] + transition = "".join(split_string[1:]) + block.speciesLabel = species + block.transitionLabel = transition + block.noScans = spec["scans"] + if len(spec["date"].split(" ")) == 3: + date, time, zone = spec["date"].split(" ") + elif len(spec["date"].split(" ")) == 2: + date, time = spec["date"].split(" ") + if len(date.split("/")) == 3: + block.month, block.day, block.year = date.split("/") + elif len(date.split("-")) == 3: + block.month, block.day, block.year = date.split("-") + if len(block.year) == 2: + block.year = int(block.year) + self.millenium + block.hour, block.minute, block.second = time.split(":") + block.noHrsInAdvanceOfGMT = zone.strip("UTC") + setting = spec["settings"] + block.technique = setting["analysis_method"] + block.sourceLabel = setting["source_label"] + block.sourceEnergy = setting["excitation_energy"] + block.sourceAnalyzerAngle = self.sourceAnalyzerAngle + block.analyzerMode = "FAT" + block.resolution = setting["pass_energy"] + block.workFunction = setting["workfunction"] + block.dwellTime = setting["dwell_time"] + + y_units = setting["y_units"] + if y_units == "Counts per Second": + y = [ + i * float(block.dwellTime) * float(block.noScans) + for i in spec["data"]["y0"] + ] + else: + y = list(spec["data"]["y0"]) + if self.normalize != 0: + norm = self.normalize + y = [ + spec["data"]["y0"][i] / spec["data"]["y" + str(norm)][i] + for i in range(len(spec["data"]["y0"])) + ] + x_units = setting["x_units"] + if (x_units == "Binding Energy") & ( + 
setting["scan_mode"] != "FixedEnergies" + ): + block.abscissaStart = str( + float(block.sourceEnergy) - float(setting["binding_energy"]) + ) + else: + block.abscissaStart = spec["data"]["x"][0] + block.abscissaStep = abs(spec["data"]["x"][1] - spec["data"]["x"][0]) + + if "nr_values" not in setting.keys(): + nr_values = len(spec["data"]["y0"]) + block.numOrdValues = str(int(nr_values * int(block.noAdditionalParams))) + else: + block.numOrdValues = str( + int(setting["nr_values"]) * int(block.noAdditionalParams) + ) + block.minOrdValue1 = min(spec["data"]["y0"]) + block.maxOrdValue1 = max(spec["data"]["y0"]) + block.minOrdValue2 = 1 + block.maxOrdValue2 = 1 + for i in y: + block.dataString += str(i) + "\n1\n" + block.dataString = block.dataString[:-1] + self.blocks += [copy(block)] + block.dataString = "" + self.num_spectra = len(self.blocks) + self.vamas_header.noBlocks = self.num_spectra + + with open(str(filename), "w") as file: + for item in self.vamas_header.__dict__.values(): + file.writelines(str(item) + "\n") + for block in self.blocks: + for item in block.__dict__.values(): + file.writelines(str(item) + "\n") + file.writelines("end of experiment") + file.close() diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/creator.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/creator.py new file mode 100644 index 0000000000000000000000000000000000000000..94eb7947e257f396811b940349d53a90e534f7e7 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/creator.py @@ -0,0 +1,1262 @@ +# +# Copyright the xpsdeeplearning authors. +# +# This file is part of xpsdeeplearning. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +Simulate artifical XPS spectra using the Creator class. +""" + +import warnings +import os +import datetime +import json +import pandas as pd +from pandas.errors import SettingWithCopyWarning +import h5py +import matplotlib.pyplot as plt +import numpy as np + +from xpsdeeplearning.simulation.base_model.spectra import ( + safe_arange_with_edges, + MeasuredSpectrum, +) +from xpsdeeplearning.simulation.base_model.figures import Figure +from xpsdeeplearning.simulation.sim import Simulation + + +class Creator: + """Class for simulating mixed XPS spectra.""" + + def __init__(self, params=None): + """ + Prepare simulation run. + + Loading the input spectra and creating the empty simulation + matrix based on the number of input spectra. + + Parameters + ---------- + no_of_simulations : int + The number of spectra that will be simulated. + input_filenames : list + List of strings that defines the seed files for the + simulations. + single : bool, optional + If single, then only one of the input spectra will be used + for creating a single spectrum. If not single, a linear + combination of all spectra will be used. + The default is True. + variable_no_of_inputs : bool, optional + If variable_no_of_inputs and if single, then the number of + input spectra used in the linear combination will be + randomly chosen from the interval + (1, No. of input spectra). + The default is True. + + Returns + ------- + None. 
        """
        # Load the default simulation parameters shipped with the
        # package; user-supplied params only override these defaults.
        default_param_filename = "params/default_params.json"
        default_param_filepath = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            default_param_filename,
        )

        with open(default_param_filepath, "r") as param_file:
            self.params = json.load(param_file)
            self.params["init_param_filepath"] = default_param_filepath

        # sim_ranges aliases self.params["sim_ranges"], so updates to
        # it below are reflected in self.params as well.
        self.sim_ranges = self.params["sim_ranges"]

        timestamp = datetime.datetime.now().strftime("%Y%m%d")
        self.params["timestamp"] = timestamp

        # Replace the default params with the supplied params
        # if available. "sim_ranges" is merged key by key instead of
        # being replaced wholesale.
        if params is not None:
            for key in params.keys():
                if key != "sim_ranges":
                    self.params[key] = params[key]
                else:
                    for subkey in params["sim_ranges"].keys():
                        self.sim_ranges[subkey] = params["sim_ranges"][subkey]

        # Print parameter file name.
        print(f"Parameters were taken from {self.params['init_param_filepath']}.")
        del self.params["init_param_filepath"]

        self.name = self.params["timestamp"] + "_" + self.params["name"]
        self.no_of_simulations = self.params["no_of_simulations"]

        self.labels = self.params["labels"]
        self.spectra = self.params["spectra"]

        # Warning if core and auger spectra of same species are
        # not scaled together.
        if not self.params["same_auger_core_percentage"]:
            warnings.warn(
                "Auger and core spectra of the same species are not scaled"
                " together. If you have Auger spectra, you may want to set"
                " 'same_auger_core_percentage' to True!"
            )

        # Load input spectra from all reference sets.
        self.input_spectra = self.load_input_spectra(self.params["input_filenames"])

        # No. of parameter = 1 ref_set + no. of linear parameter + 6
        # (one parameter each for resolution, shift_x, signal_to noise,
        # scatterer, distance, pressure)
        self.no_of_linear_params = len(self.spectra)
        no_of_params = 1 + self.no_of_linear_params + 6

        # One row per simulated spectrum, one column per parameter.
        self.simulation_matrix = np.zeros((self.no_of_simulations, no_of_params))

        # Create the parameter matrix for the simulation.
        self.create_matrix(
            single=self.params["single"],
            variable_no_of_inputs=self.params["variable_no_of_inputs"],
            always_auger=self.params["always_auger"],
            always_core=self.params["always_core"],
        )

        # Results of a simulation run are collected here by run().
        self.df = pd.DataFrame()

    def load_input_spectra(self, filenames):
        """
        Load input spectra.

        Load input spectra from all reference sets into DataFrame.
        Will store NaN value if no reference is available.

        Parameters
        ----------
        filenames : dict
            Dictionary of list with filenames to load.

        Returns
        -------
        pd.DataFrame
            A DataFrame containing instances of MeasuredSpectrum.
            Each row contains one set of reference spectra.

        """
        input_spectra = pd.DataFrame(columns=self.spectra)

        # Reference files live in <package root>/data/references,
        # resolved relative to this module's location.
        input_datapath = os.path.join(
            *[
                os.path.dirname(os.path.abspath(__file__)).partition("simulation")[0],
                "data",
                "references",
            ]
        )

        input_spectra_list = []
        for value_list in filenames.values():
            ref_spectra_dict = {}
            for filename in value_list:
                filepath = os.path.join(input_datapath, filename)
                measured_spectrum = MeasuredSpectrum(filepath)
                if self.params["normalize_inputs"]:
                    measured_spectrum.normalize()
                # The spectrum's first label key identifies its column.
                label = next(iter(measured_spectrum.label.keys()))
                ref_spectra_dict[label] = measured_spectrum
            input_spectra_list.append(ref_spectra_dict)

        # Outer join keeps all columns; missing references become NaN.
        return pd.concat(
            [input_spectra, pd.DataFrame(input_spectra_list)],
            join="outer",
        )

    def create_matrix(
        self,
        single=False,
        variable_no_of_inputs=True,
        always_auger=False,
        always_core=True,
    ):
        """
        Create matrix for multiple simulations.

        Creates the numpy array "simulation_matrix" (instance
        variable) that is used to simulate the new spectra.
        simulation_matrix has the dimensions (n x p), where:
        n: no. of spectra that will be created
        p: number of parameters
        p = no. of input spectra + 3 (resolution,shift,noise)
        The parameters are chosen randomly.
For the last three + parameters, the random numbers are integers drawn from specific + intervals that create reasonable spectra. + + Parameters + ---------- + single : bool, optional + If single, only one input spectrum is taken. + The default is False. + variable_no_of_inputs : bool, optional + If variable_no_of_inputs and if single, then the number of + input spectra used in the linear combination will be + randomly chosen from the interval + (1, No. of input spectra). + The default is True. + always_auger : bool, optional + If always_auger, there will always be at least one + Auger spectrum in the output spectrum. + The default is False. + always_core : bool, optional + If always_auger, there will always be at least one + core level spectrum in the output spectrum. + The default is True. + + Returns + ------- + None. + + """ + for i in range(self.no_of_simulations): + key = self.select_reference_set() # select a set of references + self.simulation_matrix[i, 0] = int(key) + + self.simulation_matrix[i, 1 : self.no_of_linear_params + 1] = ( + self.select_scaling_params( + key=key, + single=single, + variable_no_of_inputs=variable_no_of_inputs, + always_auger=always_auger, + always_core=always_core, + ) + ) + + self.simulation_matrix[i, self.no_of_linear_params + 1 :] = ( + self.select_sim_params(key) + ) + + print( + "Random parameters: " + str(i + 1) + "/" + str(self.no_of_simulations) + ) + + def select_reference_set(self): + """ + Randomly select a number for calling one of the reference sets. + + Returns + ------- + int + A number between 0 and the total number of input + reference sets. + + """ + return np.random.randint(0, self.input_spectra.shape[0]) + + def select_scaling_params( + self, + key, + single=False, + variable_no_of_inputs=True, + always_auger=False, + always_core=True, + ): + """ + Randomly select parameters for linear combination. + + Select scaling parameters for a simulation from one set + of reference spectra (given by key). 
+ + Parameters + ---------- + key : int + Integer number of the reference spectrum set to use. + single : bool, optional + If single, only one input spectrum is taken. + The default is False. + variable_no_of_inputs : bool, optional + If variable_no_of_inputs and if single, then the number of + input spectra used in the linear combination will be + randomly chosen from the interval + (1, No. of input spectra). + The default is True. + always_auger : bool, optional + If always_auger, there will always be at least one + Auger spectrum in the output spectrum. + The default is False. + always_core : bool, optional + If always_auger, there will always be at least one + core level spectrum in the output spectrum. + The default is True. + + Returns + ------- + linear_params : list + A list of parameters for the linear combination of r + eference spectra. + + """ + linear_params = [0.0] * self.no_of_linear_params + + # Get input spectra from one set of references. + inputs = self.input_spectra.iloc[[key]] + + # Select indices where a spectrum is available for this key. + indices = [ + self.spectra.index(j) + for j in inputs.columns[inputs.isnull().any() == False].tolist() # noqa: E712 + ] + indices_empty = [ + self.spectra.index(j) + for j in inputs.columns[inputs.isnull().any()].tolist() + ] + + # This ensures that always just one Auger region is used. 
+ auger_spectra = [] + core_spectra = [] + for spec in inputs.iloc[0]: + if str(spec) != "nan": + if spec.spectrum_type == "auger": + auger_spectra.append(spec) + if spec.spectrum_type == "core_level": + core_spectra.append(spec) + auger_region = self._select_one_auger_region(auger_spectra) + + selected_auger_spectra = [ + auger_spectrum + for auger_spectrum in auger_spectra + if (auger_region in list(auger_spectrum.label.keys())[0]) + ] + unselected_auger_spectra = [ + auger_spectrum + for auger_spectrum in auger_spectra + if (auger_region not in list(auger_spectrum.label.keys())[0]) + ] + selected_auger_indices = [ + inputs.columns.get_loc(list(s.label.keys())[0]) + for s in selected_auger_spectra + ] + unselected_auger_indices = [ + inputs.columns.get_loc(list(s.label.keys())[0]) + for s in unselected_auger_spectra + ] + + if single: + # Set one parameter to 1 and others to 0. + rand = np.random.choice(indices) + linear_params[rand] = 1.0 + else: + if variable_no_of_inputs: + # Randomly choose how many spectra shall be combined + no_of_spectra = np.random.randint(1, len(indices) + 1) + params = [0.0] * no_of_spectra + while sum(params) == 0.0: + params = [np.random.uniform(0.1, 1.0) for j in range(no_of_spectra)] + + params = self._normalize_float_list(params) + # Don"t allow parameters below 0.1. + for param in params: + if param <= 0.1: + params[params.index(param)] = 0.0 + + params = self._normalize_float_list(params) + + # Add zeros if no_of_spectra < no_of_linear_params. + for _ in range(len(indices) - no_of_spectra): + params.append(0.0) + + else: + # Linear parameters + r = [np.random.uniform(0.1, 1.0) for j in range(len(indices))] + params = self._normalize_float_list(r) + + while all(p >= 0.1 for p in params) is not False: + # sample again if one of the parameters is smaller + # than 0.1. + r = [np.random.uniform(0.1, 1.0) for j in range(len(indices))] + params = self._normalize_float_list(r) + + # Randomly shuffle so that zeros are equally distributed. 
+ np.random.shuffle(params) + # Add linear params at the positions where there + # are reference spectra available + param_iter = iter(params) + for index in indices: + linear_params[index] = next(param_iter) + + # Making sure that a single spectrum is not moved + # to the undesired Auger region. + test_indices = [i for i in indices if i not in unselected_auger_indices] + while all(p == 0.0 for p in [linear_params[i] for i in test_indices]): + np.random.shuffle(params) + param_iter = iter(params) + for index in indices: + linear_params[index] = next(param_iter) + + # Remove undesired auger spectra + for index in unselected_auger_indices: + linear_params[index] = 0.0 + + if self.params["same_auger_core_percentage"]: + # Set percentage for core and auger spectra of the + # same species to the same value. + linear_params = self._scale_auger_core_together( + linear_params, selected_auger_spectra, core_spectra + ) + + linear_params = self._normalize_float_list(linear_params) + + # If no spectrum is available, set the corresponding + # linear param to NaN. + for index in indices_empty: + linear_params[index] = float("NAN") + + if always_auger: + # Always use at least one Auger spectrum + # when available. + if all( + p == 0.0 for p in [linear_params[i] for i in selected_auger_indices] + ): + linear_params = self.select_scaling_params( + key=key, + single=single, + variable_no_of_inputs=variable_no_of_inputs, + always_auger=always_auger, + always_core=always_core, + ) + if always_core: + # Always use at least one core level spectrum + # when available. 
+ core_level_indices = [ + inputs.columns.get_loc(list(s.label.keys())[0]) for s in core_spectra + ] + if all(p == 0.0 for p in [linear_params[i] for i in core_level_indices]): + linear_params = self.select_scaling_params( + key=key, + single=single, + variable_no_of_inputs=variable_no_of_inputs, + always_auger=always_auger, + always_core=always_core, + ) + + return linear_params + + def select_sim_params(self, row): + """ + Select parameters for one row in the simulation matrix. + + Parameters + ---------- + row : int + Row in the simulation matrix to fill. + + Returns + ------- + sim_params : list + List of parameters for changing a spectrum using + various processing steps. + + """ + sim_params = [0.0] * 6 + + # FWHM + sim_params[-6] = self._select_random_fwhm() + + # shift_x + # Get step from first existing spectrum + for spectrum in self.input_spectra.iloc[row]: + try: + step = spectrum.step + break + except AttributeError: + continue + sim_params[-5] = self._select_random_shift_x(step) + + # Signal-to-noise + sim_params[-4] = self._select_random_noise() + + # Scattering + # Scatterer + sim_params[-3] = self._select_random_scatterer() + # Pressurex + sim_params[-2] = self._select_random_scatter_pressure() + # Distance + sim_params[-1] = self._select_random_scatter_distance() + + return sim_params + + def _select_random_fwhm(self): + if self.params["broaden"] is not False: + return ( + np.random.randint( + self.sim_ranges["FWHM"][0] * 1000, + self.sim_ranges["FWHM"][1] * 1000, + ) + / 1000 + ) + return 0 + + def _select_random_shift_x(self, step): + if self.params["shift_x"] is not False: + shift_range = np.arange( + self.sim_ranges["shift_x"][0], + self.sim_ranges["shift_x"][1], + step, + ) + r = np.round(np.random.randint(0, len(shift_range)), decimals=2) + if -step < np.round(shift_range[r], 2) < step: + shift_range[r] = 0 + + return shift_range[r] + return 0 + + def _select_random_noise(self): + if self.params["noise"] is not False: + return ( + 
np.random.randint( + self.sim_ranges["noise"][0] * 1000, + self.sim_ranges["noise"][1] * 1000, + ) + / 1000 + ) + return 0 + + def _select_random_scatterer(self): + if self.params["scatter"] is not False: + # Scatterer ID + return np.random.randint(0, len(self.sim_ranges["scatterers"].keys())) + return None + + def _select_random_scatter_pressure(self): + if self.params["scatter"] is not False: + return ( + np.random.randint( + self.sim_ranges["pressure"][0] * 100, + self.sim_ranges["pressure"][1] * 100, + ) + / 100 + ) + return 0 + + def _select_random_scatter_distance(self): + if self.params["scatter"] is not False: + return ( + np.random.randint( + self.sim_ranges["distance"][0] * 100, + self.sim_ranges["distance"][1] * 100, + ) + / 100 + ) + return 0 + + def _select_one_auger_region(self, auger_spectra): + """ + Randomly select one region of Auger spectra. + + Checks for the available Auger spectra and randomly + selects one emission line. + + Returns + ------- + str + Name of the randomly selected Auger region. + + """ + labels = [list(s.label.keys())[0] for s in auger_spectra] + if not labels: + return [] + + auger_regions = {label.split(" ", 1)[0] for label in labels} + auger_regions = list(auger_regions) + + r = np.random.randint(0, len(auger_regions)) + + return auger_regions[r] + + def _scale_auger_core_together( + self, linear_params, selected_auger_spectra, core_spectra + ): + """ + Set core/auger percentage of the same species to same value. + + Parameters + ---------- + linear_params : list + List of all linear parameters. + selected_auger_spectra : list + List of all selected Auger spectra. + core_spectra : list + List of all avialble core level spectra. + + Returns + ------- + linear_params : list + New list of linear parameters where core and auger spectra + of the same species have the same percentage. 
+ + """ + auger_labels = [list(s.label.keys())[0] for s in selected_auger_spectra] + auger_phases = [label.split(" ", 1)[1] for label in auger_labels] + core_labels = [list(s.label.keys())[0] for s in core_spectra] + core_phases = [label.split(" ", 1)[1] for label in core_labels] + overlapping_species = [phase for phase in auger_phases if phase in core_phases] + + for species in overlapping_species: + label_auger = [label for label in auger_labels if species in label][0] + label_core = [label for label in core_labels if species in label][0] + i_auger = self.spectra.index(label_auger) + i_core = self.spectra.index(label_core) + max_value = np.max([linear_params[i_auger], linear_params[i_core]]) + linear_params[i_auger] = max_value + linear_params[i_core] = max_value + + return linear_params + + def _normalize_float_list(self, list_of_floats): + """ + Normalize a list of float by its sum. + + Parameters + ---------- + param_list : list + List of floats. + + Returns + ------- + list + Normalized list of floats + (or original list if all entries are 0.) + + """ + try: + return [k / sum(list_of_floats) for k in list_of_floats] + except ZeroDivisionError: + return list_of_floats + + def run(self): + """ + Run the simulations. + + The artificial spectra are createad using the Simulation + class and the simulation matrix. All data is then stored in + a dataframe. + + Returns + ------- + None. + + """ + dict_list = [] + for i in range(self.no_of_simulations): + ref_set_key = int(self.simulation_matrix[i, 0]) + + # Only select input spectra and scaling parameter + # for the references that are avalable. 
+ sim_input_spectra = [ + spectrum + for spectrum in self.input_spectra.iloc[ref_set_key].tolist() + if str(spectrum) != "nan" + ] + + scaling_params = [ + p + for p in self.simulation_matrix[i][1 : self.no_of_linear_params + 1] + if str(p) != "nan" + ] + + sim = Simulation(sim_input_spectra) + + sim.combine_linear(scaling_params=scaling_params) + + fwhm = self.simulation_matrix[i][-6] + shift_x = self.simulation_matrix[i][-5] + signal_to_noise = self.simulation_matrix[i][-4] + scatterer_id = self.simulation_matrix[i][-3] + pressure = self.simulation_matrix[i][-2] + distance = self.simulation_matrix[i][-1] + + try: + # In order to assign a label, the scatterers are encoded + # by numbers. + scatterer_label = self.sim_ranges["scatterers"][str(int(scatterer_id))] + except ValueError: + scatterer_label = None + + sim.change_spectrum( + fwhm=fwhm, + shift_x=shift_x, + signal_to_noise=signal_to_noise, + scatterer={ + "label": scatterer_label, + "distance": distance, + "pressure": pressure, + }, + ) + + if self.params["normalize_outputs"]: + sim.output_spectrum.normalize() + + dict_1 = {"reference_set": ref_set_key} + dict_2 = self._dict_from_one_simulation(sim) + new_dict = {**dict_1, **dict_2} + dict_list.append(new_dict) + print("Simulation: " + str(i + 1) + "/" + str(self.no_of_simulations)) + + print("Number of created spectra: " + str(self.no_of_simulations)) + + self.df = pd.DataFrame(dict_list) + + if self.params["ensure_same_length"]: + self.df = self._extend_spectra_in_df(self.df) + + self._prepare_metadata_after_run() + + return self.df + + def _dict_from_one_simulation(self, sim): + """ + Create a dictionary with data from one simulation event. + + Parameters + ---------- + sim : Simulation + The simulation for which the dictionary shall be created. + + Returns + ------- + sim_dict : dict + Dictionaty containing all simulation data. + + """ + spectrum = sim.output_spectrum + + # Add all percentages of one species together. 
+ new_label = {} + out_phases = [] + for key, value in spectrum.label.items(): + phase = key.split(" ", 1)[1] + if phase not in out_phases: + new_label[phase] = value + else: + new_label[phase] += value + out_phases.append(phase) + + spectrum.label = new_label + + y = np.reshape(spectrum.lineshape, (spectrum.lineshape.shape[0], -1)) + + sim_dict = { + "label": spectrum.label, + "shift_x": spectrum.shift_x, + "noise": spectrum.signal_to_noise, + "FWHM": spectrum.fwhm, + "scatterer": spectrum.scatterer, + "distance": spectrum.distance, + "pressure": spectrum.pressure, + "x": spectrum.x, + "y": y, + } + for label_value in self.labels: + if label_value not in sim_dict["label"].keys(): + sim_dict["label"][label_value] = 0.0 + + return sim_dict + + def _extend_spectra_in_df(self, df): + """ + Extend all x and y columns in the dataframe to the same length. + + Parameters + ---------- + df : pd.DataFrame + A dataframe with "x" and "y" columns containing 1D numpy + arrays. + + Returns + ------- + df : pd.DataFrame + The same dataframe as the input, with the arrays in the + "x" and "y" columns all having the same shape. + """ + max_length = np.max([y.shape for y in self.df["y"].tolist()]) + + data_list = df[["x", "y"]].values.tolist() + new_spectra = [] + + for x_arr, y_arr in data_list: + x_new, y_new = self._extend_xy(x_arr, y_arr, max_length) + new_data_dict = {"x": x_new, "y": y_new} + new_spectra.append(new_data_dict) + + df.update(pd.DataFrame(new_spectra)) + + return df + + def _extend_xy(self, X0, Y0, new_length): + """ + Extend two 1D arrays to a new length. + + Parameters + ---------- + X0 : np.ndarray + Regularly spaced 1D array. + Y0 : np.ndarray + 1D array of the same size as X0. + new_length : int + Length of new array. + + Returns + ------- + None. + + """ + + def start_stop_step_from_x(arr): + """ + Calculcate start, stop, and step from a regular array. + + Parameters + ---------- + arr : ndarrray + A numpy array with regular spacing, + i.e. 
the same step size between all points. + + Returns + ------- + start : int + Minimal value of arr. + stop : int + Maximal value of arr. + step : float + Step size between points in arr. + + """ + start = np.min(arr) + stop = np.max(arr) + + x1 = np.roll(arr, -1) + diff = np.abs(np.subtract(arr, x1)) + step = np.round(np.min(diff[diff != 0]), 3) + + return start, stop, step + + len_diff = new_length - X0.shape[0] + + if len_diff > 0.0: + start0, stop0, step0 = start_stop_step_from_x(X0) + start = start0 - int(len_diff / 2) * step0 + stop = stop0 + int(len_diff / 2) * step0 + + X = np.flip(safe_arange_with_edges(start, stop, step0)) + Y = np.zeros(shape=(X.shape[0], 1)) + + Y[: int(len_diff / 2)] = np.mean(Y0[:20]) + Y[int(len_diff / 2) : -int(len_diff / 2)] = Y0 + Y[-int(len_diff / 2) :] = np.mean(Y0[-20]) + + return X, Y + return X0, Y0 + + def plot_random(self, no_of_spectra): + """ + Randomly plot some of the generated spectra. + + Labels and simulation parameters are added as texts. + + Parameters + ---------- + no_of_spectra : int + No. of random spectra to be plotted. + + Returns + ------- + None. + + """ + no_of_spectra = min(no_of_spectra, self.no_of_simulations) + + random_numbers = [] + for _ in range(no_of_spectra): + r = np.random.randint(0, self.no_of_simulations) + while r in random_numbers: + # prevent repeating figures + r = np.random.randint(0, self.no_of_simulations) + random_numbers.append(r) + + row = self.df.iloc[r] + energy = row["x"] + intensity = row["y"] + title = "Simulated spectrum no. " + str(r) + fig = Figure(energy, intensity, title) + spectrum_text = self._write_spectrum_text(row) + fig.ax.text( + 0.1, + 0.9, + spectrum_text, + horizontalalignment="left", + verticalalignment="top", + transform=fig.ax.transAxes, + fontsize=7, + ) + plt.show() + + def _write_spectrum_text(self, df_row): + """ + Write the text for a spectrum that is plotted from a df row. 
+ + Parameters + ---------- + df_row : pd.DataFrame + Row of a DataFrame created during simulation. + + Returns + ------- + None + + """ + linear_params_text = "" + for key in self.labels: + linear_params_text += ( + str(key) + ": " + str(np.round(df_row["label"][key], decimals=2)) + "\n" + ) + + params_text = "\n" + if df_row["FWHM"] is not None and df_row["FWHM"] != 0: + params_text += "FHWM: " + str(np.round(df_row["FWHM"], decimals=2)) + "\n" + else: + params_text += "FHWM: not changed" + "\n" + + if df_row["shift_x"] is not None and df_row["shift_x"] != 0: + params_text += "X shift: " + "{:.3f}".format(df_row["shift_x"]) + "\n" + else: + params_text += "X shift: none" + "\n" + + if df_row["noise"] is not None and df_row["noise"] != 0: + params_text += "S/N: " + "{:.1f}".format(df_row["noise"]) + "\n" + else: + params_text += "S/N: not changed" + "\n" + + scatter_text = "\n" + if df_row["scatterer"] is not None: + scatter_text += "Scatterer: " + str(df_row["scatterer"]) + "\n" + scatter_text += "Pressure: " + str(df_row["pressure"]) + " mbar" + "\n" + scatter_text += "Distance: " + str(df_row["distance"]) + " mm" + "\n" + + else: + scatter_text += "Scattering: none" + "\n" + + return linear_params_text + params_text + scatter_text + + def _prepare_metadata_after_run(self): + """ + Save the metadata from the simulation run in JSON file. + + Returns + ------- + None. 
+ + """ + self.params["name"] = self.name + self.params["energy_range"] = [ + np.min(self.df["x"][0]), + np.max(self.df["x"][0]), + np.round(self.df["x"][0][0] - self.df["x"][0][1], 2), + ] + + +class FileWriter: + """Write creator data to file.""" + + def __init__(self, df, params): + self.df = df + self.params = params + self.name = self.params["name"] + self.labels = self.params["labels"] + + self.main_dir = str + self.excel_filepath = str + self.json_filepath = str + self.pkl_filepath = str + self.hdf5_filepath = str + + def to_file(self, filetypes, metadata=True): + """ + Create file from the dataframe of simulated spectra. + + Parameters + ---------- + filepath : str + Filepath of the output file. + filetype : str + Options: "excel", "json", "txt", "pickle" + Returns + ------- + None. + + """ + datafolder = os.path.join(*[self.params["output_datafolder"], self.name]) + try: + os.makedirs(datafolder) + except FileExistsError: + pass + + self.main_dir = os.path.join(datafolder, self.name) + + valid_filetypes = ["excel", "json", "pickle", "hdf5"] + + for filetype in filetypes: + if filetype not in valid_filetypes: + print("Saving was not successful. Choose a valid filetype!") + else: + self._save_to_file(self.df, self.main_dir, filetype) + print(f"Data was saved to {filetype.upper()} file.") + + if metadata: + self.save_metadata() + + def _save_to_file(self, df, filename, filetype): + """ + Save a dataframe to a file. + + Parameters + ---------- + df : pandas.DataFrame + Dataframe with the simulated data. + filename : str + Filename of the new file. + filetype : str + If "excel", save the data to an Excel file. + If "json", save the data to a JSON file. + If "excel", pickle the data and save it. + If "hdf5", call the helper method "prepare_hdf5" and store + the data in an HDF5 file. + + + Returns + ------- + None. 
+ + """ + if filetype == "excel": + self.excel_filepath = filename + ".xlsx" + with pd.ExcelWriter(self.excel_filepath) as writer: + df.to_excel(writer, sheet_name=filename) + + if filetype == "json": + self.json_filepath = filename + ".json" + + with open(self.json_filepath, "w") as json_file: + df.to_json(json_file, orient="records") + + if filetype == "pickle": + self.pkl_filepath = filename + ".pkl" + with open(self.pkl_filepath, "wb") as pickle_file: + df.to_pickle(pickle_file) + + if filetype == "hdf5": + print("Saving data to HDF5...") + self.hdf5_filepath = filename + ".h5" + + hdf5_data = self.prepare_hdf5(self.df) + + with h5py.File(self.hdf5_filepath, "w") as h5_file: + for key, value in hdf5_data.items(): + try: + h5_file.create_dataset( + key, + data=value, + compression="gzip", + chunks=True, + ) + except TypeError: + value = np.array(value, dtype=object) + string_dt = h5py.special_dtype(vlen=str) + h5_file.create_dataset( + key, + data=value, + dtype=string_dt, + compression="gzip", + chunks=True, + ) + print("Saved " + key + " to HDF5 file.") + + def prepare_hdf5(self, df): + """ + Store the DataFrame from a simulation run in a dictionary. + + Parameters + ---------- + df : pd.DataFrame + Dataframe containing the result of a simulation process. + + Returns + ------- + dict + Dictionary containing all simulated data. + Items include X, y, shiftx, noise, FWHM, scatterer, + distance, and pressure, + + """ + X = [] + y = [] + shiftx = [] + noise = [] + fwhm = [] + scatterer = [] + distance = [] + pressure = [] + + energies = df["x"][0] + for index, row in df.iterrows(): + X_one = row["y"] + if self.params["eV_window"]: + warnings.simplefilter(action="ignore", category=SettingWithCopyWarning) + # Only select a random window of some eV as output. 
+ step = self.params["energy_range"][-1] + eV_window = self.params["eV_window"] + window = int(eV_window / step) + 1 + r = np.random.randint(0, X_one.shape[0] - window) + X_one = X_one[r : r + window] + self.df["x"][index] = np.flip( + safe_arange_with_edges(0, eV_window, step) + ) + self.df["y"][index] = X_one + energies = self.df["x"][index] + y_one = row["label"] + shiftx_one = row["shift_x"] + noise_one = row["noise"] + fwhm_one = row["FWHM"] + scatterer_name = row["scatterer"] + scatterers = {"He": 0, "H2": 1, "N2": 2, "O2": 3} + try: + scatterer_one = scatterers[scatterer_name] + except KeyError: + scatterer_one = float("NAN") + distance_one = row["distance"] + pressure_one = row["pressure"] + + X.append(X_one) + y.append(y_one) + shiftx.append(shiftx_one) + noise.append(noise_one) + fwhm.append(fwhm_one) + scatterer.append(scatterer_one) + distance.append(distance_one) + pressure.append(pressure_one) + + print("Prepare HDF5 upload: " + str(index) + "/" + str(df.shape[0])) + + X = np.array(X, dtype=float) + try: + X = np.reshape(X, (X.shape[0], X.shape[1], -1)) + except IndexError as exc: + raise IndexError( + "Could not concatenate individual spectra because their" + "sizes are different. Either set 'ensure_same_length'" + "to True or 'eV_window' to a finite integer!" + ) from exc + + y = self._one_hot_encode(y) + + shiftx = np.reshape(np.array(shiftx), (-1, 1)) + noise = np.reshape(np.array(noise), (-1, 1)) + fwhm = np.reshape(np.array(fwhm), (-1, 1)) + scatterer = np.reshape(np.array(scatterer), (-1, 1)) + distance = np.reshape(np.array(distance), (-1, 1)) + pressure = np.reshape(np.array(pressure), (-1, 1)) + + return { + "X": X, + "y": y, + "shiftx": shiftx, + "noise": noise, + "FWHM": fwhm, + "scatterer": scatterer, + "distance": distance, + "pressure": pressure, + "energies": energies, + "labels": self.labels, + } + + def _one_hot_encode(self, y): + """ + One-hot encode the labels. 
+ + As an example, if the label of a spectrum is Fe metal = 1 and all + oxides = 0, then the output will be np.array([1,0,0,0],1). + + Parameters + ---------- + y : list + List of label strings. + + Returns + ------- + new_labels : arr + One-hot encoded labels. + + """ + new_labels = np.zeros((len(y), len(self.labels))) + + for i, label_dict in enumerate(y): + for species, value in label_dict.items(): + number = self.labels.index(species) + new_labels[i, number] = value + + return new_labels + + def save_metadata(self): + """ + Save simulation parameters to JSON file. + + Returns + ------- + None. + + """ + json_filepath = self.main_dir + "_metadata.json" + + if self.params["eV_window"]: + self.params["energy_range"] = [ + np.min(self.df["x"][0]), + np.max(self.df["x"][0]), + np.round(self.df["x"][0][0] - self.df["x"][0][1], 2), + ] + + with open(json_filepath, "w") as out_file: + json.dump(self.params, out_file, indent=4) + + +def calculate_runtime(start, end): + """ + Calculate the runtime between two points. + + Parameters + ---------- + start : float + Start time, generated by start = time(). + end : float + Start time, generated by end = time(). + + Returns + ------- + runtime : str + Returns a string of the format hh:mm:ss:ff. 
+ + """ + time = end - start + hours, rem = divmod(time, 3600) + minutes, seconds = divmod(rem, 60) + runtime = "{:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), seconds) + + return runtime diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/__init__.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/default_params.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/default_params.json new file mode 100644 index 0000000000000000000000000000000000000000..4a0016b420695f95ca0855f43bed70bd9a420f1f --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/default_params.json @@ -0,0 +1,106 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"test", + "labels":[ + "Fe metal", + "FeO", + "Fe3O4", + "Fe2O3", + "Ni metal", + "NiO", + "Co metal", + "CoO", + "Co3O4" + ], + "spectra":[ + "Fe2p Fe metal", + "Fe2p FeO", + "Fe2p Fe3O4", + "Fe2p Fe2O3", + "Co2p Co metal", + "Co2p CoO", + "Co2p Co3O4", + "Ni2p Ni metal", + "Ni2p NiO", + "NiLMM Ni metal", + "NiLMM NiO", + "CoLMM Co metal", + "CoLMM CoO", + "CoLMM Co3O4", + "FeLMM Fe metal", + "FeLMM FeO", + "FeLMM Fe3O4", + "FeLMM Fe2O3" + ], + "input_filenames":{ + "reference_set_0":[ + "NiCoFe/Fe2p_Fe_metal.txt", + "NiCoFe/Fe2p_FeO.txt", + "NiCoFe/Fe2p_Fe3O4.txt", + "NiCoFe/Fe2p_Fe2O3.txt", + "NiCoFe/NiLMM_Ni_metal.txt", + "NiCoFe/NiLMM_NiO.txt", + "NiCoFe/CoLMM_Co_metal.txt", + "NiCoFe/CoLMM_CoO.txt", + "NiCoFe/CoLMM_Co3O4.txt", + "NiCoFe/FeLMM_Fe_metal.txt", + "NiCoFe/FeLMM_FeO.txt", + "NiCoFe/FeLMM_Fe3O4.txt", + 
"NiCoFe/FeLMM_Fe2O3.txt" + ], + "reference_set_1":[ + "NiCoFe/Co2p_Co_metal.txt", + "NiCoFe/Co2p_CoO.txt", + "NiCoFe/Co2p_Co3O4.txt", + "NiCoFe/NiLMM_Ni_metal.txt", + "NiCoFe/NiLMM_NiO.txt", + "NiCoFe/CoLMM_Co_metal.txt", + "NiCoFe/CoLMM_CoO.txt", + "NiCoFe/CoLMM_Co3O4.txt", + "NiCoFe/FeLMM_Fe_metal.txt", + "NiCoFe/FeLMM_FeO.txt", + "NiCoFe/FeLMM_Fe3O4.txt", + "NiCoFe/FeLMM_Fe2O3.txt" + ], + "reference_set_2":[ + "NiCoFe/Ni2p_Ni_metal.txt", + "NiCoFe/Ni2p_NiO.txt", + "NiCoFe/NiLMM_Ni_metal.txt", + "NiCoFe/NiLMM_NiO.txt", + "NiCoFe/CoLMM_Co_metal.txt", + "NiCoFe/CoLMM_CoO.txt", + "NiCoFe/CoLMM_Co3O4.txt", + "NiCoFe/FeLMM_Fe_metal.txt", + "NiCoFe/FeLMM_FeO.txt", + "NiCoFe/FeLMM_Fe3O4.txt", + "NiCoFe/FeLMM_Fe2O3.txt" + ] + }, + "no_of_simulations":500, + "single":false, + "variable_no_of_inputs":true, + "always_auger":true, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":true, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[-5,5], + "noise":[2,35], + "FWHM":[145,722], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[0.1,0.5], + "distance":[0.1,1] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core.json new file mode 100644 index 0000000000000000000000000000000000000000..2a2e89d25394eac4e4673a64203b8574f5cf3ef2 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core.json @@ -0,0 +1,60 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"CoFe_combined_without_auger_peaks", + "labels":[ + "Co 
metal", + "CoO", + "Co3O4", + "Fe metal", + "FeO", + "Fe3O4", + "Fe2O3" + ], + "spectra":[ + "Co2pFe2p Co metal", + "Co2pFe2p CoO", + "Co2pFe2p Co3O4", + "Co2pFe2p Fe metal", + "Co2pFe2p FeO", + "Co2pFe2p Fe3O4", + "Co2pFe2p Fe2O3" + ], + "input_filenames":{ + "reference_set_0":[ + "NiCoFe/Co2pFe2p_Fe_metal.txt", + "NiCoFe/Co2pFe2p_FeO.txt", + "NiCoFe/Co2pFe2p_Fe3O4.txt", + "NiCoFe/Co2pFe2p_Fe2O3.txt", + "NiCoFe/Co2pFe2p_Co_metal.txt", + "NiCoFe/Co2pFe2p_CoO.txt", + "NiCoFe/Co2pFe2p_Co3O4.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":false, + "normalize_outputs":true, + "broaden":false, + "shift_x":true, + "noise":true, + "scatter":false, + "sim_ranges":{ + "shift_x":[-5,5], + "noise":[4,35], + "FWHM":[145,722], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[0.1,0.5], + "distance":[0.1,1] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core_100eV_window.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core_100eV_window.json new file mode 100644 index 0000000000000000000000000000000000000000..07f0c53a411070ef97f296fa9c1ef6dcac63d233 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_CoFe_combined_core_100eV_window.json @@ -0,0 +1,67 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"CoFe_combined_without_auger_peaks_100eV_window", + "labels":[ + "Co metal", + "CoO", + "Co3O4", + "Fe metal", + "FeO", + "Fe3O4", + "Fe2O3" + ], + "spectra":[ + "Co2pFe2p Co metal", + "Co2pFe2p CoO", + "Co2pFe2p Co3O4", + "Co2pFe2p Fe 
metal", + "Co2pFe2p FeO", + "Co2pFe2p Fe3O4", + "Co2pFe2p Fe2O3", + "CoLMM Co metal", + "CoLMM CoO", + "CoLMM Co3O4", + "FeLMM Fe metal", + "FeLMM FeO", + "FeLMM Fe3O4", + "FeLMM Fe2O3" + ], + "input_filenames":{ + "reference_set_0":[ + "NiCoFe/Co2pFe2p_Fe_metal.txt", + "NiCoFe/Co2pFe2p_FeO.txt", + "NiCoFe/Co2pFe2p_Fe3O4.txt", + "NiCoFe/Co2pFe2p_Fe2O3.txt", + "NiCoFe/Co2pFe2p_Co_metal.txt", + "NiCoFe/Co2pFe2p_CoO.txt", + "NiCoFe/Co2pFe2p_Co3O4.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":100, + "normalize_inputs":false, + "normalize_outputs":true, + "broaden":false, + "shift_x":true, + "noise":true, + "scatter":false, + "sim_ranges":{ + "shift_x":[-5,5], + "noise":[4,35], + "FWHM":[145,722], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[0.1,0.5], + "distance":[0.1,1] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Co_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Co_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..f0c3542dd9ed84ed71506ae12d81fc7105cc81c6 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Co_core_small_gas_phase.json @@ -0,0 +1,63 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Co_linear_combination_small_gas_phase", + "labels":[ + "Co metal", + "CoO", + "Co3O4" + ], + "spectra":[ + "Co2p Co metal", + "Co2p CoO", + "Co2p Co3O4" + ], + "input_filenames":{ + "reference_set_0":[ + "Co2p_Co_metal.txt", + "Co2p_CoO.txt", + "Co2p_Co3O4.txt" + ] + }, + "no_of_simulations":250000, + 
"single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Cu_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Cu_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..c65d1ae5dbed2f20f61ff8a97662557c5e1966e2 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Cu_core_small_gas_phase.json @@ -0,0 +1,63 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Cu_linear_combination_small_gas_phase", + "labels":[ + "Cu metal", + "Cu2O", + "CuO" + ], + "spectra":[ + "Cu2p Cu metal", + "Cu2p Cu2O", + "Cu2p CuO" + ], + "input_filenames":{ + "reference_set_0":[ + "Cu2p_Cu_metal.txt", + "Cu2p_Cu2O.txt", + "Cu2p_CuO.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + 
"2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Fe_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Fe_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..b1e789f5c3973a4b60139ee637cb8347fbcbcd02 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Fe_core_small_gas_phase.json @@ -0,0 +1,66 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Fe_linear_combination_small_gas_phase", + "labels":[ + "Fe metal", + "FeO", + "Fe3O4", + "Fe2O3" + ], + "spectra":[ + "Fe2p Fe metal", + "Fe2p FeO", + "Fe2p Fe3O4", + "Fe2p Fe2O3" + ], + "input_filenames":{ + "reference_set_0":[ + "Fe2p_Fe_metal.txt", + "Fe2p_FeO.txt", + "Fe2p_Fe3O4.txt", + "Fe2p_Fe2O3.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Mn_core_small_gas_phase.json 
b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Mn_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..c62a1fbc5a85cf95556542a094200d2b84e56934 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Mn_core_small_gas_phase.json @@ -0,0 +1,63 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Mn_linear_combination_small_gas_phase", + "labels":[ + "MnO", + "Mn2O3", + "MnO2" + ], + "spectra":[ + "Mn2p MnO", + "Mn2p Mn2O3", + "Mn2p MnO2" + ], + "input_filenames":{ + "reference_set_0":[ + "Mn2p_MnO.txt", + "Mn2p_Mn2O3.txt", + "Mn2p_MnO2.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_NiCoFe_combined_core.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_NiCoFe_combined_core.json new file mode 100644 index 0000000000000000000000000000000000000000..4f9e94a20fbf6930586a279681a260e4f7d82cef --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_NiCoFe_combined_core.json @@ -0,0 +1,81 @@ +{ + 
"output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"NiCoFe_long_linear_combination_small_gas_phase", + "labels":[ + "Fe metal", + "FeO", + "Fe3O4", + "Fe2O3", + "Ni metal", + "NiO", + "Co metal", + "CoO", + "Co3O4" + ], + "spectra":[ + "Ni2pCo2pFe2p Ni metal", + "Ni2pCo2pFe2p NiO", + "Ni2pCo2pFe2p Co metal", + "Ni2pCo2pFe2p CoO", + "Ni2pCo2pFe2p Co3O4", + "Ni2pCo2pFe2p Fe metal", + "Ni2pCo2pFe2p FeO", + "Ni2pCo2pFe2p Fe3O4", + "Ni2pCo2pFe2p Fe2O3" + ], + "input_filenames":{ + "reference_set_0":[ + "NiCoFe/Ni2pCo2pFe2p_Ni_metal.txt", + "NiCoFe/Ni2pCo2pFe2p_NiO.txt", + "NiCoFe/Ni2pCo2pFe2p_Co_metal.txt", + "NiCoFe/Ni2pCo2pFe2p_CoO.txt", + "NiCoFe/Ni2pCo2pFe2p_Co3O4.txt", + "NiCoFe/Ni2pCo2pFe2p_Fe_metal.txt", + "NiCoFe/Ni2pCo2pFe2p_FeO.txt", + "NiCoFe/Ni2pCo2pFe2p_Fe3O4.txt", + "NiCoFe/Ni2pCo2pFe2p_Fe2O3.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":false, + "sim_ranges":{ + "shift_x":[ + -3, + 5 + ], + "noise":[ + 1, + 50 + ], + "FWHM":[ + 145, + 722 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ni_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ni_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..475ead10e18180f3cac7c333fc8b2b7f0018aa0e --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ni_core_small_gas_phase.json 
@@ -0,0 +1,60 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Ni_linear_combination_small_gas_phase", + "labels":[ + "Ni metal", + "NiO" + ], + "spectra":[ + "Ni2p Ni metal", + "Ni2p NiO" + ], + "input_filenames":{ + "reference_set_0":[ + "Ni2p_Ni_metal.txt", + "Ni2p_NiO.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Pd_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Pd_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..3de9b3fa888f6c997f0ae8645a7d2043916f2a9b --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Pd_core_small_gas_phase.json @@ -0,0 +1,60 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Pd_linear_combination_small_gas_phase", + "labels":[ + "Pd metal", + "PdO" + ], + "spectra":[ + "Pd3d Pd metal", + "Pd3d PdO" + ], + "input_filenames":{ + "reference_set_0":[ + "Pd3d_Pd_metal_narrow.txt", + "Pd3d_PdO_narrow.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + 
"ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } +} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ti_core_small_gas_phase.json b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ti_core_small_gas_phase.json new file mode 100644 index 0000000000000000000000000000000000000000..ccec543cc3fa73d085ae40119d1d42d324f630a2 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/params/init_params_Ti_core_small_gas_phase.json @@ -0,0 +1,66 @@ +{ + "output_datafolder":"C:\\Users\\pielsticker\\Simulations\\", + "name":"Ti_linear_combination_small_gas_phase", + "labels":[ + "Ti metal", + "TiO", + "Ti2O3", + "TiO2" + ], + "spectra":[ + "Ti2p Ti metal", + "Ti2p TiO", + "Ti2p Ti2O3", + "Ti2p TiO2" + ], + "input_filenames":{ + "reference_set_0":[ + "Ti2p_Ti_metal.txt", + "Ti2p_TiO.txt", + "Ti2p_Ti2O3.txt", + "Ti2p_TiO2.txt" + ] + }, + "no_of_simulations":250000, + "single":false, + "variable_no_of_inputs":true, + "always_auger":false, + "always_core":true, + "same_auger_core_percentage":true, + "ensure_same_length":false, + "eV_window":null, + "normalize_inputs":true, + "normalize_outputs":true, + "broaden":true, + "shift_x":true, + "noise":true, + "scatter":true, + "sim_ranges":{ + "shift_x":[ + -3, + 3 + ], + "noise":[ + 3, + 35 + ], + "FWHM":[ + 0.5, + 2.0 + ], + "scatterers":{ + "0":"He", + "1":"H2", + "2":"N2", + "3":"O2" + }, + "pressure":[ + 0.1, + 0.5 + ], + "distance":[ + 0.1, + 1 + ] + } 
+} \ No newline at end of file diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/run.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/run.py new file mode 100644 index 0000000000000000000000000000000000000000..2ba514e121b15345414208288bc2b767a4ed8535 --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/run.py @@ -0,0 +1,142 @@ +# +# Copyright the xpsdeeplearning authors. +# +# This file is part of xpsdeeplearning. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +This script is used to simulate many spectra using the Creator class +and save the data to a HDF5 file, along with the metadata in a JSON +file. 
+""" + +from time import time +import os +import sys +import json +import pandas as pd +import click + +from xpsdeeplearning.simulation.creator import ( + Creator, + FileWriter, + calculate_runtime, +) + + +def simulate( + param_file: str, reload_from_previous_folder: str = None, plot: bool = True +): + """Create multiple sets of similar spectra with the same settings.""" + with open(param_file, "r") as file: + params = json.load(file) + params["init_param_filepath"] = param_file + + creator = Creator(params) + + if reload_from_previous_folder: + pkl_filename = "" + dataset_name = "" + pkl_filepath = os.path.join(reload_from_previous_folder, pkl_filename) + + creator.df = pd.read_pickle(pkl_filepath) + + # Reload parameters . + param_filename = dataset_name + "_metadata.json" + param_filepath = os.path.join(reload_from_previous_folder, param_filename) + + with open(param_filepath, "r") as param_file_reload: # type: ignore + creator.params = json.load(param_file_reload) # type: ignore + + t0_run = time() + _ = creator.run() + t1_run = time() + + if plot: + creator.plot_random(10) + + t0_save = time() + writer = FileWriter(creator.df, creator.params) + writer.to_file(filetypes=["pickle"], metadata=False) + writer.to_file(filetypes=["hdf5"], metadata=True) + t1_save = time() + + os.remove(writer.pkl_filepath) + print(f"Runtime: {calculate_runtime(t0_run, t1_run)}.") + print(f"HDF5 save runtime: {calculate_runtime(t0_save, t1_save)}.") + + +@click.command() +@click.option( + "--param-file", + default=None, + required=True, + help="The path to the input parameter file to read.", +) +@click.option( + "--reload-from-previous-folder", + default=None, + help="The path to a previous run which is to be continued.", +) +def simulate_cli( + param_file: str, + reload_from_previous_folder: str, +): + """The CLI entrypoint for the convert function""" + try: + simulate(param_file, reload_from_previous_folder, plot=False) + except KeyError as exc: + sys.tracebacklimit = 0 + raise 
KeyError( + ( + "Please make sure you have these entries in your " + "parameter file:\n" + "output_datafolder: str\n" + "name: str\n" + "labels: list\\n" + "spectra: list\n" + "input_filenames: Dict\n" + "no_of_simulations: int\n" + "single: bool\n" + "variable_no_of_inputs: bool\n" + "always_auger: bool\n" + "always_core: bool\n" + "same_auger_core_percentage: bool\n" + "ensure_same_length: bool\n" + "eV_window: int\n" + "normalize_inputs: bool\n" + "normalize_outputs: bool\n" + "broaden: bool\n" + "shift_x: bool\n" + "noise: bool\n" + "scatter: bool\n" + "sim_ranges: Dict with the keys\n" + "\t shift_x: List[float]\n" + "\t noise: List[int]\n" + "\t FWHM: List[float]\n" + "\t scatterers: Dict[int, str]\n" + "\t pressure: List[float]\n" + "\t distance: List[float]\n" + ) + ) from exc + + +if __name__ == "__main__": + working_dir = os.path.join(os.path.abspath(__file__).split("deepxps")[0], "deepxps") + os.chdir(working_dir) + # Change the following two lines according to your folder structure ### + init_param_folder = r"C:/Users/pielsticker/Lukas/MPI-CEC/Projects/deepxps/xpsdeeplearning/xpsdeeplearning/simulation/params/" + init_param_filename = "init_params_Co_core_small_gas_phase.json" + init_param_filepath = os.path.join(init_param_folder, init_param_filename) + simulate(init_param_filepath) diff --git a/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/sim.py b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/sim.py new file mode 100644 index 0000000000000000000000000000000000000000..8def58418807b39b9090b24601443a84b8698c5e --- /dev/null +++ b/material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/xpsdeeplearning/simulation/sim.py @@ -0,0 +1,314 @@ +# +# Copyright the xpsdeeplearning authors. +# +# This file is part of xpsdeeplearning. 
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Simulate a single spectrum from reference spectra.
"""

import numpy as np

from xpsdeeplearning.simulation.base_model.figures import Figure
from xpsdeeplearning.simulation.base_model.spectra import SimulatedSpectrum


class Simulation:
    """Basic class for simulating a spectrum from input spectra."""

    def __init__(self, input_spectra):
        """
        Initialize the input spectra and an empty SimulatedSpectrum.

        The x-range for the output spectrum is originally the same as
        the first core-level input spectrum.

        The main methods for simulation are:
        - Linear combination of the input spectra
        - changes to the resolution, S/N ratio and x-axis of a spectrum
          as well as simulation of gas phase scattering
        - Plotting of the input and the simulated spectrum

        Parameters
        ----------
        input_spectra : list
            List of instances of the MeasuredSpectrum class.

        Raises
        ------
        ValueError
            If no input spectrum has spectrum_type "core_level"
            (the output axes are always taken from a core level).

        Returns
        -------
        None.

        """
        self.input_spectra = input_spectra
        self.core_spectra = [
            s for s in self.input_spectra if s.spectrum_type == "core_level"
        ]
        self.auger_spectra = [
            s for s in self.input_spectra if s.spectrum_type == "auger"
        ]

        # Initialize the axes and label from the first core-level
        # spectrum.
        # BUGFIX: the original loop used next(iter(input_spectra)),
        # which always re-yields the FIRST element and therefore never
        # advanced -> infinite loop whenever the first input spectrum
        # was not a core level. Use the precomputed core_spectra list
        # and fail loudly if it is empty.
        if not self.core_spectra:
            raise ValueError(
                "At least one input spectrum must have "
                "spectrum_type 'core_level'."
            )
        input_spectrum = self.core_spectra[0]

        start = input_spectrum.start
        stop = input_spectrum.stop
        step = input_spectrum.step
        label = ""

        self.output_spectrum = SimulatedSpectrum(start, stop, step, label)

    def combine_linear(self, scaling_params):
        """
        Perform a linear combination of the input spectra.

        Each spectrum is scaled by a parameter in the range of [0,1].
        All scaling parameters have to add up to 1.

        Parameters
        ----------
        scaling_params : list
            Parameter list of float values to scale the input spectra.
            The length of the scaling_params list has to be the same
            as the number of spectra for the linear combination
            (core levels plus auger spectra).

        Returns
        -------
        None.

        """
        # BUGFIX: copy the list. The original code aliased
        # self.core_spectra and then extended it, so repeated calls
        # accumulated shifted auger spectra inside self.core_spectra.
        sim_spectra = list(self.core_spectra)

        shifted_auger_spectra = self._position_augers_randomly(
            self.output_spectrum.x, self.auger_spectra
        )
        sim_spectra.extend(shifted_auger_spectra)

        # Make sure that the right amount of params is given.
        if len(sim_spectra) < len(scaling_params):
            print("Please supply the correct amount of scaling parameters.")
            print("Simulated spectrum was not changed!")

        elif len(sim_spectra) > len(scaling_params):
            print("Please supply enough scaling parameters.")
            print("Simulated spectrum was not changed!")

        else:
            self.output_spectrum.label = {}
            # Rounding to one decimal keeps the original tolerance for
            # floating-point sums of the scaling parameters.
            if np.round(sum(scaling_params), decimals=1) == 1.0:
                output_list = []

                for sim_spectrum, concentration in zip(
                    sim_spectra, scaling_params
                ):
                    # Species = first key of the input spectrum's label.
                    species = next(iter(sim_spectrum.label))
                    output_list.append(sim_spectrum.lineshape * concentration)

                    # For each species, the label gets a new key:value
                    # pair of the format species: concentration.
                    self.output_spectrum.label[species] = concentration

                # Linear combination.
                self.output_spectrum.lineshape = sum(output_list)

            else:
                print("Scaling parameters have to sum to 1!")
                print("Simulated spectrum was not changed!")

    def _position_augers_randomly(self, x, auger_spectra, shift_x=None):
        """
        Randomly position auger spectra on the x-axis ``x``.

        Parameters
        ----------
        x : np.ndarray
            Target x-axis (the output spectrum's grid) on which the
            auger spectra are placed.
        auger_spectra : list
            A list of MeasuredSpectrum objects of
            spectrum_type "auger".
        shift_x : float, optional
            By how many eV the spectra shall be shifted. If None, the
            spectra are shifted by a random amount within the window.
            The default is None.

        Returns
        -------
        shifted_auger_spectra : list
            List of SimulatedSpectrum objects, one per input auger
            spectrum, shifted horizontally.

        """
        start = np.min(x)
        stop = np.max(x)

        # Derive the (non-zero) step width of the grid and drop
        # duplicate grid points.
        diff = np.abs(np.subtract(x, np.roll(x, -1)))
        step = np.round(np.min(diff[diff != 0]), 3)
        x = x[diff != 0]

        # The maximum shift keeps the spectrum inside the window.
        max_shift = int(np.max(x) - np.mean(x))
        shift_range = np.arange(-max_shift, max_shift, step)

        # BUGFIX: test `is None`. An explicit shift_x of 0 previously
        # triggered a random shift because 0 is falsy.
        if shift_x is None:
            # Shift by a random amount. (The no-op np.round of the
            # random integer index was removed.)
            idx = np.random.randint(0, len(shift_range))
            shift_x = np.round(shift_range[idx], 3)

        # Sub-step shifts are treated as no shift at all.
        if -step < shift_x < step:
            shift_x = 0

        shifted_auger_spectra = []

        for auger_spectrum in auger_spectra:
            shifted_auger_spectrum = SimulatedSpectrum(
                start, stop, step, auger_spectrum.label
            )

            # Position the auger spectrum in the middle of the window.
            # NOTE(review): this assumes mean(x) lies exactly on the
            # grid; np.where returns an empty array otherwise and the
            # int() cast raises -- confirm upstream guarantees this.
            m = int(np.where(auger_spectrum.x == np.mean(auger_spectrum.x))[0])
            n = int(np.where(x == np.mean(x))[0])
            if n > m:
                # Window is wider than the auger spectrum: center it
                # and pad both sides with the edge intensities.
                shifted_auger_spectrum.lineshape[n - m : n + m] = (
                    auger_spectrum.lineshape[: 2 * m]
                )
                shifted_auger_spectrum.lineshape[: n - m] = (
                    auger_spectrum.lineshape[0]
                )
                shifted_auger_spectrum.lineshape[n + m :] = (
                    auger_spectrum.lineshape[2 * m]
                )
            elif n < m:
                # Window is narrower: crop the auger spectrum around
                # its center.
                shifted_auger_spectrum.lineshape = auger_spectrum.lineshape[
                    m - n : n + m + 1
                ]

            shifted_auger_spectrum.shift_horizontal(shift_x)
            shifted_auger_spectra.append(shifted_auger_spectrum)

        return shifted_auger_spectra

    def change_spectrum(self, spectrum=None, **kwargs):
        """
        Simulate artificial changes on a SimulatedSpectrum object.

        Parameters
        ----------
        spectrum : Spectrum, optional
            A Spectrum object can be supplied if one wants to change a
            single input spectrum and not change a spectrum that was
            already created using a linear combination.
            If spectrum is None, then the current output spectrum is
            changed. The default is None.
        **kwargs :
            fwhm: int
                To perform a convolution of the spectrum with a
                Gaussian with FWHM = fwhm/mean(x) where x is the
                x-axis of the spectrum.
            signal_to_noise: int
                To add poisson-distributed noise to the spectrum.
                Signal-to-noise describes the S/N ratio of the
                resulting spectrum.
            shift_x: int
                To shift the spectrum by some eV.
            scatterer: dict
                To simulate scattering in a scattering medium defined
                in the dictionary of the format {"label" : str,
                                                 "distance" : float,
                                                 "pressure" : float}.
                "label" is the name of the scatterer.
                Allowed values: "default", "H2", "He", "O2", "N2."

        Returns
        -------
        None.

        """
        if spectrum is not None:
            # The step width is defined by the measured spectrum.
            # The output spectrum needs to have its x-axis redefined
            # accordingly.
            self.output_spectrum.lineshape = spectrum.lineshape
            start = spectrum.start
            stop = spectrum.stop
            step = spectrum.step
            # NOTE(review): this stores the label on the Simulation
            # object itself, not on self.output_spectrum -- confirm
            # that is intended.
            self.label = spectrum.label
            self.output_spectrum.x = np.flip(np.arange(start, stop + step, step))

        if "fwhm" in kwargs:
            self.output_spectrum.resolution = kwargs["fwhm"]
            self.output_spectrum.change_resolution(kwargs["fwhm"])

        if "shift_x" in kwargs:
            self.output_spectrum.shift_x = kwargs["shift_x"]
            self.output_spectrum.shift_horizontal(kwargs["shift_x"])

        if "signal_to_noise" in kwargs:
            self.output_spectrum.signal_to_noise = kwargs["signal_to_noise"]
            self.output_spectrum.add_noise(kwargs["signal_to_noise"])

        if "scatterer" in kwargs:
            scatter_dict = kwargs["scatterer"]
            self.output_spectrum.scatterer = scatter_dict["label"]
            self.output_spectrum.distance = scatter_dict["distance"]
            self.output_spectrum.pressure = scatter_dict["pressure"]

            self.output_spectrum.scatter_in_gas(
                scatter_dict["label"],
                scatter_dict["distance"],
                scatter_dict["pressure"],
            )

    def plot_simulation(self, plot_inputs=False):
        """
        Create Figure objects for the output spectrum.

        Optionally, the input spectra can also be plotted.

        Parameters
        ----------
        plot_inputs : bool, optional
            If plot_inputs, the input spectra are also plotted.
            Otherwise, only the output spectrum is plotted.
            The default is False.

        Returns
        -------
        None.

        """
        if plot_inputs:
            figs_input = []
            for spectrum in self.input_spectra:
                x = spectrum.x
                y = spectrum.lineshape
                title = next(iter(spectrum.label))
                fig_input = Figure(x, y, title=title)
                figs_input.append(fig_input)

        Figure(
            self.output_spectrum.x,
            self.output_spectrum.lineshape,
            title=self.output_spectrum.spectrum_type,
        )