id
stringlengths
1
265
text
stringlengths
6
5.19M
dataset_id
stringclasses
7 values
/Mopidy-Mixcloud-3.1415.9265.tar.gz/Mopidy-Mixcloud-3.1415.9265/mopidy_mixcloud/util.py
from .cache import Cache
import time
from mopidy.models import Image


class LocalData:
    """In-memory state for the Mixcloud extension: the configured users/tags
    plus a set of per-URI caches for Mixcloud API results."""

    def __init__(self):
        self.users = []           # currently browsable user names
        self.default_users = []   # user names taken from the config file
        self.tags = []            # currently browsable tags
        self.default_tags = []    # tags taken from the config file
        self.search_max = 20      # maximum number of search results
        self.images = Cache()     # uri -> [Image]
        self.tracks = Cache()     # uri -> Track
        self.refs = Cache()       # uri -> list of Ref
        self.searches = Cache()   # uri -> SearchResult
        self.playlists = Cache()  # uri -> Playlist
        self.lookup = Cache()     # uri -> [Track]
        self.urls = Cache()       # uri -> url
        self.refresh_period = 600   # seconds between cache refreshes (10 min)
        self.last_refresh_time = 0  # epoch seconds of last refresh; 0 = never
        self.ignore_exclusive = True

    def from_config(self, config):
        """Populate users, tags and limits from the 'mixcloud' config section."""
        cfg = config['mixcloud']['users']
        if cfg:
            self.default_users = [x.strip() for x in cfg.split(',')]
        else:
            self.default_users = []
        self.users = self.default_users[:]

        cfg = config['mixcloud']['tags']
        if cfg:
            self.default_tags = [x.strip() for x in cfg.split(',')]
        else:
            self.default_tags = []
        self.tags = self.default_tags[:]

        cfg = config['mixcloud']['search_max']
        if cfg:
            self.search_max = cfg
        cfg = config['mixcloud']['refresh_period']
        if cfg:
            self.refresh_period = cfg
        cfg = config['mixcloud']['ignore_exclusive']
        if cfg is not None:
            self.ignore_exclusive = cfg

    def refresh(self):
        """Clear the caches if they were never filled or are older than
        ``refresh_period`` seconds."""
        t = time.time()
        # BUGFIX: the original tested ``t == 0`` — but ``t`` is the *current*
        # time, which is never 0.  The intended "never refreshed yet" guard is
        # ``last_refresh_time == 0``.
        if self.last_refresh_time == 0 or (t - self.last_refresh_time) > self.refresh_period:
            self.last_refresh_time = t
            self.clear()

    def clear(self, reset_users=False):
        """Empty every result cache; optionally also reset users/tags back to
        the configured defaults.

        The ``images`` cache is deliberately left untouched, matching the
        original behaviour (presumably thumbnails rarely change — TODO confirm).
        """
        self.tracks.clear()
        self.refs.clear()
        self.searches.clear()
        self.playlists.clear()
        self.lookup.clear()
        self.urls.clear()
        if reset_users:
            self.users = self.default_users[:]
            self.tags = self.default_tags[:]

    # thumbnails have a bit of a fiddly handling
    def add_thumbnail(self, jsdict, uri):
        """Pick the best available picture out of a Mixcloud JSON dict, cache
        it under ``uri`` and return the (possibly empty) image list."""
        pics = []
        pictures = jsdict.get('pictures')
        if pictures:
            # Same preference order as the original elif chain:
            # medium first, then large, then thumbnail.
            for size in ('medium', 'large', 'thumbnail'):
                if size in pictures:
                    pics.append(Image(uri=pictures[size]))
                    break
        if pics:
            self.images.add(uri, pics)
        return pics


class MixcloudException(Exception):
    """Extension-specific error carrying an arbitrary ``parameter`` payload."""

    def __init__(self, value):
        self.parameter = value

    def __str__(self):
        return repr(self.parameter)
PypiClean
/Buildpan_CLI-1.0-py3-none-any.whl/buildpan/bitbucket_webhook.py
import datetime

import requests

from buildpan import create_file, installer
from buildpan import setting, access_token

info = setting.info
fetch_log = info["FETCH_LOG_URL"]
host = info["HOST"]


def bitbucket(project_id, path, refresh_token, username, repo_name, provider):
    """Ensure a Buildpan webhook exists on ``username/repo_name``.

    Creates the hook when none points at our payload URL, otherwise updates
    the matching hook in place; in both cases logs the outcome to the fetch
    log and runs the installer / config-file creation steps.
    """
    # Defined before the try block so the except handler can always log a
    # timestamp (originally it was assigned inside try and could be unbound).
    curtime = datetime.datetime.now()
    try:
        print("\nCreating a webhook...")
        token = access_token.access_token(refresh_token)
        header = {
            "Authorization": 'Bearer ' + token,
            'Content-Type': 'application/json',
        }

        # creating a webhook on a particular repository
        hook_url = f"https://api.bitbucket.org/2.0/repositories/{username}/{repo_name}/hooks"
        payload_url = f"http://{host}/bit_webhook"
        response3 = requests.get(hook_url, headers=header)
        data = response3.json()['values']
        hook_body = {
            "description": "Webhook Description",
            "url": payload_url,
            "active": True,
            "events": [
                "repo:push",
                "repo:commit_comment_created",
            ],
        }

        # Look for a hook already pointing at our payload URL.
        existing = next((value for value in data if value["url"] == payload_url), None)

        if existing is None:
            # BUGFIX: the original only acted inside ``for value in data``, so
            # a repository with *no* hooks never got one created, and a
            # duplicate was created whenever the first listed hook happened
            # not to match even though a matching hook existed further on.
            requests.post(hook_url, headers=header, json=hook_body)
            print("Creating webhook done")
            message = 'webhook created for repository - ' + repo_name
        else:
            # BUGFIX: update the uuid of the *matching* hook (the original
            # always read ``data[0]["uuid"]``).  [1:-1] strips the braces.
            uuid = existing["uuid"][1:-1]
            requests.put(hook_url + f"/{uuid}", headers=header, json=hook_body)
            print("Webhook already exists")
            message = 'Exception: webhook already exists for repository - ' + repo_name

        requests.post(fetch_log + "?"
                      + 'project_id=' + project_id + '&repo_name=' + repo_name
                      + '&Time=' + str(curtime) + '&user_name=' + username
                      + '&message=' + message
                      + '&status=success&operation=webhook')
        installer.installer(project_id, repo_name, username)
        # create file
        create_file.create_file(project_id, repo_name, path, username, provider)
        print("\nInit operation done.")
    except Exception as e:
        print("Initialization failed")
        requests.post(fetch_log + "?"
                      + 'project_id=' + project_id + '&repo_name=' + repo_name
                      + '&Time=' + str(curtime) + '&user_name=' + username
                      + '&message=Exception: webhook creation failed for repository - '
                      + repo_name + '. ' + str(e)
                      + '&status=failed&operation=webhook')
PypiClean
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/typescript/lib/lib.es2020.date.d.ts
/// <reference lib="es2020.intl" />

interface Date {
    /**
     * Converts a date and time to a string by using the current or specified locale.
     * @param locales A locale string, array of locale strings, Intl.Locale object, or array of Intl.Locale objects that contain one or more language or locale tags. If you include more than one locale string, list them in descending order of priority so that the first entry is the preferred locale. If you omit this parameter, the default locale of the JavaScript runtime is used.
     * @param options An object that contains one or more properties that specify comparison options.
     */
    toLocaleString(locales?: Intl.LocalesArgument, options?: Intl.DateTimeFormatOptions): string;

    /**
     * Converts a date to a string by using the current or specified locale.
     * @param locales A locale string, array of locale strings, Intl.Locale object, or array of Intl.Locale objects that contain one or more language or locale tags. If you include more than one locale string, list them in descending order of priority so that the first entry is the preferred locale. If you omit this parameter, the default locale of the JavaScript runtime is used.
     * @param options An object that contains one or more properties that specify comparison options.
     */
    toLocaleDateString(locales?: Intl.LocalesArgument, options?: Intl.DateTimeFormatOptions): string;

    /**
     * Converts a time to a string by using the current or specified locale.
     * @param locales A locale string, array of locale strings, Intl.Locale object, or array of Intl.Locale objects that contain one or more language or locale tags. If you include more than one locale string, list them in descending order of priority so that the first entry is the preferred locale. If you omit this parameter, the default locale of the JavaScript runtime is used.
     * @param options An object that contains one or more properties that specify comparison options.
     */
    toLocaleTimeString(locales?: Intl.LocalesArgument, options?: Intl.DateTimeFormatOptions): string;
}
PypiClean
/Mongo_Thingy-0.17.0-py3-none-any.whl/mongo_thingy/__init__.py
"""Mongo-Thingy: a thin sync/async ODM layer on top of pymongo and motor."""
import asyncio
import warnings
from collections.abc import Mapping

from pymongo import MongoClient, ReturnDocument
from pymongo.errors import ConfigurationError
from thingy import DatabaseThingy, classproperty, registry

from mongo_thingy.cursor import AsyncCursor, Cursor

# Motor (async client) is optional; fall back to None when not installed.
try:
    from motor.motor_tornado import MotorClient
except ImportError:  # pragma: no cover
    MotorClient = None

try:
    from motor.motor_asyncio import AsyncIOMotorClient
except ImportError:  # pragma: no cover
    AsyncIOMotorClient = None


class ThingyList(list):
    """A list of query results with convenience helpers."""

    def distinct(self, key):
        """Return the deduplicated values of ``key`` across all items."""
        def __get_value(item):
            if isinstance(item, BaseThingy):
                item = item.view()
            return item.get(key)

        values = set(__get_value(item) for item in self)
        return list(values)

    def view(self, name="defaults"):
        """Apply the named view to every item; all items must be Thingies."""
        def __view(item):
            if not isinstance(item, BaseThingy):
                raise TypeError(f"Can't view type {type(item)}.")
            return item.view(name)

        return [__view(item) for item in self]


class BaseThingy(DatabaseThingy):
    """Represents a document in a collection"""

    _client = None
    _client_cls = None
    _collection = None
    _collection_name = None
    _cursor_cls = None
    _result_cls = ThingyList

    # "table" is DatabaseThingy vocabulary; map it onto "collection".
    @classproperty
    def _table(cls):
        return cls._collection

    @classproperty
    def _table_name(cls):
        return cls._collection_name

    @classproperty
    def table_name(cls):
        return cls.collection_name

    @classproperty
    def collection(cls):
        return cls.get_collection()

    @classproperty
    def collection_name(cls):
        return cls.get_table_name()

    @classproperty
    def client(cls):
        return cls.get_client()

    @classmethod
    def _get_client(cls, database):
        return database.client

    @classmethod
    def _get_database(cls, collection, name):
        """Resolve the database from a collection, or from the client by name."""
        if collection is not None:
            return collection.database
        if cls._client and name:
            return cls._client[name]
        raise AttributeError("Undefined database.")

    @classmethod
    def _get_table(cls, database, table_name):
        return database[table_name]

    @classmethod
    def _get_database_name(cls, database):
        return database.name

    @classmethod
    def _get_table_name(cls, table):
        return table.name

    @classmethod
    def get_client(cls):
        if cls._client:
            return cls._client
        return cls._get_client(cls.database)

    @classmethod
    def get_collection(cls):
        return cls.get_table()

    @classmethod
    def add_index(cls, keys, **kwargs):
        """Register an index to be created later by ``create_indexes``."""
        kwargs.setdefault("background", True)
        if not hasattr(cls, "_indexes"):
            cls._indexes = []
        cls._indexes.append((keys, kwargs))

    @classmethod
    def count_documents(cls, filter=None, *args, **kwargs):
        if filter is None:
            filter = {}
        return cls.collection.count_documents(filter, *args, **kwargs)

    @classmethod
    def count(cls, filter=None, *args, **kwargs):
        """Deprecated alias for :meth:`count_documents`."""
        warnings.warn(
            "count is deprecated. Use count_documents instead.", DeprecationWarning
        )
        return cls.count_documents(filter=filter, *args, **kwargs)

    @classmethod
    def connect(cls, *args, client_cls=None, database_name=None, **kwargs):
        """Create the client and select a database ("test" as fallback)."""
        if not client_cls:
            client_cls = cls._client_cls
        cls._client = client_cls(*args, **kwargs)
        try:
            cls._database = cls._client.get_database(database_name)
        except (ConfigurationError, TypeError):
            # No default database in the URI and no explicit name given.
            cls._database = cls._client["test"]

    @classmethod
    def disconnect(cls, *args, **kwargs):
        if cls._client:
            cls._client.close()
        cls._client = None
        cls._database = None

    @classmethod
    def distinct(cls, *args, **kwargs):
        return cls.collection.distinct(*args, **kwargs)

    @classmethod
    def find(cls, *args, view=None, **kwargs):
        """Return a Thingy-aware cursor wrapping the raw pymongo cursor."""
        delegate = cls.collection.find(*args, **kwargs)
        return cls._cursor_cls(delegate, thingy_cls=cls, view=view)

    @classmethod
    def find_one(cls, filter=None, *args, **kwargs):
        # A non-mapping filter is treated as an _id value.
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        cursor = cls.find(filter, *args, **kwargs)
        return cursor.first()

    @classmethod
    def delete_many(cls, filter=None, *args, **kwargs):
        return cls.collection.delete_many(filter, *args, **kwargs)

    @classmethod
    def delete_one(cls, filter=None, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        return cls.collection.delete_one(filter, *args, **kwargs)

    @classmethod
    def update_many(cls, filter, update, *args, **kwargs):
        return cls.collection.update_many(filter, update, *args, **kwargs)

    @classmethod
    def update_one(cls, filter, update, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        return cls.collection.update_one(filter, update, *args, **kwargs)

    @property
    def id(self):
        # Prefer an explicit "id" attribute, falling back to Mongo's _id.
        return self.__dict__.get("id") or self._id

    @id.setter
    def id(self, value):
        if "id" in self.__dict__:
            self.__dict__["id"] = value
        else:
            self._id = value

    def delete(self):
        return self.get_collection().delete_one({"_id": self.id})


class Thingy(BaseThingy):
    """Synchronous Thingy backed by pymongo's :class:`MongoClient`."""

    _client_cls = MongoClient
    _cursor_cls = Cursor

    @classmethod
    def create_index(cls, keys, **kwargs):
        cls.collection.create_index(keys, **kwargs)

    @classmethod
    def create_indexes(cls):
        """Create every index registered through ``add_index``."""
        if hasattr(cls, "_indexes"):
            for keys, kwargs in cls._indexes:
                cls.create_index(keys, **kwargs)

    @classmethod
    def find_one_and_replace(cls, filter, replacement, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        kwargs.setdefault("return_document", ReturnDocument.AFTER)
        result = cls.collection.find_one_and_replace(
            filter, replacement, *args, **kwargs
        )
        if result is not None:
            return cls(result)

    @classmethod
    def find_one_and_update(cls, filter, update, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        kwargs.setdefault("return_document", ReturnDocument.AFTER)
        result = cls.collection.find_one_and_update(filter, update, *args, **kwargs)
        if result is not None:
            return cls(result)

    def save(self, force_insert=False, refresh=False):
        """Upsert (or insert) this document; optionally reload it afterwards."""
        data = self.__dict__
        collection = self.get_collection()
        if self.id is not None and not force_insert:
            filter = {"_id": self.id}
            collection.replace_one(filter, data, upsert=True)
        else:
            collection.insert_one(data)
        if refresh:
            self.__dict__ = collection.find_one(self.id)
        return self


class AsyncThingy(BaseThingy):
    """Asynchronous Thingy backed by a motor client (tornado or asyncio)."""

    _client_cls = MotorClient or AsyncIOMotorClient
    _cursor_cls = AsyncCursor

    @classmethod
    async def create_index(cls, keys, **kwargs):
        await cls.collection.create_index(keys, **kwargs)

    @classmethod
    async def create_indexes(cls):
        """Create every index registered through ``add_index``."""
        if hasattr(cls, "_indexes"):
            for keys, kwargs in cls._indexes:
                await cls.create_index(keys, **kwargs)

    @classmethod
    async def find_one_and_replace(cls, filter, replacement, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        kwargs.setdefault("return_document", ReturnDocument.AFTER)
        result = await cls.collection.find_one_and_replace(
            filter, replacement, *args, **kwargs
        )
        if result is not None:
            return cls(result)

    @classmethod
    async def find_one_and_update(cls, filter, update, *args, **kwargs):
        if filter is not None and not isinstance(filter, Mapping):
            filter = {"_id": filter}
        kwargs.setdefault("return_document", ReturnDocument.AFTER)
        result = await cls.collection.find_one_and_update(
            filter, update, *args, **kwargs
        )
        if result is not None:
            return cls(result)

    async def save(self, force_insert=False, refresh=False):
        """Upsert (or insert) this document; optionally reload it afterwards."""
        data = self.__dict__
        collection = self.get_collection()
        if self.id is not None and not force_insert:
            filter = {"_id": self.id}
            await collection.replace_one(filter, data, upsert=True)
        else:
            await collection.insert_one(data)
        if refresh:
            self.__dict__ = await collection.find_one(self.id)
        return self


def connect(*args, **kwargs):
    """Connect both the async (if motor is available) and sync clients."""
    if AsyncThingy._client_cls is not None:
        AsyncThingy.connect(*args, **kwargs)
    Thingy.connect(*args, **kwargs)


def disconnect(*args, **kwargs):
    """Disconnect both the sync and async clients."""
    Thingy.disconnect(*args, **kwargs)
    AsyncThingy.disconnect(*args, **kwargs)


def create_indexes():
    """Create indexes registered on all :class:`Thingy`"""
    tasks = []
    for cls in registry:
        if issubclass(cls, Thingy):
            cls.create_indexes()
        if issubclass(cls, AsyncThingy):
            coroutine = cls.create_indexes()
            task = asyncio.create_task(coroutine)
            tasks.append(task)
    if tasks:
        # Caller is expected to await the returned asyncio.wait().
        return asyncio.wait(tasks)


__all__ = ["AsyncThingy", "Thingy", "connect", "create_indexes"]
PypiClean
/MVTS-Analyzer-0.0.2.tar.gz/MVTS-Analyzer-0.0.2/mvts_analyzer/windows/main_window.py
"""Main application window of the MVTS-Analyzer GUI."""
import importlib
import importlib.util
import logging
import os
import sys
import traceback
import typing

import matplotlib
from PySide6 import QtCore, QtGui, QtWidgets

import mvts_analyzer.res.app_resources_rc  # pylint: disable=unused-import #type: ignore
from mvts_analyzer.graphing.graph_data import GraphData
from mvts_analyzer.graphing.graph_settings_controller import \
    GraphSettingsController
from mvts_analyzer.graphing.graph_settings_model import GraphSettingsModel
from mvts_analyzer.graphing.graph_settings_view import GraphSettingsView
from mvts_analyzer.graphing.plotter.plot_wrapper import QPlotter
from mvts_analyzer.ui.main_window_ui import Ui_MainWindow
from mvts_analyzer.utility.gui_utility import create_qt_warningbox
from mvts_analyzer.windows.apply_python_window import ApplyPythonWindow
from mvts_analyzer.windows.merge_column_window import MergeColumnWindow
from mvts_analyzer.windows.rename_label_window import RenameLabelWindow

matplotlib.use('Qt5Agg')
log = logging.getLogger(__name__)


class MainWindow(QtWidgets.QMainWindow):
    """The main window from which all the other windows can be accessed"""

    def __init__(self,
            graph_model_args = None,
            graph_settings_model_args = None,
            settings_path = None,
            python_appliables_path = None,
            **kwargs
        ):
        """
        graph_model_args (dict) : Arguments to pass to the graph data model
        graph_settings_model_args (dict) : Arguments to pass to the graph settings model
            (e.g. what columns to show initially)
        settings_path (str) : Optional path to where the application settings should be stored,
            if None, use default loc
        python_appliables_path (str) : Optional path to where the python appliables are stored,
            if None, use default path
        """
        super(MainWindow, self).__init__(**kwargs)
        if graph_model_args is None:
            graph_model_args = {}
        if graph_settings_model_args is None:
            graph_settings_model_args = {}
        log.debug("Initializing main window")
        self.ui = Ui_MainWindow()  # pylint: disable=invalid-name
        self.ui.setupUi(self)

        #========= Settings ================
        settings_dir = settings_path
        if settings_path is not None:
            settings_dir = os.path.dirname(settings_path)
        if settings_path is None or not os.path.exists(settings_dir):  # type: ignore
            log.info("Loading/Saving settings from/to default location")
            self._settings = QtCore.QSettings("MVTS-Tools", "MVTS-Analyzer")
        else:
            log.info(f"Loading settings from {settings_path}")
            self._settings = QtCore.QSettings(settings_path, QtCore.QSettings.Format.IniFormat)

        # Restore window geometry/state from the previous session (if any).
        self.restoreGeometry(self._settings.value(
            "window_geometry", self.saveGeometry(), type=QtCore.QRect))  # type: ignore
        new_window_state = self._settings.value("window_state", self.windowState())
        if new_window_state != QtCore.Qt.WindowState.WindowNoState:
            self.restoreState(new_window_state)  # type: ignore

        if python_appliables_path is None:
            # No explicit path: take it from settings, else fall back to the
            # package-relative ../python_appliables folder.
            self._python_appliables_path : typing.Optional[str] = \
                self._settings.value("python_appliables_path", None)  # type: ignore
            if self._python_appliables_path is None:
                cur_path = os.path.dirname(os.path.realpath(__file__))
                self._python_appliables_path = os.path.join(cur_path, "..", "python_appliables")
        else:
            log.info(f"Loading settings from {python_appliables_path}")
            if not os.path.exists(python_appliables_path):
                raise FileNotFoundError(f"Could not find python appliables path {python_appliables_path}")
            elif not os.path.isdir(python_appliables_path):
                raise FileNotFoundError(f"Provided python appliables path {python_appliables_path} is not a directory")
            # NOTE(review): this branch validates python_appliables_path but
            # never assigns it to self._python_appliables_path — looks like a
            # missing assignment; verify against the original repository.

        #Launch in (semi) fullscreen mode
        #=========graph_tab=================
        self.graph_view_windows = []
        self.plot_widget = QtWidgets.QWidget()  #the main plot tab
        self.graph_data_model = GraphData(**graph_model_args)
        self.graph_settings_model = GraphSettingsModel(**graph_settings_model_args)  #Create model
        self.plotter = QPlotter(self.graph_data_model, self.graph_settings_model)
        self.graph_view = GraphSettingsView(self.plotter)  # Create View (using created plotter)
        self.setCentralWidget(self.graph_view)
        self.graph_controller = GraphSettingsController(
            self.graph_data_model, self.graph_settings_model, self.graph_view, self.plotter)
        self.menu_actions = []  # keeps QAction refs alive (Qt does not own them)

        #============== Python window ================
        self.apply_python_window = None
        self.label_rename_window = None
        self.label_column_merge_tool = None

        #=============== Toolbar buttons ====================
        self.ui.actionSave_As.triggered.connect(self.graph_controller.save_df_popup)
        self.ui.actionLoad_From_File.triggered.connect(self.graph_controller.load_df_popup)
        self.ui.actionQuit.triggered.connect(self.close)
        self.ui.actionRename_Label.triggered.connect(self.open_label_rename_window)
        self.ui.actionPython_Code.triggered.connect(self.open_python_window)
        self.ui.actionSave_Figure_As.triggered.connect(self.plotter.canvas.open_save_popup)
        self.ui.actionCopy_Figure_To_Clipboard.triggered.connect(self.graph_controller.copy_plot_to_clipboard)
        self.ui.actionAppend_From_File.triggered.connect(self.graph_controller.append_df_from_file)
        self.ui.actionOpenMergeLabelColumnWindow.triggered.connect(self.open_merge_label_column_window)
        self.ui.actionOpen_View_Copy.triggered.connect(lambda x: self.open_view_copy())
        self.ui.actionHide_All_But_Selection.triggered.connect(self.graph_data_model.hide_all_datapoints_except_selection)
        self.ui.actionHide_Selection.triggered.connect(self.graph_data_model.hide_selection)
        self.ui.actionUnhide_All.triggered.connect(self.graph_data_model.unhide_all_datapoints)
        self.ui.actionSwitch_Hidden.triggered.connect(self.graph_data_model.flip_hidden)
        self.ui.actionSave_Not_Hidden_Only_As.triggered.connect(self.graph_controller.save_df_not_hidden_only_popup)
        self.ui.actionReplot.triggered.connect(self.graph_controller.plotter_replot)
        # "Replot view" actions first update the limits, then replot.
        self.ui.actionReplot_View.triggered.connect(self.graph_controller.set_xlim_to_view)
        self.ui.actionReplot_View.triggered.connect(self.graph_controller.plotter_replot)
        self.ui.actionReplot_View_FFT.triggered.connect(self.graph_controller.set_fft_lim_to_view)
        self.ui.actionReplot_View_FFT.triggered.connect(self.graph_controller.plotter_replot)
        self.ui.actionReset_Domain.triggered.connect(self.graph_controller.reset_plot_domain)
        self.ui.actionReset_Domain.triggered.connect(self.graph_controller.plotter_replot)
        self.ui.actionReset_View_Settings.triggered.connect(self.graph_controller.reset_plot_settings)
        self.ui.actionReset_View_Settings.triggered.connect(self.graph_controller.plotter_replot)
        self.ui.actionSave_Selection_As.triggered.connect(self.graph_controller.save_df_selection_only_popup)

        #================ Live Window ==========
        self.live_window = None

        #================= Create Python appliable links
        self.recreate_python_appliable_menu()

    def get_python_appliables_path(self):
        """Return the path to the python appliables folder"""
        return self._python_appliables_path

    def set_python_appliables_path(self, new_path):
        """Set the path to the python appliables folder"""
        if new_path != self._python_appliables_path:  #If change
            self._python_appliables_path = new_path
            self.recreate_python_appliable_menu()  #Recreate the menu

    def save_settings(self):
        """ Save the app-settings to the settings file """
        log.info("Saving settings")
        self._settings.setValue("window_geometry", self.saveGeometry())
        self._settings.setValue("window_state", self.saveState())
        self._settings.setValue("python_appliables_path", self._python_appliables_path)

    def _rec_repopulate_python_appliable_menu(self,
            cur_path : str,
            cur_depth : int,
            cur_menu : QtWidgets.QMenu,
            max_depth : int = -1
        ) -> None:
        """Recursively mirror the appliables folder as nested menus.

        Directories become submenus, ``*.py`` files become actions that run
        the file via :meth:`run_python_appliable`.  ``max_depth`` of -1 means
        unlimited recursion depth.
        """
        if cur_depth > max_depth and max_depth != -1:
            return
        log.debug(f"Currently at {cur_path}")
        for cur_item in os.listdir(cur_path):
            path = os.path.join(cur_path, cur_item)
            if os.path.isdir(path) and cur_item != "__pycache__":  #Skip __pycache__ folder
                new_menu = QtWidgets.QMenu(cur_menu)  #Folder = Action menu
                new_menu.setTitle(cur_item)
                self._rec_repopulate_python_appliable_menu(
                    os.path.join(cur_path, cur_item), cur_depth=cur_depth+1, cur_menu=new_menu)
                cur_menu.addAction(new_menu.menuAction())
            else:
                if len(cur_item.rsplit(".", 1)) < 2 or cur_item.rsplit(".", 1)[1] != "py":  #Skip non-python cur_items
                    continue
                name = cur_item.rsplit(".", 1)[0]
                newaction = QtGui.QAction(name)
                self.menu_actions.append(newaction)
                cur_menu.addAction(newaction)
                # Bind path/name as defaults to avoid late-binding closure bugs.
                newaction.triggered.connect(
                    lambda *_, path=path, name=name: self.run_python_appliable(path))

    def popup_set_python_appliables_folder(self):
        """ Reloads the python appliable menu based on the selected folder """
        log.debug("Popup set python appliables folder")
        new_path = QtWidgets.QFileDialog.getExistingDirectory(self, "Select folder with python appliables")
        if new_path is None or new_path == "":
            return
        self._python_appliables_path = new_path
        self.recreate_python_appliable_menu()

    def recreate_python_appliable_menu(self):
        """ Reloads the python appliable menu based on the selected folder """
        self.menu_actions = []
        self.ui.menuPython_File.clear()
        self.ui.menuPython_File2.clear()
        log.debug("Recreating python appliables thingy")
        try:
            if self._python_appliables_path is None:
                raise ValueError("No python appliables path set")
            self._rec_repopulate_python_appliable_menu(
                self._python_appliables_path, cur_depth=0, cur_menu=self.ui.menuPython_File)
            self._rec_repopulate_python_appliable_menu(
                self._python_appliables_path, cur_depth=0, cur_menu=self.ui.menuPython_File2)
            self.ui.menuPython_File.addSeparator()
        except Exception as ex:  #pylint: disable=broad-except
            log.error(f"Error when repopulating appliables: {ex}")
        # The "Set Folder..." and "Refresh" entries are always appended, even
        # when populating from the folder failed above.
        set_folder_action = QtGui.QAction("Set Folder...")
        self.menu_actions.append(set_folder_action)
        icon = QtGui.QIcon(":/Icons/icons/Custom Icons/python-folder-open.svg")
        set_folder_action.setIcon(icon)
        self.ui.menuPython_File.addAction(set_folder_action)
        self.ui.menuPython_File2.addAction(set_folder_action)
        set_folder_action.triggered.connect(self.popup_set_python_appliables_folder)
        newaction = QtGui.QAction("Refresh")
        self.menu_actions.append(newaction)
        self.ui.menuPython_File.addAction(newaction)
        self.ui.menuPython_File2.addAction(newaction)
        icon = QtGui.QIcon(":/Icons/icons/Tango Icons/actions/view-refresh.svg")
        newaction.setIcon(icon)
        newaction.triggered.connect(self.recreate_python_appliable_menu)

    def run_python_appliable(self, path):
        """Run a python appliable """
        try:
            spec = importlib.util.spec_from_file_location("newmodule", path)  #Reload module each time
            if spec is None:
                raise ModuleNotFoundError("Could not load spec of python appliable (module).")
            module = importlib.util.module_from_spec(spec)
            # sys.modules["LoadedModule"] = module
            spec.loader.exec_module(module)  #type: ignore
            #Check if "apply"-function exists in module
            if hasattr(module, "apply"):
                module.apply(self.graph_data_model, self.graph_settings_model, self)
            else:
                # No apply() entry point: execute the raw source via the model.
                with open(path, encoding="utf-8") as pythonfile:
                    code = pythonfile.read()  #Load pythonfile
                self.graph_data_model.apply_python_code(code)
        except Exception as ex:  #pylint: disable=broad-exception-caught
            msg = f"Error during execution of appliable: {ex}"
            log.warning(msg)
            create_qt_warningbox(f"{msg} \n\n {traceback.format_exc()}", "Error during execution")
            log.warning(traceback.format_exc())

    def execute_python_executable(self, path):
        """Execute a python file as an executable in this context"""
        code = None
        try:
            with open(path, encoding="utf-8") as pythonfile:
                code = pythonfile.read()  #Load pythonfile
        except FileNotFoundError:
            create_qt_warningbox("File not found", f"Could not find file {path}")
            return
        success, msg = self.graph_data_model.apply_python_code(code)
        # NOTE(review): the message box below is configured but never shown
        # (no exec()/show() call), and the icons look swapped (Warning on
        # success, Information on failure) — verify intent.
        msgbox = QtWidgets.QMessageBox()
        msgbox.setText(f"{msg}")
        msgbox.setWindowTitle("Code Execution")
        if success:
            msgbox.setIcon(QtWidgets.QMessageBox.Icon.Warning)
        else:
            msgbox.setIcon(QtWidgets.QMessageBox.Icon.Information)

    def open_view_copy(self, graph_settings_model = None):
        """Open a copy of the current view in a new window"""
        # NOTE(review): this assert rejects any GraphSettingsModel instance
        # passed in — the ``not`` looks inverted; confirm against callers
        # (the only visible caller passes nothing).
        assert graph_settings_model is None or not isinstance(graph_settings_model, GraphSettingsModel)
        new_settings = GraphSettingsModel()
        new_settings.copy_attrs(self.graph_settings_model)
        new_view = self.graph_controller.open_view_window(
            self.graph_data_model, graph_settings_model=new_settings, parent=self
        )
        self.graph_view_windows.append(new_view)
        # self.view_window = QtWidgets.QMainWindow(self)
        # Size the new window to 70% of the primary screen and center it.
        screen_rect = QtGui.QGuiApplication.primaryScreen().geometry()
        screen_rect.setSize(QtCore.QSize(
            int(0.7* screen_rect.width()), int(0.7* screen_rect.height())))
        new_view.setGeometry(screen_rect)
        qt_rect = new_view.frameGeometry()
        cent_point = QtGui.QGuiApplication.primaryScreen().geometry().center()
        qt_rect.moveCenter(cent_point)
        new_view.move(qt_rect.topLeft())
        new_view.show()

    def open_label_rename_window(self):
        """Opens the label renaming tool-window"""
        log.info("Opening renaming tool")
        if self.label_rename_window:
            log.info("Window already exists")
            self.label_rename_window.window.setHidden(False)
            self.label_rename_window.window.show()
            # self.label_rename_window.window.activateWindow()
            self.label_rename_window.window.raise_()
            #Unminimize
            self.label_rename_window.window.setWindowState(
                self.label_rename_window.window.windowState()
                & ~QtCore.Qt.WindowState.WindowMinimized | QtCore.Qt.WindowState.WindowActive)
        else:
            self.label_rename_window = RenameLabelWindow(self.graph_data_model, parent=self)

    def open_merge_label_column_window(self):
        """Opens the label merging tool-window"""
        log.info("Opening column merging tool")
        if self.label_column_merge_tool:
            log.info("Window already exists")
            self.label_column_merge_tool.window.setHidden(False)
            self.label_column_merge_tool.window.show()
            self.label_column_merge_tool.window.raise_()
            self.label_column_merge_tool.window.setWindowState(
                self.label_column_merge_tool.window.windowState()
                & ~QtCore.Qt.WindowState.WindowMinimized | QtCore.Qt.WindowState.WindowActive)
        else:
            self.label_column_merge_tool = MergeColumnWindow(self.graph_data_model, parent=self)

    def open_python_window(self):
        """
        Opens the python-code window if it does not exist, else brings it to the front and unhides it
        """
        log.info("Now opening python window")
        if self.apply_python_window:
            log.info("Winodw already exists?")
            self.apply_python_window.setHidden(False)
            self.apply_python_window.show()
            self.apply_python_window.raise_()
            self.apply_python_window.setWindowState(
                self.apply_python_window.windowState()
                & ~QtCore.Qt.WindowState.WindowMinimized | QtCore.Qt.WindowState.WindowActive)
        else:
            self.apply_python_window = ApplyPythonWindow(self.graph_data_model, main_window=self)

    def closeEvent(self, a0: QtGui.QCloseEvent) -> None:
        """Overload default close event for a confirmation """
        # ConfirmationBox = QtGui.QMessageBox()
        quit_msg = "Are you sure you want to exit the program? All unsaved progress will be lost."
        ret = QtWidgets.QMessageBox.question(self, 'Confirm', quit_msg,
            QtWidgets.QMessageBox.StandardButton.Yes, QtWidgets.QMessageBox.StandardButton.No)
        if ret == QtWidgets.QMessageBox.StandardButton.Yes:
            log.info("Closing main window!")
            # closemain = True
            self.save_settings()  #Save settings
            self.close()
            log.info("Also attempting to close all graph views!")
            for wind in self.graph_view_windows:
                wind.close()
        else:
            a0.ignore()  #Else - do not close


if __name__=="__main__":
    app = QtWidgets.QApplication(sys.argv)
    mainwin = QtWidgets.QMainWindow()
    w = MainWindow()
    w.show()
    app.exec_()
    print("Done")
PypiClean
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/contenttypes/management/__init__.py
from django.apps import apps as global_apps
from django.db import DEFAULT_DB_ALIAS, IntegrityError, migrations, router, transaction


class RenameContentType(migrations.RunPython):
    """
    Migration operation that keeps the ContentType table in sync with a
    ``RenameModel`` operation by updating the stored model name in both
    directions (forward and backward).
    """

    def __init__(self, app_label, old_model, new_model):
        self.app_label = app_label
        self.old_model = old_model
        self.new_model = new_model
        # Delegate to RunPython with the two directional callables.
        super().__init__(self.rename_forward, self.rename_backward)

    def _rename(self, apps, schema_editor, old_model, new_model):
        """Rename the ContentType row for ``old_model`` to ``new_model``, if present."""
        ContentType = apps.get_model("contenttypes", "ContentType")
        db = schema_editor.connection.alias
        if not router.allow_migrate_model(db, ContentType):
            return

        try:
            content_type = ContentType.objects.db_manager(db).get_by_natural_key(
                self.app_label, old_model
            )
        except ContentType.DoesNotExist:
            # No stale row to rename; nothing to do.
            pass
        else:
            content_type.model = new_model
            try:
                with transaction.atomic(using=db):
                    content_type.save(using=db, update_fields={"model"})
            except IntegrityError:
                # Gracefully fallback if a stale content type causes a
                # conflict as remove_stale_contenttypes will take care of
                # asking the user what should be done next.
                content_type.model = old_model
            else:
                # Clear the cache as the `get_by_natural_key()` call will cache
                # the renamed ContentType instance by its old model name.
                ContentType.objects.clear_cache()

    def rename_forward(self, apps, schema_editor):
        """Apply the rename (old -> new)."""
        self._rename(apps, schema_editor, self.old_model, self.new_model)

    def rename_backward(self, apps, schema_editor):
        """Revert the rename (new -> old)."""
        self._rename(apps, schema_editor, self.new_model, self.old_model)


def inject_rename_contenttypes_operations(
    plan=None, apps=global_apps, using=DEFAULT_DB_ALIAS, **kwargs
):
    """
    Insert a `RenameContentType` operation after every planned `RenameModel`
    operation.
    """
    if plan is None:
        return

    # Determine whether or not the ContentType model is available.
    try:
        ContentType = apps.get_model("contenttypes", "ContentType")
    except LookupError:
        available = False
    else:
        if not router.allow_migrate_model(using, ContentType):
            return
        available = True

    for migration, backward in plan:
        if (migration.app_label, migration.name) == ("contenttypes", "0001_initial"):
            # There's no point in going forward if the initial contenttypes
            # migration is unapplied as the ContentType model will be
            # unavailable from this point.
            if backward:
                break
            else:
                available = True
                continue
        # The ContentType model is not available yet.
        if not available:
            continue
        inserts = []
        for index, operation in enumerate(migration.operations):
            if isinstance(operation, migrations.RenameModel):
                operation = RenameContentType(
                    migration.app_label,
                    operation.old_name_lower,
                    operation.new_name_lower,
                )
                inserts.append((index + 1, operation))
        # Insert in order; each earlier insertion shifts later indexes by one,
        # hence the `inserted + index` offset.
        for inserted, (index, operation) in enumerate(inserts):
            migration.operations.insert(inserted + index, operation)


def get_contenttypes_and_models(app_config, using, ContentType):
    """
    Return (existing content types keyed by model name, models keyed by model
    name) for ``app_config`` on database ``using``, or (None, None) when the
    router disallows migrating ContentType there.
    """
    if not router.allow_migrate_model(using, ContentType):
        return None, None

    ContentType.objects.clear_cache()

    content_types = {
        ct.model: ct
        for ct in ContentType.objects.using(using).filter(app_label=app_config.label)
    }
    app_models = {model._meta.model_name: model for model in app_config.get_models()}
    return content_types, app_models


def create_contenttypes(
    app_config,
    verbosity=2,
    interactive=True,
    using=DEFAULT_DB_ALIAS,
    apps=global_apps,
    **kwargs,
):
    """
    Create content types for models in the given app.
    """
    # Apps without a models module cannot define content types.
    if not app_config.models_module:
        return

    app_label = app_config.label
    try:
        app_config = apps.get_app_config(app_label)
        ContentType = apps.get_model("contenttypes", "ContentType")
    except LookupError:
        return

    content_types, app_models = get_contenttypes_and_models(
        app_config, using, ContentType
    )

    if not app_models:
        return

    # Only create rows for models that don't already have one.
    cts = [
        ContentType(
            app_label=app_label,
            model=model_name,
        )
        for (model_name, model) in app_models.items()
        if model_name not in content_types
    ]
    ContentType.objects.using(using).bulk_create(cts)
    if verbosity >= 2:
        for ct in cts:
            print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
PypiClean
/MongoPhile-0.1.0.tar.gz/MongoPhile-0.1.0/mongophile/parser.py
import pymongo import re import cli.log try: import json except: import simplejson as json from itertools import * from collections import defaultdict from ops import * insert_re = re.compile("^insert\s((.*)\.([0-9a-zA-Z$._]*))") query_re = re.compile("^query\s((.*)\.([0-9a-zA-Z$._]*))\s*(ntoreturn:(\d*))?(scanAndOrder\s)?\sreslen:(\d*)\snscanned:(\d*)\s*(query:\s({.*}))?\s*nreturned:(\d*)") update_re = re.compile("^update\s((.*)\.([0-9a-zA-Z$._]*))\s*query:\s({.*})\snscanned:(\d*)\s*(fastmod|fastmodinsert|upsert|moved)") cmd_re = re.compile("^query\s(.*)\.\\$cmd\sntoreturn:(\d*)\scommand:\s({.*})\sreslen:(\d*)") # TODO getmore class ProfilerParser(object): op_list = [] noops = 0 # Ops we don't care about(E.G. insert) badops = 0 # Ops we couldn't parse (even if they were noops) def __init__(self, app, data): """ Initialize the parser. DataHandle should be an iterable of Python dictionary representations of profiler log entries. A pymongo cursor to the profile collection works too. """ assert hasattr(data, "__iter__"), "Data must be an iterable object." self.app = app self.input_data = data app.log.debug("Loaded %d profile entries.", len(data)) for entry in data: obj = self.parseEntry(entry) if obj: self.op_list.append(obj) self.ordered_op_list = self.op_list self.op_list = sorted(self.op_list, lambda x, y: cmp(y.millis, x.millis)) app.log.info("Read %d Ops, and parsed %d properly. %d were NoOps (things we don't care about) and %d failed to parse." 
% (len(data), len(self.op_list), self.noops, self.badops)) self.ops = defaultdict(list) for k, g in groupby(self.op_list, key=lambda x: x.opType): self.ops[k].append(list(g)) def parseEntry(self, entry): self.app.log.debug("TS: %s Millis: %d", entry['ts'], entry['millis']) # parse the debug block info = entry['info'] if info.startswith("insert"): return self.insertParse(info, entry['ts'], entry['millis']) elif info.startswith("query"): self.app.log.debug("Query Op") if info.find("$cmd") >= 0: return self.cmdParse(info, entry['ts'], entry['millis']) else: return self.queryParse(info, entry['ts'], entry['millis']) elif info.startswith("update"): return self.updateParse(info, entry['ts'], entry['millis']) elif info.startswith("remove"): self.app.log.debug("remove op, ignored") self.noops += 1 else: self.app.log.warn("Unknown Op in %s", info) self.noops += 1 def insertParse(self, info, ts, millis): self.app.log.debug("Insert Op") data = insert_re.search(info) if data: db = data.group(2) coll = data.group(3) #self.app.log.debug("DB: %s Coll: %s", db, coll) self.noops += 1 else: self.app.log.info("Failed to match RegEx on insert '%s'", info) self.badops += 1 def cmdParse(self, info, ts, millis): self.app.log.debug("Command Op") data = cmd_re.search(info) if data: db = data.group(1) ntoreturn = data.group(2) command = data.group(3) reslen = data.group(4) self.app.log.debug("[Command] DB: %s NToReturn: %s Command: %s ResLen:%s", db, ntoreturn, command, reslen) return MongoCommand(self.app.log, ts, millis, db, ntoreturn, command, reslen) else: self.app.log.info("Failed to match RegEx on command '%s'" % info) self.badops += 1 def queryParse(self, info, ts, millis): data = query_re.search(info) if data: db = data.group(2) coll = data.group(3) ntoreturn = data.group(5) scanAndOrder = data.group(6) reslen = data.group(7) nscanned = data.group(8) query = data.group(10) nreturned = data.group(11) self.app.log.debug("[Query] DB: %s Coll: %s Scan And Order?: %s NToReturn: %s 
ResLen:%s NScanned:%s Query:%s NReturned:%s", db, coll, scanAndOrder, ntoreturn, reslen, nscanned, query, nreturned) return MongoQuery(self.app.log, ts, millis, db, coll, ntoreturn, scanAndOrder, reslen, nscanned, query, nreturned) else: self.app.log.info("Failed to match RegEx on query '%s'" % info) self.badops += 1 def updateParse(self, info, ts, millis): self.app.log.debug("Update Op") data = update_re.search(info) if data: db = data.group(2) coll = data.group(3) query = data.group(4) nscanned = data.group(5) opType = data.group(6) self.app.log.debug("[Update] DB: %s Coll: %s NScanned: %s OpType: %s Query: %s", db, coll, nscanned, opType, query) return MongoUpdate(self.app.log, ts, millis, db, coll, query, nscanned, opType) else: self.app.log.info("Failed to match RegEx on update '%s'" % info) self.noobadops += 1 @cli.log.LoggingApp def mongophile(app): def arg_assert(test, msg): if not test: app.argparser.error(msg) if not app.params.file or app.params.explain: arg_assert(app.params.host, "MongoDB (--host) Hostname must be defined with non-file or explain mode..") arg_assert(app.params.port, "MongoDB (--port) Port must be defined with non-file or explain mode..") arg_assert(app.params.db, "MongoDB (--db) Database must be defined with non-file or explain mode..") mongo = pymongo.Connection(app.params.host, app.params.port) useServer = True if app.params.file: app.log.info("File input mode. Skipping server read, sourcing JSON from '%s'." % app.params.file) fh = open(app.params.file, 'r') try: data = json.load(fh) except: app.log.warn("Parsing File as a JSON array failed. 
Trying to parse it as multiple JSON entries, one per line.") data = [] fh.seek(0) for line in fh: data.append(json.loads(line)) else: app.log.warn("Will read data from server connection.") data = mongo['system.profile'].find() if app.params.explain: raise Exception("Explain mode not currently supported") app.log.debug("Data: %s", data) parser = ProfilerParser(app, data) print "Read %d Ops, and parsed %d properly. %d were NoOps (things we don't care about) and %d failed to parse." % (len(parser.input_data), len(parser.op_list), parser.noops, parser.badops) print "Top 10 Slowest Operations..." for op in parser.op_list[0:10]: print "\t%s\n" % op mongophile.add_param("-x", "--host", help="MongoDB host to read from", default="localhost") mongophile.add_param("-p", "--port", help="MongoDB port to read from", default=27017) mongophile.add_param("-d", "--db", help="MongoDB Database to read from") mongophile.add_param("-f", "--file", help="File to read from (Optional, parses JSON instead of connecting to MongoDB)") mongophile.add_param("-e", "--explain", help="Attempt to explain each query. If using a file, specify host, port, db to use this", default=False, action="store_true") def main(): mongophile.run()
PypiClean
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/basic/lang/ug.js
/* Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/license */ CKEDITOR.lang['ug']={"editor":"تەھرىرلىگۈچ","editorPanel":"مول تېكست تەھرىرلىگۈچ تاختىسى","common":{"editorHelp":"ALT+0 نى بېسىپ ياردەمنى كۆرۈڭ","browseServer":"كۆرسىتىش مۇلازىمېتىر","url":"ئەسلى ھۆججەت","protocol":"كېلىشىم","upload":"يۈكلە","uploadSubmit":"مۇلازىمېتىرغا يۈكلە","image":"سۈرەت","flash":"چاقماق","form":"جەدۋەل","checkbox":"كۆپ تاللاش رامكىسى","radio":"يەككە تاللاش توپچىسى","textField":"يەككە قۇر تېكىست","textarea":"كۆپ قۇر تېكىست","hiddenField":"يوشۇرۇن دائىرە","button":"توپچا","select":"تىزىم/تىزىملىك","imageButton":"سۈرەت دائىرە","notSet":"‹تەڭشەلمىگەن›","id":"ID","name":"ئات","langDir":"تىل يۆنىلىشى","langDirLtr":"سولدىن ئوڭغا (LTR)","langDirRtl":"ئوڭدىن سولغا (RTL)","langCode":"تىل كودى","longDescr":"تەپسىلىي چۈشەندۈرۈش ئادرېسى","cssClass":"ئۇسلۇب خىلىنىڭ ئاتى","advisoryTitle":"ماۋزۇ","cssStyle":"قۇر ئىچىدىكى ئۇسلۇبى","ok":"جەزملە","cancel":"ۋاز كەچ","close":"تاقا","preview":"ئالدىن كۆزەت","resize":"چوڭلۇقىنى ئۆزگەرت","generalTab":"ئادەتتىكى","advancedTab":"ئالىي","validateNumberFailed":"سان پىچىمىدا كىرگۈزۈش زۆرۈر","confirmNewPage":"نۆۋەتتىكى پۈتۈك مەزمۇنى ساقلانمىدى، يېڭى پۈتۈك قۇرامسىز؟","confirmCancel":"قىسمەن ئۆزگەرتىش ساقلانمىدى، بۇ سۆزلەشكۈنى تاقامسىز؟","options":"تاللانما","target":"نىشان كۆزنەك","targetNew":"يېڭى كۆزنەك (_blank)","targetTop":"پۈتۈن بەت (_top)","targetSelf":"مەزكۇر كۆزنەك (_self)","targetParent":"ئاتا كۆزنەك (_parent)","langDirLTR":"سولدىن ئوڭغا (LTR)","langDirRTL":"ئوڭدىن سولغا (RTL)","styles":"ئۇسلۇبلار","cssClasses":"ئۇسلۇب خىللىرى","width":"كەڭلىك","height":"ئېگىزلىك","align":"توغرىلىنىشى","left":"سول","right":"ئوڭ","center":"ئوتتۇرا","justify":"ئىككى تەرەپتىن توغرىلا","alignLeft":"سولغا توغرىلا","alignRight":"ئوڭغا توغرىلا","alignCenter":"Align Center","alignTop":"ئۈستى","alignMiddle":"ئوتتۇرا","alignBottom":"ئاستى","alignNone":"يوق","invalidValue":"ئىناۋەتسىز 
قىممەت.","invalidHeight":"ئېگىزلىك چوقۇم رەقەم پىچىمىدا بولۇشى زۆرۈر","invalidWidth":"كەڭلىك چوقۇم رەقەم پىچىمىدا بولۇشى زۆرۈر","invalidLength":"بەلگىلەنگەن قىممەت \"1%\" سۆز بۆلىكىدىكى ئېنىقسىز ماتىريال ياكى مۇسبەت سانلار (2%).","invalidCssLength":"بۇ سۆز بۆلىكى چوقۇم مۇۋاپىق بولغان CSS ئۇزۇنلۇق قىممىتى بولۇشى زۆرۈر، بىرلىكى (px, %, in, cm, mm, em, ex, pt ياكى pc)","invalidHtmlLength":"بۇ سۆز بۆلىكى چوقۇم بىرىكمە HTML ئۇزۇنلۇق قىممىتى بولۇشى كېرەك. ئۆز ئىچىگە ئالىدىغان بىرلىك (px ياكى %)","invalidInlineStyle":"ئىچكى باغلانما ئۇسلۇبى چوقۇم چېكىتلىك پەش بىلەن ئايرىلغان بىر ياكى كۆپ «خاسلىق ئاتى:خاسلىق قىممىتى» پىچىمىدا بولۇشى لازىم","cssLengthTooltip":"بۇ سۆز بۆلىكى بىرىكمە CSS ئۇزۇنلۇق قىممىتى بولۇشى كېرەك. ئۆز ئىچىگە ئالىدىغان بىرلىك (px, %, in, cm, mm, em, ex, pt ياكى pc)","unavailable":"%1<span class=\\\\\"cke_accessibility\\\\\">، ئىشلەتكىلى بولمايدۇ</span>","keyboard":{"8":"Backspace","13":"Enter","16":"Shift","17":"Ctrl","18":"Alt","32":"Space","35":"End","36":"Home","46":"ئۆچۈر","112":"F1","113":"F2","114":"F3","115":"F4","116":"F5","117":"F6","118":"F7","119":"F8","120":"F9","121":"F10","122":"F11","123":"F12","124":"F13","125":"F14","126":"F15","127":"F16","128":"F17","129":"F18","130":"F19","131":"F20","132":"F21","133":"F22","134":"F23","135":"F24","224":"Command"},"keyboardShortcut":"تېزلەتمە كونۇپكا","optionDefault":"سۈكۈتتىكى"},"about":{"copy":"Copyright &copy; $1. 
نەشر ھوقۇقىغا ئىگە","dlgTitle":"CKEditor تەھرىرلىگۈچى 4 ھەقىدە","moreInfo":"تور تۇرايىمىزنى زىيارەت قىلىپ كېلىشىمگە ئائىت تېخىمۇ كۆپ ئۇچۇرغا ئېرىشىڭ"},"basicstyles":{"bold":"توم","italic":"يانتۇ","strike":"ئۆچۈرۈش سىزىقى","subscript":"تۆۋەن ئىندېكس","superscript":"يۇقىرى ئىندېكس","underline":"ئاستى سىزىق"},"notification":{"closed":"ئوقتۇرۇش تاقالدى."},"toolbar":{"toolbarCollapse":"قورال بالداقنى قاتلا","toolbarExpand":"قورال بالداقنى ياي","toolbarGroups":{"document":"پۈتۈك","clipboard":"چاپلاش تاختىسى/يېنىۋال","editing":"تەھرىر","forms":"جەدۋەل","basicstyles":"ئاساسىي ئۇسلۇب","paragraph":"ئابزاس","links":"ئۇلانما","insert":"قىستۇر","styles":"ئۇسلۇب","colors":"رەڭ","tools":"قورال"},"toolbars":"قورال بالداق"},"clipboard":{"copy":"كۆچۈر","copyError":"تور كۆرگۈڭىزنىڭ بىخەتەرلىك تەڭشىكى تەھرىرلىگۈچنىڭ كۆچۈر مەشغۇلاتىنى ئۆزلۈكىدىن ئىجرا قىلىشىغا يول قويمايدۇ، ھەرپتاختا تېز كۇنۇپكا (Ctrl/Cmd+C) ئارقىلىق تاماملاڭ","cut":"كەس","cutError":"تور كۆرگۈڭىزنىڭ بىخەتەرلىك تەڭشىكى تەھرىرلىگۈچنىڭ كەس مەشغۇلاتىنى ئۆزلۈكىدىن ئىجرا قىلىشىغا يول قويمايدۇ، ھەرپتاختا تېز كۇنۇپكا (Ctrl/Cmd+X) ئارقىلىق تاماملاڭ","paste":"چاپلا","pasteNotification":"چاپلانغىنى 1% . 
سىزنىڭ تور كۆرگۈچىڭىز قۇرال تەكچىسى ۋە سىيرىلما تاللاپ چاپلاش ئىقتىدارىنى قوللىمايدىكەن .","pasteArea":"چاپلاش دائىرىسى","pasteMsg":"مەزمۇنىڭىزنى تۆۋەندىكى رايونغا چاپلاپ ئاندىن OK نى بېسىڭ ."},"indent":{"indent":"تارايت","outdent":"كەڭەيت"},"fakeobjects":{"anchor":"لەڭگەرلىك نۇقتا","flash":"Flash جانلاندۇرۇم","hiddenfield":"يوشۇرۇن دائىرە","iframe":"IFrame","unknown":"يوچۇن نەڭ"},"link":{"acccessKey":"زىيارەت كۇنۇپكا","advanced":"ئالىي","advisoryContentType":"مەزمۇن تىپى","advisoryTitle":"ماۋزۇ","anchor":{"toolbar":"لەڭگەرلىك نۇقتا ئۇلانمىسى قىستۇر/تەھرىرلە","menu":"لەڭگەرلىك نۇقتا ئۇلانما خاسلىقى","title":"لەڭگەرلىك نۇقتا ئۇلانما خاسلىقى","name":"لەڭگەرلىك نۇقتا ئاتى","errorName":"لەڭگەرلىك نۇقتا ئاتىنى كىرگۈزۈڭ","remove":"لەڭگەرلىك نۇقتا ئۆچۈر"},"anchorId":"لەڭگەرلىك نۇقتا ID سى بويىچە","anchorName":"لەڭگەرلىك نۇقتا ئاتى بويىچە","charset":"ھەرپ كودلىنىشى","cssClasses":"ئۇسلۇب خىلى ئاتى","download":"Force Download","displayText":"Display Text","emailAddress":"ئادرېس","emailBody":"مەزمۇن","emailSubject":"ماۋزۇ","id":"ID","info":"ئۇلانما ئۇچۇرى","langCode":"تىل كودى","langDir":"تىل يۆنىلىشى","langDirLTR":"سولدىن ئوڭغا (LTR)","langDirRTL":"ئوڭدىن سولغا (RTL)","menu":"ئۇلانما تەھرىر","name":"ئات","noAnchors":"(بۇ پۈتۈكتە ئىشلەتكىلى بولىدىغان لەڭگەرلىك نۇقتا يوق)","noEmail":"ئېلخەت ئادرېسىنى كىرگۈزۈڭ","noUrl":"ئۇلانما ئادرېسىنى كىرگۈزۈڭ","noTel":"Please type the phone number","other":"‹باشقا›","phoneNumber":"Phone number","popupDependent":"تەۋە (NS)","popupFeatures":"قاڭقىش كۆزنەك خاسلىقى","popupFullScreen":"پۈتۈن ئېكران (IE)","popupLeft":"سول","popupLocationBar":"ئادرېس بالداق","popupMenuBar":"تىزىملىك بالداق","popupResizable":"چوڭلۇقى ئۆزگەرتىشچان","popupScrollBars":"دومىلىما سۈرگۈچ","popupStatusBar":"ھالەت بالداق","popupToolbar":"قورال بالداق","popupTop":"ئوڭ","rel":"باغلىنىش","selectAnchor":"بىر لەڭگەرلىك نۇقتا تاللاڭ","styles":"قۇر ئىچىدىكى ئۇسلۇبى","tabIndex":"Tab تەرتىپى","target":"نىشان","targetFrame":"‹كاندۇك›","targetFrameName":"نىشان كاندۇك 
ئاتى","targetPopup":"‹قاڭقىش كۆزنەك›","targetPopupName":"قاڭقىش كۆزنەك ئاتى","title":"ئۇلانما","toAnchor":"بەت ئىچىدىكى لەڭگەرلىك نۇقتا ئۇلانمىسى","toEmail":"ئېلخەت","toUrl":"ئادرېس","toPhone":"Phone","toolbar":"ئۇلانما قىستۇر/تەھرىرلە","type":"ئۇلانما تىپى","unlink":"ئۇلانما بىكار قىل","upload":"يۈكلە"},"list":{"bulletedlist":"تۈر بەلگە تىزىمى","numberedlist":"تەرتىپ نومۇر تىزىمى"},"undo":{"redo":"قايتىلا ","undo":"يېنىۋال"}};
PypiClean
/3d-wallet-generator-0.2.0.tar.gz/3d-wallet-generator-0.2.0/README.rst
3D Wallet Generator
===================

This project helps you design and export 3D-printable wallets, similar to paper wallets (but they won't die in a flood)
-----------------------------------------------------------------------------------------------------------------------

Everyone who's seriously serious about bitcoin has tried paper wallet generators. While the idea is great, paper isn't a great medium out of which to make something that stores significant value. With this in mind, we set out to make simple, easy-to-use software that can design and export 3D-printable wallets, with a variety of configuration options.

Dependencies
------------

- Python3: this project is designed for Python3, not Python2
- PyBitcoin, ``sudo pip3 install bitcoin`` **(no manual installation required)**
- PyQRCode, ``sudo pip3 install pyqrcode`` **(no manual installation required)**
- OpenSCAD 2015 (or higher), just install from their website, and the program should find it automatically (submit an issue if it doesn't) - **(manual installation required)**

Features
--------

- Supports a variety of configuration and size options
- Exports wallets as STL
- Exports keys as a CSV file for import into other software (for big batches)
- Set the configuration and let it generate millions of **random** wallets for you
- Support for other cryptocurrencies, including:

  - Bitcoin
  - Litecoin
  - Dogecoin
  - Any other currency (as long as you know the version bit for address generation)

Instructions
------------

1. Install pip

   - Windows: download from their website
   - Mac: install from MacPorts or Brew
   - Linux (Ubuntu/Debian): ``sudo apt-get install python3-pip``

2. Install OpenSCAD

   - `Download from their website <http://openscad.org/downloads.html>`_
   - Make sure you are running their newest version (or at least OpenSCAD 2015)
   - Contact us if you need help.

3. Install our package

   - Try: ``sudo pip3 install 3d-wallet-generator``
   - If it continues to fail, shoot us an email and we'll try to help.

4. Use our package

   - Run ``3dwallet -h`` to see your options
   - Try the default settings by running ``3dwallet`` - it will output five wallets, with the default settings, into a folder in your current directory.
   - Play with the other settings and decide how your printer, CNC, etc. likes the different styles.
   - Film it or take a picture, and give it to us! We'll add it to our collection!

We recommend you run the Linux version off of a LiveUSB for maximum security (just as you would with a normal paper wallet).

Miscellaneous
-------------

- If you have any comments, questions, or feature requests, either submit an issue or contact us at btcspry@bitforwarder.com
- We always accept donations at **1MF7hKShzq2iSV9ZZ9hEx6ATnHQpFtM7cF**! Please donate; this project took a bunch of effort and we want to make sure it was worth it.

To Do / Features Coming Soon
----------------------------

- Add pictures
- Add option to import your own addresses/private keys
- Offset the white in the QR code (instead of just offsetting the black)
- If you want any of these developed faster, send us a gift at our donation address above.
PypiClean
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/gis/admin/widgets.py
import logging
import warnings

from django.contrib.gis.gdal import GDALException
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Textarea
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning

# Creating a template context that contains Django settings
# values needed by admin map templates.
geo_context = {"LANGUAGE_BIDI": translation.get_language_bidi()}
logger = logging.getLogger("django.contrib.gis")


class OpenLayersWidget(Textarea):
    """
    Render an OpenLayers map using the WKT of the geometry.
    """

    # NOTE(review): self.params (used throughout get_context) is assumed to
    # be attached to the widget elsewhere (e.g. by the admin geometry field
    # machinery) -- it is never initialized in this class; confirm.

    def __init__(self, *args, **kwargs):
        # Deprecated widget: warn once per construction site.
        warnings.warn(
            "django.contrib.gis.admin.OpenLayersWidget is deprecated.",
            RemovedInDjango50Warning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)

    def get_context(self, name, value, attrs):
        """
        Build the template parameters (self.params) used to render the
        OpenLayers map for this widget, returning the updated dict.
        """
        # Update the template parameters with any attributes passed in.
        if attrs:
            self.params.update(attrs)
            self.params["editable"] = self.params["modifiable"]
        else:
            self.params["editable"] = True

        # Defaulting the WKT value to a blank string -- this
        # will be tested in the JavaScript and the appropriate
        # interface will be constructed.
        self.params["wkt"] = ""

        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if value and isinstance(value, str):
            try:
                value = GEOSGeometry(value)
            except (GEOSException, ValueError) as err:
                logger.error("Error creating geometry from value '%s' (%s)", value, err)
                value = None

        # Discard geometries of the wrong type unless this is a generic
        # GEOMETRY field.
        if (
            value
            and value.geom_type.upper() != self.geom_type
            and self.geom_type != "GEOMETRY"
        ):
            value = None

        # Constructing the dictionary of the map options.
        self.params["map_options"] = self.map_options()

        # Constructing the JavaScript module name using the name of
        # the GeometryField (passed in via the `attrs` keyword).
        # Use the 'name' attr for the field name (rather than 'field')
        self.params["name"] = name
        # note: we must switch out dashes for underscores since js
        # functions are created using the module variable
        js_safe_name = self.params["name"].replace("-", "_")
        self.params["module"] = "geodjango_%s" % js_safe_name

        if value:
            # Transforming the geometry to the projection used on the
            # OpenLayers map.
            srid = self.params["srid"]
            if value.srid != srid:
                try:
                    ogr = value.ogr
                    ogr.transform(srid)
                    wkt = ogr.wkt
                except GDALException as err:
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                        value.srid,
                        srid,
                        err,
                    )
                    wkt = ""
            else:
                wkt = value.wkt

            # Setting the parameter WKT with that of the transformed
            # geometry.
            self.params["wkt"] = wkt

        self.params.update(geo_context)
        return self.params

    def map_options(self):
        """Build the map options hash for the OpenLayers template."""

        # JavaScript construction utilities for the Bounds and Projection.
        def ol_bounds(extent):
            return "new OpenLayers.Bounds(%s)" % extent

        def ol_projection(srid):
            return 'new OpenLayers.Projection("EPSG:%s")' % srid

        # An array of the parameter name, the name of their OpenLayers
        # counterpart, and the type of variable they are.
        map_types = [
            ("srid", "projection", "srid"),
            ("display_srid", "displayProjection", "srid"),
            ("units", "units", str),
            ("max_resolution", "maxResolution", float),
            ("max_extent", "maxExtent", "bounds"),
            ("num_zoom", "numZoomLevels", int),
            ("max_zoom", "maxZoomLevels", int),
            ("min_zoom", "minZoomLevel", int),
        ]

        # Building the map options hash.
        map_options = {}
        for param_name, js_name, option_type in map_types:
            # Only emit options the user actually set (falsy values skipped).
            if self.params.get(param_name, False):
                if option_type == "srid":
                    value = ol_projection(self.params[param_name])
                elif option_type == "bounds":
                    value = ol_bounds(self.params[param_name])
                elif option_type in (float, int):
                    value = self.params[param_name]
                elif option_type in (str,):
                    # Strings are quoted for direct embedding in JS.
                    value = '"%s"' % self.params[param_name]
                else:
                    raise TypeError
                map_options[js_name] = value
        return map_options
PypiClean
/DuctApe-0.18.2.tar.gz/DuctApe-0.18.2/ductape/common/terminalprogress.py
import re
import sys


class TerminalController:
    """
    Query terminfo for the capabilities of the terminal attached to
    term_stream and expose them as attributes (empty string/bytes when
    the capability is unsupported or the stream is not a tty).
    """

    # Cursor movement:
    BOL = ''             #: Move the cursor to the beginning of the line
    UP = ''              #: Move the cursor up one line
    DOWN = ''            #: Move the cursor down one line
    LEFT = ''            #: Move the cursor left one char
    RIGHT = ''           #: Move the cursor right one char

    # Deletion:
    CLEAR_SCREEN = ''    #: Clear the screen and move to home position
    CLEAR_EOL = ''       #: Clear to the end of the line.
    CLEAR_BOL = ''       #: Clear to the beginning of the line.
    CLEAR_EOS = ''       #: Clear to the end of the screen

    # Output modes:
    BOLD = ''            #: Turn on bold mode
    BLINK = ''           #: Turn on blink mode
    DIM = ''             #: Turn on half-bright mode
    REVERSE = ''         #: Turn on reverse-video mode
    NORMAL = ''          #: Turn off all modes

    # Cursor display:
    HIDE_CURSOR = ''     #: Make the cursor invisible
    SHOW_CURSOR = ''     #: Make the cursor visible

    # Terminal size:
    COLS = None          #: Width of the terminal (None for unknown)
    LINES = None         #: Height of the terminal (None for unknown)

    # Foreground colors:
    BLACK = BLUE = GREEN = CYAN = RED = MAGENTA = YELLOW = WHITE = ''

    # Background colors:
    BG_BLACK = BG_BLUE = BG_GREEN = BG_CYAN = ''
    BG_RED = BG_MAGENTA = BG_YELLOW = BG_WHITE = ''

    # Mapping "ATTRIBUTE=terminfo capability name" pairs used in __init__.
    # NOTE(review): 'cinvis' looks like a typo for the terminfo capability
    # 'civis' -- tigetstr() would find nothing and HIDE_CURSOR stays empty;
    # confirm against terminfo(5).
    _STRING_CAPABILITIES = """
    BOL=cr UP=cuu1 DOWN=cud1 LEFT=cub1 RIGHT=cuf1
    CLEAR_SCREEN=clear CLEAR_EOL=el CLEAR_BOL=el1 CLEAR_EOS=ed BOLD=bold
    BLINK=blink DIM=dim REVERSE=rev UNDERLINE=smul NORMAL=sgr0
    HIDE_CURSOR=cinvis SHOW_CURSOR=cnorm""".split()
    _COLORS = """BLACK BLUE GREEN CYAN RED MAGENTA YELLOW WHITE""".split()
    _ANSICOLORS = "BLACK RED GREEN YELLOW BLUE MAGENTA CYAN WHITE".split()

    def __init__(self, term_stream=sys.stdout):
        """Probe terminfo via curses; on any failure the class-level
        empty defaults remain in effect (i.e. no terminal control)."""
        # Curses isn't available on all platforms
        try:
            import curses
        except:
            return

        # If the stream isn't a tty, then assume it has no capabilities.
        if not hasattr(term_stream, 'isatty') or not term_stream.isatty():
            return

        # Check the terminal type.  If we fail, then assume that the
        # terminal has no capabilities.
        try:
            curses.setupterm()
        except:
            return

        # Look up numeric capabilities.
        self.COLS = curses.tigetnum('cols')
        self.LINES = curses.tigetnum('lines')

        # Look up string capabilities.
        # Note: capabilities are stored as *bytes* on the instance.
        for capability in self._STRING_CAPABILITIES:
            (attrib, cap_name) = capability.split('=')
            setattr(self, attrib, self._tigetstr(cap_name).encode() or b'')

        # Colors
        set_fg = self._tigetstr('setf').encode()
        if set_fg:
            for i,color in zip(list(range(len(self._COLORS))), self._COLORS):
                setattr(self, color, curses.tparm(set_fg, i) or b'')
        set_fg_ansi = self._tigetstr('setaf').encode()
        if set_fg_ansi:
            for i,color in zip(list(range(len(self._ANSICOLORS))), self._ANSICOLORS):
                setattr(self, color, curses.tparm(set_fg_ansi, i) or b'')
        set_bg = self._tigetstr('setb').encode()
        if set_bg:
            for i,color in zip(list(range(len(self._COLORS))), self._COLORS):
                setattr(self, 'BG_'+color, curses.tparm(set_bg, i) or b'')
        set_bg_ansi = self._tigetstr('setab').encode()
        if set_bg_ansi:
            for i,color in zip(list(range(len(self._ANSICOLORS))), self._ANSICOLORS):
                setattr(self, 'BG_'+color, curses.tparm(set_bg_ansi, i) or b'')

    def _tigetstr(self, cap_name):
        """Return the capability string for cap_name as str ('' if absent)."""
        # String capabilities can include "delays" of the form "$<2>".
        # For any modern terminal, we should be able to just ignore
        # these, so strip them out.
        import curses
        cap = curses.tigetstr(cap_name) or b''
        return re.sub(r'\$<\d+>[/*]?', '', cap.decode('utf-8'))

    def render(self, template):
        """Expand ${CAPABILITY} placeholders in template ($$ escapes $)."""
        return re.sub(r'\$\$|\${\w+}', self._render_sub, template)

    def _render_sub(self, match):
        """re.sub callback: map one placeholder to its capability value."""
        s = match.group()
        if s == '$$':
            return s
        else:
            s = getattr(self, s[2:-1])
            # Capabilities are stored as bytes; decode for str templates.
            if isinstance(s, bytes):
                s = s.decode('utf-8')
            return s


class ProgressBar:
    """
    Render a single-line progress bar (plus a message line) on a terminal
    described by a TerminalController. Silently does nothing when the
    terminal lacks the required capabilities (use_it == False).
    """

    BAR = '%3d%% ${GREEN}[${BOLD}%s%s${NORMAL}${GREEN}]${NORMAL}\n'
    HEADER = '${BOLD}${BLUE}%s${NORMAL}\n\n'

    def __init__(self, term, header):
        self.term = term
        # Bail out (use_it=False) unless cursor repositioning is available.
        if not (self.term.CLEAR_EOL and self.term.UP and self.term.BOL):
            self.use_it = False
        else:
            self.use_it = True
        self.width = self.term.COLS or 75
        self.bar = term.render(self.BAR)
        self.header = self.term.render(self.HEADER % header.center(self.width))
        self.cleared = 1  #: true if we haven't drawn the bar yet.

    def update(self, percent, message=''):
        """Redraw the bar at `percent` (0.0-1.0) with `message` beneath it."""
        if not self.use_it:
            return
        # Work on message as bytes so .center/.decode below line up.
        if isinstance(message, str):
            message = message.encode('utf-8', 'ignore')
        if self.cleared:
            sys.stdout.write(self.header)
            self.cleared = 0
        # Number of '=' cells: bar body is width minus 10 chars of chrome.
        n = int((self.width-10)*percent)
        msg = message.center(self.width)
        # Move to start of previous line and erase it before redrawing.
        s = self.term.BOL + self.term.UP + self.term.CLEAR_EOL
        s = s.decode('utf-8')
        sys.stdout.write(
            s +
            (self.bar % (100*percent, '='*n, '-'*(self.width-10-n))) +
            self.term.CLEAR_EOL.decode('utf-8') +
            msg.decode('utf-8'))
        sys.stdout.flush()

    def clear(self):
        """Erase the header, bar and message lines."""
        if not self.use_it:
            return
        if not self.cleared:
            # NOTE(review): these capabilities are bytes (set in
            # TerminalController.__init__) but are written here to a text
            # stream without .decode(), unlike update() above -- confirm
            # this works on Python 3 (it would raise TypeError on a str
            # stdout).
            sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
                             self.term.UP + self.term.CLEAR_EOL +
                             self.term.UP + self.term.CLEAR_EOL)
            self.cleared = 1
PypiClean
/Catalyst_Lib-0.8.tar.gz/Catalyst_Lib-0.8/Catalyst_Lib/lists.py
def contains_duplicates(items: list):
    """Return True if any element of *items* occurs more than once.

    Uses list.count so elements only need to support equality; they do
    not have to be hashable.

    :param items: the list to inspect
    :return: True when a duplicate exists, False otherwise
    """
    for element in items:
        if items.count(element) != 1:
            return True
    return False


def combine_list(items: list = None):
    """Concatenate the string form of every element of *items*.

    :param items: elements to join; non-strings are converted with str()
    :return: the joined string, or None when *items* is None
    """
    if items is None:
        return None
    # str(item) matches the old f-string concatenation for non-str items.
    return "".join(str(item) for item in items)


def flattenList(alist: list = None):
    """Recursively flatten nested lists into one flat list.

    :param alist: the (possibly nested) list to flatten
    :raises ValueError: if *alist* is None
    :return: a new flat list with elements in depth-first order
    """
    if alist is None:
        raise ValueError("Did not pass through a valid list to flatten")
    newlist = []
    for item in alist:
        if isinstance(item, list):
            newlist.extend(flattenList(item))
        else:
            newlist.append(item)
    return newlist


def to_dict(items: list):
    """Build a dict from a flat [key, value, key, value, ...] list.

    :param items: alternating keys and values; length must be even
    :raises ValueError: if the list has an odd number of elements
    :return: the resulting dictionary (a repeated key keeps its last value,
        exactly as the previous sequential-assignment loop did)
    """
    if len(items) % 2 != 0:
        raise ValueError("Number of items in list are not even")
    return dict(zip(items[0::2], items[1::2]))


class List(list):
    """A ``list`` subclass with a few convenience helpers."""

    def append_list(self, items: list = None):
        """Return a new List containing this list followed by *items*.

        :raises ValueError: if *items* is None
        """
        if items is None:
            raise ValueError("Did not pass through valid list to append")
        return List(self + items)

    def copy(self):
        """Return a shallow copy of this List.

        Bug fix: the previous implementation was ``List(self.copy())``,
        which called itself and always raised RecursionError.
        """
        return List(self)

    def flatten(self):
        """Return a List with all nested sublists flattened."""
        return List(flattenList(alist=self))

    def combine(self):
        """Concatenate the elements into a string, wrapped in a List.

        NOTE(review): wrapping the joined string in ``List`` yields a list
        of single characters; preserved for backward compatibility —
        confirm whether callers expect the plain string instead.
        """
        return List(combine_list(items=self))

    def to_dict(self):
        """Convert this [key, value, ...] list to a ``dictionaries.Import``."""
        # Imported lazily so the rest of this module stays usable without
        # the sibling ``dictionaries`` module being importable.
        from . import dictionaries
        return dictionaries.Import(to_dict(items=self))

    def split_to_list(self):
        """Return a List where each element is wrapped in its own list."""
        return List([element] for element in self)

    def first_even(self):
        """Return the first even int, or None if there is none.

        :raises ValueError: if any element is not exactly of type int
        """
        for x in self:
            if type(x) != int:
                raise ValueError(f"Invalid type of item in list: was type {type(x)}")
            if x % 2 == 0:
                return x

    def list(self):
        """Return self (identity helper kept for API compatibility)."""
        return self

    def first_odd(self):
        """Return the first odd int, or None if there is none.

        :raises ValueError: if any element is not exactly of type int
        """
        for x in self:
            if type(x) != int:
                raise ValueError(f"Invalid type of item in list: was type {type(x)}")
            if x % 2 != 0:
                return x

    def all_evens(self):
        """Return a plain list of all even ints, in order.

        :raises ValueError: if any element is not exactly of type int
        """
        evens = []
        for x in self:
            if type(x) != int:
                raise ValueError(f"Invalid type of item in list: was type {type(x)}")
            if x % 2 == 0:
                evens.append(x)
        return evens

    def all_odds(self):
        """Return a plain list of all odd ints, in order.

        :raises ValueError: if any element is not exactly of type int
        """
        odds = []
        for x in self:
            if type(x) != int:
                raise ValueError(f"Invalid type of item in list: was type {type(x)}")
            if x % 2 != 0:
                odds.append(x)
        return odds

    def types(self):
        """Return a plain list with the type of each element."""
        return [type(x) for x in self]

    def find(self, item: any):
        """Return a plain list with one copy of *item* per match in self."""
        return [item for x in self if x == item]
PypiClean
/BIA_OBS-1.0.3.tar.gz/BIA_OBS-1.0.3/BIA/static/dist/node_modules/tailwindcss/lib/lib/defaultExtractor.js
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "defaultExtractor", { enumerable: true, get: ()=>defaultExtractor }); const _featureFlags = require("../featureFlags"); const _regex = /*#__PURE__*/ _interopRequireWildcard(require("./regex")); function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); } function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for(var key in obj){ if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } function defaultExtractor(context) { let patterns = Array.from(buildRegExps(context)); /** * @param {string} content */ return (content)=>{ /** @type {(string|string)[]} */ let results = []; for (let pattern of patterns){ var ref; results = [ ...results, ...(ref = content.match(pattern)) !== null && ref !== void 0 ? 
ref : [] ]; } return results.filter((v)=>v !== undefined).map(clipAtBalancedParens); }; } function* buildRegExps(context) { let separator = context.tailwindConfig.separator; let variantGroupingEnabled = (0, _featureFlags.flagEnabled)(context.tailwindConfig, "variantGrouping"); let prefix = context.tailwindConfig.prefix !== "" ? _regex.optional(_regex.pattern([ /-?/, _regex.escape(context.tailwindConfig.prefix) ])) : ""; let utility = _regex.any([ // Arbitrary properties /\[[^\s:'"`]+:[^\s\]]+\]/, // Utilities _regex.pattern([ // Utility Name / Group Name /-?(?:\w+)/, // Normal/Arbitrary values _regex.optional(_regex.any([ _regex.pattern([ // Arbitrary values /-(?:\w+-)*\[[^\s:]+\]/, // Not immediately followed by an `{[(` /(?![{([]])/, // optionally followed by an opacity modifier /(?:\/[^\s'"`\\><$]*)?/ ]), _regex.pattern([ // Arbitrary values /-(?:\w+-)*\[[^\s]+\]/, // Not immediately followed by an `{[(` /(?![{([]])/, // optionally followed by an opacity modifier /(?:\/[^\s'"`\\$]*)?/ ]), // Normal values w/o quotes — may include an opacity modifier /[-\/][^\s'"`\\$={><]*/ ])) ]) ]); let variantPatterns = [ // Without quotes _regex.any([ // This is here to provide special support for the `@` variant _regex.pattern([ /@\[[^\s"'`]+\](\/[^\s"'`]+)?/, separator ]), _regex.pattern([ /([^\s"'`\[\\]+-)?\[[^\s"'`]+\]/, separator ]), _regex.pattern([ /[^\s"'`\[\\]+/, separator ]) ]), // With quotes allowed _regex.any([ _regex.pattern([ /([^\s"'`\[\\]+-)?\[[^\s`]+\]/, separator ]), _regex.pattern([ /[^\s`\[\\]+/, separator ]) ]) ]; for (const variantPattern of variantPatterns){ yield _regex.pattern([ // Variants "((?=((", variantPattern, ")+))\\2)?", // Important (optional) /!?/, prefix, variantGroupingEnabled ? _regex.any([ // Or any of those things but grouped separated by commas _regex.pattern([ /\(/, utility, _regex.zeroOrMore([ /,/, utility ]), /\)/ ]), // Arbitrary properties, constrained utilities, arbitrary values, etc… utility ]) : utility ]); } // 5. 
Inner matches yield /[^<>"'`\s.(){}[\]#=%$]*[^<>"'`\s.(){}[\]#=%:$]/g; } // We want to capture any "special" characters // AND the characters immediately following them (if there is one) let SPECIALS = /([\[\]'"`])([^\[\]'"`])?/g; let ALLOWED_CLASS_CHARACTERS = /[^"'`\s<>\]]+/; /** * Clips a string ensuring that parentheses, quotes, etc… are balanced * Used for arbitrary values only * * We will go past the end of the balanced parens until we find a non-class character * * Depth matching behavior: * w-[calc(100%-theme('spacing[some_key][1.5]'))]'] * ┬ ┬ ┬┬ ┬ ┬┬ ┬┬┬┬┬┬┬ * 1 2 3 4 34 3 210 END * ╰────┴──────────┴────────┴────────┴┴───┴─┴┴┴ * * @param {string} input */ function clipAtBalancedParens(input) { // We are care about this for arbitrary values if (!input.includes("-[")) { return input; } let depth = 0; let openStringTypes = []; // Find all parens, brackets, quotes, etc // Stop when we end at a balanced pair // This is naive and will treat mismatched parens as balanced // This shouldn't be a problem in practice though let matches = input.matchAll(SPECIALS); // We can't use lookbehind assertions because we have to support Safari // So, instead, we've emulated it using capture groups and we'll re-work the matches to accommodate matches = Array.from(matches).flatMap((match)=>{ const [, ...groups] = match; return groups.map((group, idx)=>Object.assign([], match, { index: match.index + idx, 0: group })); }); for (let match of matches){ let char = match[0]; let inStringType = openStringTypes[openStringTypes.length - 1]; if (char === inStringType) { openStringTypes.pop(); } else if (char === "'" || char === '"' || char === "`") { openStringTypes.push(char); } if (inStringType) { continue; } else if (char === "[") { depth++; continue; } else if (char === "]") { depth--; continue; } // We've gone one character past the point where we should stop // This means that there was an extra closing `]` // We'll clip to just before it if (depth < 0) { return input.substring(0, 
match.index); } // We've finished balancing the brackets but there still may be characters that can be included // For example in the class `text-[#336699]/[.35]` // The depth goes to `0` at the closing `]` but goes up again at the `[` // If we're at zero and encounter a non-class character then we clip the class there if (depth === 0 && !ALLOWED_CLASS_CHARACTERS.test(char)) { return input.substring(0, match.index); } } return input; } // Regular utilities // {{modifier}:}*{namespace}{-{suffix}}*{/{opacityModifier}}? // Arbitrary values // {{modifier}:}*{namespace}-[{arbitraryValue}]{/{opacityModifier}}? // arbitraryValue: no whitespace, balanced quotes unless within quotes, balanced brackets unless within quotes // Arbitrary properties // {{modifier}:}*[{validCssPropertyName}:{arbitraryValue}]
PypiClean
/EnforsML-0.0.3.tar.gz/EnforsML-0.0.3/enforsml/text/emlb.py
from enforsml.text import nlp, utils  # NOTE(review): `utils` is unused here


class ParseError(Exception):
    """Raised when an EnforsML bot file cannot be parsed.

    Fix: derives from Exception rather than BaseException, so generic
    ``except Exception`` handlers catch it — PEP 352 reserves
    BaseException for exit-style exceptions.
    """

    def __init__(self, file_obj, msg):
        # Prefix the message with file name and line number for context.
        Exception.__init__(
            self, "Error in file %s, line %d: %s" %
            (file_obj.file_name, file_obj.line_num, msg))


class BotFile:
    """An EnforsML bot file.
    """

    def __init__(self):
        self.file_name = None  # path of the file being loaded
        self.line_num = 0      # current 1-based line number (for errors)
        self.intents = []      # nlp.Intent objects parsed so far

    def load(self, file_name):
        """Load a bot file from the file name file_name.

        :param file_name: path to the bot file to parse
        :return: the list of parsed nlp.Intent objects
        :raises ParseError: on malformed input
        """
        self.file_name = file_name
        self.line_num = 0
        intent = None
        # "User" or "Bot". NOTE(review): mode is not reset when an intent
        # ends, so it carries over to the next intent — confirm intended.
        mode = None

        with open(file_name, "r") as f:
            # Iterate the file object directly instead of readlines():
            # same lines, no need to hold the whole file in memory.
            for line in f:
                self.line_num += 1
                line = self.clean_up_line(line)

                if line.startswith("#"):
                    # Comment line.
                    continue
                elif len(line) == 0:
                    # Blank line terminates the current intent.
                    self.end_of_intent(intent)
                    intent = None
                elif line.startswith("Intent"):
                    intent = self.start_of_intent(line)
                elif line == "User:":
                    mode = "User"
                elif line == "Bot:":
                    mode = "Bot"
                elif line.startswith("- "):
                    self.text_line(line[2:], mode, intent)
                else:
                    # Unrecognized lines are silently ignored.
                    pass

        # Flush a trailing intent not followed by a blank line.
        if intent:
            self.end_of_intent(intent)

        return self.intents

    def clean_up_line(self, line):
        """Strip carriage returns and surrounding whitespace from *line*."""
        return line.replace("\r", "").strip()

    def start_of_intent(self, line):
        """Called when we find the start of an Intent.

        :param line: the "Intent <name>" header line
        :return: a new nlp.Intent
        :raises ParseError: if the header has no name
        """
        words = line.split(" ")
        try:
            intent_name = words[1]
        except IndexError:
            raise ParseError(self, "Intent needs a name")

        # train_sentences=[] MUST be passed explicitly — nlp.Intent
        # presumably shares state between instances otherwise (likely a
        # mutable default argument in nlp.Intent; confirm there).
        return nlp.Intent(intent_name, train_sentences=[])

    def end_of_intent(self, intent):
        """Record *intent* (if any) once its definition is complete."""
        if intent:
            self.intents.append(intent)

    def text_line(self, line, mode, intent):
        """Take care of a line beginning with "- ".

        :param line: the text after the "- " marker
        :param mode: "User" (training sentence) or "Bot" (response)
        :param intent: the intent currently being built
        :raises ParseError: if there is no current intent or mode
        """
        if not intent:
            raise ParseError(self, "This text line is not part of an Intent")
        if mode is None:
            raise ParseError(self, "I don't know if this is a User or Bot line")

        if mode == "User":
            intent.add_train_txt(line)
        elif mode == "Bot":
            intent.response_data = line
PypiClean
/GBI_distributions-0.2.tar.gz/GBI_distributions-0.2/GBI_distributions/Binomialdistribution.py
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution


class Binomial(Distribution):
    """ Binomial distribution class for calculating and
    visualizing a Binomial distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats to be extracted from the data file
        p (float) representing the probability of an event occurring
        n (int) number of trials
    """

    def __init__(self, prob=.5, size=20):
        # Parameters first; mean and stdev are derived from them.
        self.n = size
        self.p = prob
        Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())

    def calculate_mean(self):
        """Function to calculate the mean (n * p) from p and n

        Args:
            None

        Returns:
            float: mean of the data set
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Function to calculate the standard deviation sqrt(n*p*(1-p)).

        Args:
            None

        Returns:
            float: standard deviation of the data set
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Function to calculate p and n from the data set.

        Assumes ``self.data`` has been populated — presumably by the base
        class's file-reading helper (confirm in Generaldistribution).

        Args:
            None

        Returns:
            float: the p value
            float: the n value
        """
        self.n = len(self.data)
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()
        # Fix: the docstring always promised p and n, but nothing was
        # returned; return them so callers can use the result directly.
        return self.p, self.n

    def plot_bar(self):
        """Function to output a bar chart of the instance variable data
        using the matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.bar(x=['0', '1'], height=[(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability mass function calculator for the binomial
        distribution (docstring fix: this is not the Gaussian pdf).

        Args:
            k (int): number of successes for which to evaluate the pmf

        Returns:
            float: probability of exactly k successes in n trials
        """
        # C(n, k) * p^k * (1-p)^(n-k)
        a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Function to plot the pmf of the binomial distribution

        Args:
            None

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []

        # calculate the x values to visualize
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))

        # make the plots
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Binomial distributions with equal p

        Args:
            other (Binomial): Binomial instance

        Returns:
            Binomial: Binomial distribution

        Raises:
            AssertionError: if the two p values differ
        """
        # Fix: the old try/assert/except-reraise wrapper was a no-op; a
        # bare assert raises the identical AssertionError. (Note: assert
        # is skipped under `python -O`, as before.)
        assert self.p == other.p, 'p values are not equal'

        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()

        return result

    def __repr__(self):
        """Function to output the characteristics of the Binomial instance
        (docstring fix: this is a Binomial, not a Gaussian).

        Args:
            None

        Returns:
            string: characteristics of the Binomial
        """
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
PypiClean
/AADeepLearning-1.0.8.tar.gz/AAdeepLearning-1.0.8/example/aa_dnn_cifar10.py
from AADeepLearning import AADeepLearning
from AADeepLearning.datasets import cifar10
from AADeepLearning.datasets import np_utils

# Load the data; it is downloaded automatically if not already present.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()

# First dim is sample count, second is the number of colour channels,
# third is height, fourth is width.
print('x_train shape:', x_train.shape)
print('y_train shape:', y_train.shape)
print('x_test shape:', x_test.shape)
print('y_test shape:', y_test.shape)

# Convert x_train and x_test to float32.
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
# Normalise: map pixel values into the 0..1 range.
x_train /= 255
x_test /= 255

# 10-class problem: map the class vectors (integers 0..9) to binary class
# matrices — i.e. one-hot encode the labels.
y_train = np_utils.to_categorical(y_train, 10)
y_test = np_utils.to_categorical(y_test, 10)

# Network configuration.
config = {
    # Initial learning rate.
    "learning_rate": 0.001,
    # Optimisation strategy: sgd/momentum/rmsprop/adam.
    "optimizer": "adam",
    # Momentum coefficient when using momentum gradient descent; defaults
    # to 0.9 and normally does not need tuning.
    "momentum_coefficient": 0.9,
    # Number of training iterations.
    "number_iteration": 1000,
    # Number of samples per training batch.
    "batch_size": 64,
    # Print progress every this many iterations.
    "display": 100,
}

# Network structure; data flows from top to bottom.
net = [
    {
        # Layer name, unconstrained.
        "name": "flatten_1",
        # Layer type: flattens the data into a shape suitable for the
        # network; used at the input or between conv and fully connected
        # layers. (60000, 1, 28, 28) -> (784, 60000)
        "type": "flatten"
    },
    {
        # Layer name.
        "name": "fully_connected_1",
        # Layer type: fully connected layer.
        "type": "fully_connected",
        # Number of neurons.
        "neurons_number": 256,
        # Weight initialisation scheme: msra/xavier/gaussian.
        "weight_init": "msra"
    },
    {
        # Layer name.
        "name": "batch_normalization_1",
        # Batch-normalisation layer: transforms the previous layer's output
        # to roughly zero mean and unit standard deviation; helps with
        # vanishing gradients and unstable activations.
        "type": "batch_normalization"
    },
    {
        # Layer name.
        "name": "relu_1",
        # Layer type (activation layer); options: relu, sigmoid, tanh.
        "type": "relu"
    },
    {
        # Layer name.
        "name": "fully_connected_2",
        # Layer type: fully connected layer.
        "type": "fully_connected",
        # Number of neurons: 10, because this is a 10-class problem.
        "neurons_number": 10,
        # Weight initialisation scheme: msra/xavier/gaussian.
        "weight_init": "msra"
    },
    {
        # Layer name.
        "name": "softmax_1",
        # Layer type: classification layer; outputs the probability
        # distribution over the ten classes.
        "type": "softmax"
    }
]

# Define the model from the network structure and configuration.
AA = AADeepLearning(net=net, config=config)
# Train the model.
AA.train(x_train=x_train, y_train=y_train)

# Predict on the test set; returns the probability distribution and the
# accuracy. score: per-class probabilities, accuracy: accuracy.
score, accuracy = AA.predict(x_test=x_test, y_test=y_test)
print("test set accuracy:", accuracy)
PypiClean
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/meter_category_response_py3.py
from msrest.serialization import Model


class MeterCategoryResponse(Model):
    """Serialization model for one meter category.

    :param meter_group_category_id: The meter category identifier
    :type meter_group_category_id: int
    :param meter_group_category_code: The meter category code
    :type meter_group_category_code: str
    :param meter_group_category_info: The meter category name
    :type meter_group_category_info: str
    :param auto_group: Is this category an automatically maintained one?
    :type auto_group: bool
    :param number_of_groups: Number of groups within this category
    :type number_of_groups: int
    :param number_of_groups_with_visible_members: Number of groups within
     this category with members the logged in can see
    :type number_of_groups_with_visible_members: int
    """

    # Maps Python attribute names to wire-format keys/types for msrest.
    _attribute_map = {
        'meter_group_category_id': {'key': 'meterGroupCategoryId', 'type': 'int'},
        'meter_group_category_code': {'key': 'meterGroupCategoryCode', 'type': 'str'},
        'meter_group_category_info': {'key': 'meterGroupCategoryInfo', 'type': 'str'},
        'auto_group': {'key': 'autoGroup', 'type': 'bool'},
        'number_of_groups': {'key': 'numberOfGroups', 'type': 'int'},
        'number_of_groups_with_visible_members': {'key': 'numberOfGroupsWithVisibleMembers', 'type': 'int'},
    }

    def __init__(self, *, meter_group_category_id: int=None, meter_group_category_code: str=None, meter_group_category_info: str=None, auto_group: bool=None, number_of_groups: int=None, number_of_groups_with_visible_members: int=None, **kwargs) -> None:
        super(MeterCategoryResponse, self).__init__(**kwargs)
        # Copy every constructor argument onto the instance unchanged;
        # msrest serializes them according to _attribute_map above.
        for attr_name, attr_value in (
            ('meter_group_category_id', meter_group_category_id),
            ('meter_group_category_code', meter_group_category_code),
            ('meter_group_category_info', meter_group_category_info),
            ('auto_group', auto_group),
            ('number_of_groups', number_of_groups),
            ('number_of_groups_with_visible_members', number_of_groups_with_visible_members),
        ):
            setattr(self, attr_name, attr_value)
PypiClean
/MDAKit-1.0.0-py3-none-any.whl/MDKit/tools/ChemicalShift/shiftx2-plot.py
import os
import sys
import argparse
import mdtraj as md  # NOTE(review): imported but never used in this script
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
# Fix: `scipy.stats.stats` was a private alias removed from modern SciPy;
# pearsonr's public home is scipy.stats.
from scipy.stats import pearsonr

# Global matplotlib styling for all figures produced by this script.
font = {
    'font.family': 'calibri',
    'font.weight': 'normal',
    'font.size': 20,
    'lines.linewidth': 2,
    'figure.dpi': 600,
    'figure.autolayout': True
}
mpl.rcParams.update(font)


def random_coil(res, msm, exp, t, coil_type='v1'):
    '''
    Subtract residue-specific random-coil chemical shifts (in place) from
    both the experimental and computed shift arrays.

    res: per-residue one-letter codes; msm/exp: mutable shift sequences;
    t: 'CA' or 'CB'; coil_type: 'v1' (followed-by-Ala table) or 'v2'
    (followed-by-Pro table).

    Ref: Wishart DS, Bigam CG, Holm A, Hodges RS, Sykes BD. 1H, 13C and 15N
    random coil NMR chemical shifts of the common amino acids. I.
    Investigations of nearest-neighbor effects. J Biomol NMR. 1995
    Jan;5(1):67-81. doi: 10.1007/BF00227471. PMID: 7881273.
    '''
    # table 2: RANDOM COIL 13C CHEMICAL SHIFTS FOR THE 20 COMMON AMINO
    # ACIDS WHEN FOLLOWED BY ALANINE
    random_coilv1 = {
        'CA': {"A": 52.5, "R": 56.0, "N": 53.1, "D": 54.2, "C": 58.2,
               "Q": 55.7, "E": 56.6, "G": 45.1, "H": 55.0, "I": 61.1,
               "L": 55.1, "K": 56.2, "M": 55.4, "F": 57.7, "P": 63.3,
               "S": 58.3, "T": 61.8, "W": 57.5, "Y": 57.9, "V": 62.2},
        'CB': {"A": 19.1, "R": 30.9, "N": 38.9, "D": 41.1, "C": 28.0,
               "Q": 29.4, "E": 29.9, "G": 0.0, "H": 29.0, "I": 38.8,
               "L": 42.4, "K": 33.1, "M": 32.9, "F": 39.6, "P": 32.1,
               "S": 63.8, "T": 69.8, "W": 29.6, "Y": 38.8, "V": 32.9},
    }
    # table 4: RANDOM COIL 13C CHEMICAL SHIFTS FOR THE 20 COMMON AMINO
    # ACIDS WHEN FOLLOWED BY PROLINE
    random_coilv2 = {
        'CA': {"A": 50.5, "R": 54.0, "N": 51.3, "D": 52.2, "C": 56.4,
               "Q": 53.7, "E": 54.2, "G": 44.5, "H": 53.3, "I": 58.7,
               "L": 53.1, "K": 54.2, "M": 53.3, "F": 55.6, "P": 61.5,
               "S": 56.4, "T": 59.8, "W": 55.7, "Y": 55.8, "V": 59.8},
        'CB': {"A": 18.1, "R": 30.2, "N": 38.7, "D": 40.9, "C": 27.1,
               "Q": 28.8, "E": 29.2, "G": 0.0, "H": 29.0, "I": 38.7,
               "L": 41.7, "K": 32.6, "M": 32.4, "F": 39.1, "P": 30.9,
               "S": 63.3, "T": 69.8, "W": 28.9, "Y": 38.3, "V": 32.6},
    }

    # Fix: renamed the local (was `random_coil`) so it no longer shadows
    # this function's own name.
    if coil_type == "v1":
        coil_table = random_coilv1[t]
    else:
        coil_table = random_coilv2[t]

    for i, r in enumerate(res):
        coil_value = coil_table[r]
        exp[i] -= coil_value
        msm[i] -= coil_value
    return exp, msm


def parse_arg():
    """Parse command-line options; returns (msm, exp_f, oup, Coil, coil_type)."""
    parser = argparse.ArgumentParser(description='Calculate chemical shift for xtc')
    parser.add_argument('-m', dest='msm', help="msm chemicalshift data", required=True)
    parser.add_argument('-e', dest='exp_f', help="experiment chemicalshift data",
                        default='/home/ymh/mybin/BMR18601.txt')
    parser.add_argument('-o', dest='oup', help="floder to save png, default ='.'", default='.')
    parser.add_argument('-coil_type', help="random_coil data to use, v1 or v2, default:v1",
                        default="v1", choices=['v1', 'v2'])
    parser.add_argument('-c', dest='Coil',
                        help='Swich. Cal Random coil rather than standard cs.default=False',
                        action="store_false")
    args = parser.parse_args()
    return args.msm, args.exp_f, args.oup, args.Coil, args.coil_type


def main():
    """Compare computed vs. experimental shifts per atom type; save plots/data."""
    msm_f, exp_f, rs_p, Coil, coil_type = parse_arg()
    # Fix: `print exp_f` was Python-2-only syntax (a SyntaxError under
    # Python 3); the rest of the script already used function-style print.
    print(exp_f)
    names = ['CA', 'CB', 'N', 'H']
    data = pd.read_csv(msm_f, sep='\s+', na_values="****")
    data1 = pd.read_csv(exp_f, sep="\s+", na_values="****")
    for name in names:
        if len(data) != len(data1):
            print("Two file contains difference rows!!!,exp:msm %d,%d" % (len(data1), len(data)))
            sys.exit()
        fig, ax = plt.subplots(dpi=100, figsize=(6, 5))
        comp = pd.DataFrame({'Num': data['Num'], 'Res': data['RES'],
                             'msm': data[name], 'exp': data1[name]})
        comp = comp.dropna()
        msm = np.array(comp['msm'])
        exp = np.array(comp['exp'])
        res = np.array(comp['Res'])
        num = comp['Num']
        # Optionally convert to secondary (random-coil-referenced) shifts.
        if Coil and name == "CA":
            exp, msm = random_coil(res, msm, exp, name, coil_type)
            name = 'CA-coil%s' % coil_type
        if Coil and name == "CB":
            exp, msm = random_coil(res, msm, exp, name, coil_type)
            name = 'CB-coil%s' % coil_type
        R, Pv = pearsonr(msm, exp)
        ax_max = np.max(msm)
        ax_min = np.min(msm)
        ax_range = (ax_min, ax_max)
        y = np.arange(ax_min, ax_max + 10)
        ax.scatter(exp, msm, c='blue')
        ax.plot(y, y, c='red')  # identity line for reference
        ax.set_xlabel('Expt.(ppm)')
        ax.set_ylabel('Calc.(ppm)')
        ax.set_xlim(ax_range)
        ax.set_ylim(ax_range)
        ax.set_title("%s:%.3f" % (name, R))
        fig.tight_layout()
        fig.savefig(os.path.join(rs_p, '%s.png' % name))
        # NOTE(review): object-dtype column mix (%d %s %f) — confirm savetxt
        # formatting holds for the actual Num/Res dtypes.
        np.savetxt(os.path.join(rs_p, "%s.txt" % name),
                   np.array([num, res, exp, msm]).T, fmt="%d %s %.4f %.4f")
        plt.close()


if __name__ == "__main__":
    main()
PypiClean
/NeuroTools-0.3.1.tar.gz/NeuroTools-0.3.1/src/parameters/__init__.py
import warnings import math import numpy import operator from functools import wraps try: from urllib2 import build_opener, install_opener, urlopen, ProxyHandler # Python 2 from urlparse import urlparse except ImportError: from urllib.request import build_opener, install_opener, urlopen, ProxyHandler # Python 3 from urllib.parse import urlparse from NeuroTools.random import ParameterDist, GammaDist, UniformDist, NormalDist from os import environ, path import random from copy import copy try: basestring except NameError: basestring = str try: next # Python 3 except NameError: def next(obj): # Python 2 return obj.next() __version__ = '0.2.1' if 'HTTP_PROXY' in environ: HTTP_PROXY = environ['HTTP_PROXY'] # user has to define it ''' next lines are for communication to urllib of proxy information ''' proxy_support = ProxyHandler({"https": HTTP_PROXY}) opener = build_opener(proxy_support, HTTPHandler) install_opener(opener) def isiterable(x): return (hasattr(x, '__iter__') and not isinstance(x, basestring)) def contains_instance(collection, cls): return any(isinstance(o, cls) for o in collection) def nesteddictwalk(d, separator='.'): """ Walk a nested dict structure, using a generator. Composite keys are created by joining each key to the key of the parent dict using `separator`. """ for key1, value1 in d.items(): if isinstance(value1, dict): for key2, value2 in nesteddictwalk(value1, separator): # recurse into subdict yield "%s%s%s" % (key1, separator, key2), value2 else: yield key1, value1 def nesteddictflatten(d, separator='.'): """ Return a flattened version of a nested dict structure. Composite keys are created by joining each key to the key of the parent dict using `separator`. 
""" flatd = {} for k, v in nesteddictwalk(d, separator): flatd[k] = v return flatd # --- Parameters, and ranges and distributions of them ------------------- class Parameter(object): def __init__(self, value, units=None, name=""): self.name = name self.value = value self.units = units self.type = type(value) def __repr__(self): s = "%s = %s" % (self.name, self.value) if self.units is not None: s += " %s" % self.units return s class ParameterRange(Parameter): """ A class for specifying a list of possible values for a given parameter. The value must be an iterable. It acts like a Parameter, but .next() can be called to iterate through the values """ def __init__(self, value, units=None, name="", shuffle=False): if not isiterable(value): raise TypeError("A ParameterRange value must be iterable") Parameter.__init__(self, next(value.__iter__()), units, name) self._values = copy(value) self._iter_values = self._values.__iter__() if shuffle: random.shuffle(self._values) def __repr__(self): units_str = '' if self.units: units_str = ', units="%s"' % self.units return 'ParameterRange(%s%s)' % (self._values.__repr__(), units_str) def __iter__(self): self._iter_values = self._values.__iter__() return self._iter_values def __next__(self): self._value = next(self._iter_values) return self._value def next(self): return self.__next__() def __len__(self): return len(self._values) def __eq__(self, o): if (type(self) == type(o) and self.name == o.name and self._values == o._values and self.units == o.units): return True else: return False # --- ReferenceParameter def reverse(func): """Given a function f(a, b), returns f(b, a)""" @wraps(func) def reversed_func(a, b): return func(b, a) reversed_func.__doc__ = "Reversed argument form of %s" % func.__doc__ reversed_func.__name__ = "reversed %s" % func.__name__ return reversed_func def lazy_operation(name, reversed=False): def op(self, val): f = getattr(operator, name) if reversed: f = reverse(f) self.operations.append((f, val)) return 
self return op class ParameterReference(object): """ This class provides a place-holder for a reference parameter that will later be replaced with the value of the parameter pointed to by the reference. This class also allows for lazy application of operations, meaning that one can use the reference in simple formulas that will get evaluated at the moment the reference is replaced. Check below which operations are supported. """ def __init__(self,reference): object.__init__(self) self.reference_path = reference self.operations = [] def _apply_operations(self, x): for f, arg in self.operations: try: if arg is None: x = f(x) else: x = f(x, arg) except TypeError: raise TypeError("ParameterReference: error applying operation " + str(f) + " with argument " + str(arg) + " to " + str(x)) return x def evaluate(self,parameter_set): """ This function evaluetes the reference, using the ParameterSet in parameter_set as the source. """ ref_value = parameter_set[self.reference_path] if isinstance(ref_value,ParameterSet): if self.operations == []: return ref_value.tree_copy() else: raise ValueError("ParameterReference: lazy operations cannot be applied to argument of type ParameterSet> %s" % self.reference_path) elif isinstance(ref_value,ParameterReference): #lets wait until the refe return self else: return self._apply_operations(ref_value) def copy(self): pr = ParameterReference(self.reference_path) for f, arg in self.operations: if isinstance(arg,ParameterReference): pr.operations.append((f,arg.copy())) else: pr.operations.append((f,arg)) return pr __add__ = lazy_operation('add') __radd__ = __add__ __sub__ = lazy_operation('sub') __rsub__ = lazy_operation('sub', reversed=True) __mul__ = lazy_operation('mul') __rmul__ = __mul__ __div__ = lazy_operation('div') __rdiv__ = lazy_operation('div', reversed=True) __truediv__ = lazy_operation('truediv') __rtruediv__ = lazy_operation('truediv', reversed=True) __pow__ = lazy_operation('pow') def load_parameters(parameter_url, 
modified_parameters): """ This is a function that should be used to load a ParameterSet from a url. `modified_parameters` should be a dictionary of parameters and their values. These will be replaced in the loaded parameter set before the references are expanded. """ parameters = ParameterSet(parameter_url) parameters.replace_values(**modified_parameters) parameters.replace_references() return parameters class ParameterSet(dict): """ A class to manage hierarchical parameter sets. Usage example:: >>> sim_params = ParameterSet({'dt': 0.1, 'tstop': 1000.0}) >>> exc_cell_params = ParameterSet("http://neuralensemble.org/svn/NeuroTools/example.params") >>> inh_cell_params = ParameterSet({'tau_m': 15.0, 'cm': 0.5}) >>> network_params = ParameterSet({'excitatory_cells': exc_cell_params, 'inhibitory_cells': inh_cell_params}) >>> P = ParameterSet({'sim': sim_params, 'network': network_params}) >>> P.sim.dt 0.1 >>> P.network.inhibitory_cells.tau_m 15.0 >>> print P.pretty() """ non_parameter_attributes = ['_url', 'label', 'names', 'parameters', 'flat', 'flatten', 'non_parameter_attributes'] invalid_names = ['parameters', 'names'] # should probably add dir(dict) @staticmethod def read_from_str(s, update_namespace=None): """ `ParameterSet` definition `s` should be a Python dict definition string, containing objects of types `int`, `float`, `str`, `list`, `dict` plus the classes defined in this module, `Parameter`, `ParameterRange`, etc. No other object types are allowed, except the function `url('some_url')` or `ref('point.delimited.path')`, e.g.:: { 'a' : {'A': 3, 'B': 4}, 'b' : [1,2,3], 'c' : 'hello world', 'd' : url('http://example.com/my_cool_parameter_set') 'e' : ref('level1_param_name.level2_param_name.level3_param_name') } This is largely the JSON (www.json.org) format, but with extra keywords in the namespace such as `ParameterRange`, `GammaDist`, etc. 
""" global_dict = dict(ref=ParameterReference, url=ParameterSet, ParameterSet=ParameterSet, ParameterRange=ParameterRange, ParameterTable=ParameterTable, GammaDist=GammaDist, UniformDist=UniformDist, NormalDist=NormalDist, pi=math.pi, true=True, # these are for reading JSON false=False, # files ) if update_namespace: global_dict.update(update_namespace) D = None try: if 'file://' in s: path = s.split('file://')[1] ifile = open(path, 'r') content = ifile.read() ifile.close() D = eval(content, global_dict) else: D = eval(s, global_dict) except SyntaxError as e: raise SyntaxError( "Invalid string for ParameterSet definition: %s\n%s" % (s, e)) except TypeError as e: raise SyntaxError( "Invalid string for ParameterSet definition: %s" % e) return D or {} @staticmethod def check_validity(k): """docstring missing""" if k in ParameterSet.invalid_names: raise Exception("'%s' is not allowed as a parameter name." % k) def __init__(self, initialiser, label=None, update_namespace=None): def walk(d, label): # Iterate through the dictionary `d`, replacing `dict`s by # `ParameterSet` objects. for k, v in d.items(): ParameterSet.check_validity(k) if isinstance(v, ParameterSet): d[k] = v elif isinstance(v, dict): d[k] = walk(v, k) else: d[k] = v return ParameterSet(d, label) self._url = None if isinstance(initialiser, basestring): # url or str if path.exists(initialiser): f = open(initialiser, 'r') pstr = f.read() self._url = initialiser f.close() else: try: f = urlopen(initialiser) pstr = f.read().decode() self._url = initialiser except IOError as e: pstr = initialiser self._url = None else: f.close() # is it a yaml url? if self._url: o = urlparse(self._url) base, ext = path.splitext(o.path) if ext in ['.yaml', '.yml']: import yaml initialiser = yaml.load(pstr) else: initialiser = ParameterSet.read_from_str(pstr, update_namespace) else: initialiser = ParameterSet.read_from_str(pstr, update_namespace) # By this stage, `initialiser` should be a dict. 
Iterate through it, # copying its contents into the current instance, and replacing dicts by # ParameterSet objects. if isinstance(initialiser, dict): for k, v in initialiser.items(): ParameterSet.check_validity(k) if isinstance(v, ParameterSet): self[k] = v elif isinstance(v, dict): self[k] = walk(v, k) else: self[k] = v else: raise TypeError( "`initialiser` must be a `dict`, a `ParameterSet` object, a string, or a valid URL") # Set the label if hasattr(initialiser, 'label'): self.label = label or initialiser.label # if initialiser was a ParameterSet, keep the existing label if the label arg is None else: self.label = label # Define some aliases, allowing, e.g.: # for name, value in P.parameters(): # for name in P.names(): self.names = self.keys self.parameters = self.items def flat(self): __doc__ = nesteddictwalk.__doc__ return nesteddictwalk(self) def flatten(self): __doc__ = nesteddictflatten.__doc__ return nesteddictflatten(self) def __getattr__(self, name): """Allow accessing parameters using dot notation.""" try: return self[name] except KeyError: return self.__getattribute__(name) def __setattr__(self, name, value): """Allow setting parameters using dot notation.""" if name in self.non_parameter_attributes: object.__setattr__(self, name, value) else: # should we check the parameter type hasn't changed? self[name] = value def __getitem__(self, name): """ Modified get that detects dots '.' in the names and goes down the nested tree to find it""" split = name.split('.', 1) if len(split) == 1: return dict.__getitem__(self, name) # nested get return dict.__getitem__(self, split[0])[split[1]] def flat_add(self, name, value): """ Like `__setitem__`, but it will add `ParameterSet({})` objects into the namespace tree if needed. 
""" split = name.split('.', 1) if len(split) == 1: dict.__setitem__(self, name, value) else: # nested set try: ps = dict.__getitem__(self, split[0]) except KeyError: # setting nested name without parent existing # create parent ps = ParameterSet({}) dict.__setitem__(self, split[0], ps) # and try again ps.flat_add(split[1], value) def __setitem__(self, name, value): """ Modified set that detects dots '.' in the names and goes down the nested tree to set it """ split = name.split('.', 1) if len(split) == 1: dict.__setitem__(self, name, value) else: # nested set dict.__getitem__(self, split[0])[split[1]] = value def update(self, E, **F): """docstring missing""" if hasattr(E, "has_key"): for k in E: self[k] = E[k] else: for (k, v) in E: self[k] = v for k in F: self[k] = F[k] # should __len__() be the usual dict length, or the flattened length? Probably the former for consistency with dicts # can always use len(ps.flatten()) # what about __contains__()? Should we drill down to lower levels in the # hierarchy? I think so. def __getstate__(self): """For pickling.""" return self def save(self, url=None, expand_urls=False): """ Write the parameter set to a text file. The text file syntax is open to discussion. My idea is that it should be valid Python code, preferably importable as a module. If `url` is `None`, try to save to `self._url` (if it is not `None`), otherwise save to `url`. 
""" # possible solution for HTTP PUT: http://inamidst.com/proj/put/put.py if not url: url = self._url assert url != '' if not self._url: self._url = url scheme, netloc, path, parameters, query, fragment = urlparse(url) if scheme == 'file' or (scheme == '' and netloc == ''): f = open(path, 'w') f.write(self.pretty(expand_urls=expand_urls)) f.close() else: if scheme: raise Exception( "Saving using the %s protocol is not implemented" % scheme) else: raise Exception("No protocol (http, ftp, etc) specified.") def pretty(self, indent=' ', expand_urls=False): """ Return a unicode string representing the structure of the `ParameterSet`. evaluating the string should recreate the object. """ def walk(d, indent, ind_incr): s = [] for k, v in d.items(): if hasattr(v, 'items'): if expand_urls is False and hasattr(v, '_url') and v._url: s.append('%s"%s": url("%s"),' % (indent, k, v._url)) else: s.append('%s"%s": {' % (indent, k)) s.append(walk(v, indent+ind_incr, ind_incr)) s.append('%s},' % indent) elif isinstance(v, basestring): s.append('%s"%s": "%s",' % (indent, k, v)) else: # what if we have a dict or ParameterSet inside a list? currently they are not expanded. Should they be? s.append('%s"%s": %s,' % (indent, k, v)) return '\n'.join(s) return '{\n' + walk(self, indent, indent) + '\n}' def tree_copy(self): """Return a copy of the `ParameterSet` tree structure. 
Nodes are not copied, but re-referenced.""" tmp = ParameterSet({}) for key in self: value = self[key] if isinstance(value, ParameterSet): tmp[key] = value.tree_copy() elif isinstance(value,ParameterReference): tmp[key] = value.copy() else: tmp[key] = value if tmp._is_space(): tmp = ParameterSpace(tmp) return tmp def as_dict(self): """Return a copy of the `ParameterSet` tree structure as a nested dictionary""" tmp = {} for key in self: value = self[key] if isinstance(value, ParameterSet): # recurse tmp[key] = value.as_dict() else: tmp[key] = value return tmp def __sub__(self, other): """ Return the difference between this `ParameterSet` and another. Not yet properly implemented. """ self_keys = set(self) other_keys = set(other) intersection = self_keys.intersection(other_keys) difference1 = self_keys.difference(other_keys) difference2 = other_keys.difference(self_keys) result1 = dict([(key, self[key]) for key in difference1]) result2 = dict([(key, other[key]) for key in difference2]) # Now need to check values for intersection.... for item in intersection: if isinstance(self[item], ParameterSet): d1, d2 = self[item] - other[item] if d1: result1[item] = d1 if d2: result2[item] = d2 elif self[item] != other[item]: result1[item] = self[item] result2[item] = other[item] if len(result1) + len(result2) == 0: assert self == other, "Error in ParameterSet.diff()" return result1, result2 def _is_space(self): """ Check for the presence of `ParameterRanges` or `ParameterDists` to determine if this is a `ParameterSet` or a `ParameterSpace`. 
""" for k, v in self.flat(): if isinstance(v, ParameterRange) or isinstance(v, ParameterDist): return True return False def export(self, filename, format='latex', **kwargs): """ docstring missing """ if format == 'latex': from .export import parameters_to_latex parameters_to_latex(filename, self, **kwargs) def replace_references(self): while True: refs = self.find_references() if len(refs) == 0: break for s, k, v in refs: s[k] = v.evaluate(self) def find_references(self): l = [] for k, v in self.items(): if isinstance(v, ParameterReference): l += [(self, k, v)] elif isinstance(v, ParameterSet): l += v.find_references() return l def replace_values(self,**args): """ This expects its arguments to be in the form path=value, where path is a . (dot) delimited path to a parameter in the parameter tree rooted in this ParameterSet instance. This function replaces the values of each parameter in the args with the corresponding values supplied in the arguments. """ for k in args.keys(): self[k] = args[k] class ParameterSpace(ParameterSet): """ A collection of `ParameterSets`, representing multiple points in parameter space. Created by putting `ParameterRange` and/or `ParameterDist` objects within a `ParameterSet`. """ def iter_range_key(self, range_key): """ An iterator of the `ParameterSpace` which yields the `ParameterSet` with the `ParameterRange` given by `range_key` replaced with each of its values""" tmp = self.tree_copy() for val in self[range_key]: tmp[range_key] = val yield tmp def iter_inner_range_keys(self, keys, copy=False): """ An iterator of the `ParameterSpace` which yields `ParameterSets` with all combinations of `ParameterRange` elements which are given by the `keys` list. Note: each newly yielded value is one and the same object so storing the returned values results in a collection of many of the lastly yielded object. `copy=True` causes each yielded object to be a newly created object, but be careful because this is spawning many dictionaries! 
""" if len(keys) == 0: # return an iterator over 1 copy for modifying yield self.tree_copy() return if not copy: # recursively iterate over remaining keys for tmp in self.iter_inner_range_keys(keys[1:]): # iterator over range of our present attention for val in self[keys[0]]: tmp[keys[0]] = val if not tmp._is_space(): tmp = ParameterSet(tmp) yield tmp else: # Each yielded ParameterSet is a tree_copy of self # recursively iterate over remaining keys for tmp in self.iter_inner_range_keys(keys[1:]): # iterator over range of our present attention for val in self[keys[0]]: tmp_copy = tmp.tree_copy() tmp_copy[keys[0]] = val if not tmp_copy._is_space(): tmp = ParameterSet(tmp) yield tmp_copy def range_keys(self): """Return the list of keys for those elements which are `ParameterRanges`.""" return [key for key, value in self.flat() if isinstance(value, ParameterRange)] def iter_inner(self, copy=False): """An iterator of the `ParameterSpace` which yields `ParameterSets` with all combinations of `ParameterRange` elements""" return self.iter_inner_range_keys(self.range_keys(), copy) def num_conditions(self): """Return the number of `ParameterSets` that will be returned by the `iter_inner()` method.""" # Not properly tested n = 1 for key in self.range_keys(): n *= len(self[key]) return n def dist_keys(self): """Return the list of keys for those elements which are `ParameterDists`.""" def is_or_contains_dist(value): return isinstance(value, ParameterDist) or ( isiterable(value) and contains_instance(value, ParameterDist)) return [key for key, value in self.flat() if is_or_contains_dist(value)] def realize_dists(self, n=1, copy=False): """For each `ParameterDist`, realize the distribution and yield the result. 
If `copy==True`, causes each yielded object to be a newly created object, but be careful because this is spawning many dictionaries!""" def next(item, n): if isinstance(item, ParameterDist): return item.next(n) else: return [item]*n # pre-generate random numbers rngs = {} for key in self.dist_keys(): if isiterable(self[key]): rngs[key] = [next(item, n) for item in self[key]] else: rngs[key] = self[key].next(n) # get a copy to fill in the rngs if copy: tmp = self.tree_copy() for i in range(n): for key in rngs: if isiterable(self[key]): tmp[key] = [rngs[key][j][i] for j in range(len(rngs[key]))] else: tmp[key] = rngs[key][i] yield tmp.tree_copy() else: tmp = self.tree_copy() for i in range(n): for key in rngs: if isiterable(self[key]): tmp[key] = [rngs[key][j][i] for j in range(len(rngs[key]))] else: tmp[key] = rngs[key][i] yield tmp def parameter_space_dimension_labels(self): """ Return the dimensions and labels of the keys for those elements which are `ParameterRanges`. `range_keys` are sorted to ensure the same ordering each time. """ range_keys = self.range_keys() range_keys.sort() dim = [] label = [] for key in range_keys: label.append(key) dim.append(len(eval('self.'+key))) return dim, label def parameter_space_index(self, current_experiment): """ Return the index of the current experiment in the dimension of the parameter space i.e. parameter space dimension: [2,3] i.e. index: (1,0) Example:: p = ParameterSet({}) p.b = ParameterRange([1,2,3]) p.a = ParameterRange(['p','y','t','h','o','n']) results_dim, results_label = p.parameter_space_dimension_labels() results = numpy.empty(results_dim) for experiment in p.iter_inner(): index = p.parameter_space_index(experiment) results[index] = 2. 
""" index = [] range_keys = self.range_keys() range_keys.sort() for key in range_keys: value = eval('current_experiment.'+key) try: value_index = list(eval('self.'+key)._values).index(value) except ValueError: raise ValueError( "The ParameterSet provided is not within the ParameterSpace") index.append(value_index) return tuple(index) def get_ranges_values(self): """ Return a dict with the keys and values of the parameters with `ParameterRanges` Example:: >>> p = ParameterSpace({}) >>> p.b = ParameterRange([1,2,3]) >>> p.a = ParameterRange(['p','y','t','h','o','n']) >>> data = p.get_ranges_values() >>> data {'a': ['p', 'y', 't', 'h', 'o', 'n'], 'b': [1, 2, 3]} """ data = {} range_keys = self.range_keys() range_keys.sort() for key in range_keys: data[key] = eval('self.'+key)._values return data def string_table(tablestring): """Convert a table written as a multi-line string into a dict of dicts.""" tabledict = {} rows = tablestring.strip().split('\n') column_headers = rows[0].split() for row in rows[1:]: row = row.split() row_header = row[0] tabledict[row_header] = {} for col_header, item in zip(column_headers[1:], row[1:]): tabledict[row_header][col_header] = float(item) return tabledict class ParameterTable(ParameterSet): """ A sub-class of `ParameterSet` that can represent a table of parameters. i.e., it is limited to one-level of nesting, and each sub-dict must have the same keys. In addition to the possible initialisers for ParameterSet, a ParameterTable can be initialised from a multi-line string, e.g.:: >>> pt = ParameterTable(''' ... # col1 col2 col3 ... row1 1 2 3 ... row2 4 5 6 ... row3 7 8 9 ... 
''') >>> pt.row2.col3 6.0 >>> pt.column('col1') {'row1': 1.0, 'row2': 4.0, 'row3': 7.0} >>> pt.transpose().col3.row2 6.0 """ non_parameter_attributes = ParameterSet.non_parameter_attributes + \ ['row', 'rows', 'row_labels', 'column', 'columns', 'column_labels'] def __init__(self, initialiser, label=None): if isinstance(initialiser, basestring): # url or table string tabledict = string_table(initialiser) # if initialiser is a URL, string_table() should return an empty dict # since URLs do not contain spaces. if tabledict: # string table initialiser = tabledict ParameterSet.__init__(self, initialiser, label) # Now need to check that the contents actually define a table, i.e. # two levels of nesting and each sub-dict has the same keys self._check_is_table() self.rows = self.items # self.rows.__doc__ = "Return a list of (row_label, row) pairs, as 2-tuples.""" self.row_labels = self.keys # self.row_labels.__doc__ = "Return a list of row labels." def _check_is_table(self): """ Checks that the contents actually define a table, i.e. one level of nesting and each sub-dict has the same keys. Raises an `Exception` if these requirements are violated. """ # to be implemented pass def row(self, row_label): """Return a `ParameterSet` object containing the requested row.""" return self[row_label] def column(self, column_label): """Return a `ParameterSet` object containing the requested column.""" col = {} for row_label, row in self.rows(): col[row_label] = row[column_label] return ParameterSet(col) def columns(self): """Return a list of `(column_label, column)` pairs, as 2-tuples.""" return [(column_label, self.column(column_label)) for column_label in self.column_labels()] def column_labels(self): """Return a list of column labels.""" sample_row = self[list(self.row_labels())[0]] return sample_row.keys() def transpose(self): """ Return a new `ParameterTable` object with the same data as the current one but with rows and columns swapped. 
""" new_table = ParameterTable({}) for column_label, column in self.columns(): new_table[column_label] = column return new_table def table_string(self): """ Returns the table as a string, suitable for being used as the initialiser for a new `ParameterTable`. """ # formatting could definitely be improved column_labels = self.column_labels() lines = ["#\t " + "\t".join(column_labels)] for row_label, row in self.rows(): lines.append( row_label + "\t" + "\t".join(["%s" % row[col] for col in column_labels])) return "\n".join(lines)
PypiClean
/ETLT-0.9.6.tar.gz/ETLT-0.9.6/etlt/writer/SqlLoaderWriter.py
import abc from etlt.writer.Writer import Writer class SqlLoaderWriter(Writer): """ Abstract parent class for loading rows to a table in a database using a SQL statement for loading data from file. """ handlers = {} """ The handlers for writing objects as a field to a CSV file. :type: dict[str,callable] """ # ------------------------------------------------------------------------------------------------------------------ def __init__(self, filename, encoding='utf8'): """ Object constructor. :param str filename: The destination file for the rows. :param str encoding: The encoding of the text of the destination file. """ Writer.__init__(self) self._filename = filename """ The name of the destination file. :type: str """ self._encoding = encoding """ The encoding of the text in the destination file. :type: str """ self._file = None """ The underling file object. :type: T """ # ------------------------------------------------------------------------------------------------------------------ def __enter__(self): self._file = open(self._filename, mode='wt', encoding=self._encoding) # ------------------------------------------------------------------------------------------------------------------ def __exit__(self, exc_type, exc_value, traceback): self._file.close() # ------------------------------------------------------------------------------------------------------------------ @property def filename(self): """ Getter for filename. :rtype: str """ return self._filename # ------------------------------------------------------------------------------------------------------------------ @property def encoding(self): """ Getter for encoding. :rtype: str """ return self._encoding # ------------------------------------------------------------------------------------------------------------------ @abc.abstractmethod def get_bulk_load_sql(self, table_name): """ Returns a SQL statement for bulk loading the data writen to the destination file into a table. 
:param str table_name: The name of the table. :rtype: str """ raise NotImplementedError() # ------------------------------------------------------------------------------------------------------------------ @staticmethod def register_handler(class_name, handler): """ Registers a handler for writing instances of a class as a field to the destination file. :param str class_name: The name of the class. :param callable handler: The handler. This handler will be called with two arguments: the object which value must be writen to the destination file, the file handler. """ SqlLoaderWriter.handlers[class_name] = handler # ------------------------------------------------------------------------------------------------------------------ def _write_field(self, value): """ Write a single field to the destination file. :param T value: The value of the field. """ class_name = str(value.__class__) if class_name not in self.handlers: raise ValueError('No handler has been registered for class: {0!s}'.format(class_name)) handler = self.handlers[class_name] handler(value, self._file) # ----------------------------------------------------------------------------------------------------------------------
PypiClean
/bareon-0.0.1a3.tar.gz/bareon-0.0.1a3/bareon/drivers/data/generic.py
import os from oslo_config import cfg from bareon import errors from bareon.utils import utils from bareon.drivers.data.base import BaseDataDriver from bareon.drivers.data.base import ConfigDriveDataDriverMixin from bareon.drivers.data.base import GrubBootloaderDataDriverMixin from bareon.drivers.data.base import MultibootDeploymentMixin from bareon.drivers.data.base import PartitioningDataDriverMixin from bareon.drivers.data.base import ProvisioningDataDriverMixin opts = [ cfg.StrOpt( 'config_drive_path', default='/tmp/config-drive.img', help='Path where to store generated config drive image', ), ] CONF = cfg.CONF CONF.register_opts(opts) # TODO(lobur): This driver mostly copies nailgun driver. Need to merge them. class GenericDataDriver(BaseDataDriver, PartitioningDataDriverMixin, ProvisioningDataDriverMixin, ConfigDriveDataDriverMixin, GrubBootloaderDataDriverMixin, MultibootDeploymentMixin): def __init__(self, data): super(GenericDataDriver, self).__init__(data) # this var states whether boot partition # was already allocated on first matching volume # or not self._boot_partition_done = False # this var is used as a flag that /boot fs # has already been added. we need this to # get rid of md over all disks for /boot partition. 
self._boot_done = False @property def partition_scheme(self): if not hasattr(self, '_partition_scheme'): self._partition_scheme = self._get_partition_scheme() return self._partition_scheme @property def hw_partition_scheme(self): raise NotImplementedError @property def partitions_policy(self): """Returns string""" raise NotImplementedError @property def image_scheme(self): if not hasattr(self, '_image_scheme'): self._image_scheme = self._get_image_scheme() return self._image_scheme @property def image_meta(self): if not hasattr(self, '_image_meta'): self._image_meta = self._get_image_meta() return self._image_meta @property def grub(self): if not hasattr(self, '_grub'): self._grub = self._get_grub() return self._grub @property def operating_system(self): if not hasattr(self, '_operating_system'): self._operating_system = self._get_operating_system() return self._operating_system @property def configdrive_scheme(self): if not hasattr(self, '_configdrive_scheme'): self._configdrive_scheme = self._get_configdrive_scheme() return self._configdrive_scheme @property def is_configdrive_needed(self): raise NotImplementedError def create_configdrive(self): if self.is_configdrive_needed: self._create_configdrive() def _get_partition_scheme(self): raise NotImplementedError def _get_image_scheme(self): raise NotImplementedError def _get_image_meta(self): raise NotImplementedError def _get_grub(self): raise NotImplementedError def _get_operating_system(self): raise NotImplementedError def _get_configdrive_scheme(self): raise NotImplementedError def _create_configdrive(self): raise NotImplementedError def _add_configdrive_image(self): configdrive_device = self.partition_scheme.configdrive_device() if configdrive_device is None: raise errors.WrongPartitionSchemeError( 'Error while trying to get configdrive device: ' 'configdrive device not found') size = os.path.getsize(CONF.config_drive_path) md5 = utils.calculate_md5(CONF.config_drive_path, size) self.image_scheme.add_image( 
uri='file://%s' % CONF.config_drive_path, target_device=configdrive_device, format='iso9660', container='raw', size=size, md5=md5, ) @property def _ks_disks(self): return filter(lambda x: x['type'] == 'disk' and x['size'] > 0, self._partition_data()) @property def _ks_vgs(self): return filter(lambda x: x['type'] == 'vg', self._partition_data()) def _getlabel(self, label): if not label: return '' # XFS will refuse to format a partition if the # disk label is > 12 characters. return ' -L {0} '.format(label[:12]) @property def _small_ks_disks(self): """Get those disks which are smaller than 2T""" return [d for d in self._ks_disks if d['size'] <= 2 * 1024 * 1024] def get_os_ids(self): raise NotImplementedError
PypiClean
/Newgram-0.0.5.tar.gz/Newgram-0.0.5/newgram/methods/chats/set_chat_permissions.py
from typing import Union

import newgram
from newgram import raw
from newgram import types


class SetChatPermissions:
    async def set_chat_permissions(
        self: "newgram.Client",
        chat_id: Union[int, str],
        permissions: "types.ChatPermissions",
    ) -> "types.Chat":
        """Set default chat permissions for all members.

        You must be an administrator in the group or a supergroup for this
        to work and must have the *can_restrict_members* admin rights.

        .. include:: /_includes/usable-by/users-bots.rst

        Parameters:
            chat_id (``int`` | ``str``):
                Unique identifier (int) or username (str) of the target chat.

            permissions (:obj:`~newgram.types.ChatPermissions`):
                New default chat permissions.

        Returns:
            :obj:`~newgram.types.Chat`: On success, a chat object is returned.

        Example:
            .. code-block:: python

                from newgram.types import ChatPermissions

                # Completely restrict chat
                await app.set_chat_permissions(chat_id, ChatPermissions())

                # Allow only text and media messages
                await app.set_chat_permissions(
                    chat_id,
                    ChatPermissions(
                        can_send_messages=True,
                        can_send_media_messages=True
                    )
                )
        """
        # Telegram stores *banned* rights, so every granted "can_*"
        # permission is negated into its corresponding restriction flag.
        banned_rights = raw.types.ChatBannedRights(
            until_date=0,
            send_messages=not permissions.can_send_messages,
            send_media=not permissions.can_send_media_messages,
            send_stickers=not permissions.can_send_other_messages,
            send_gifs=not permissions.can_send_other_messages,
            send_games=not permissions.can_send_other_messages,
            send_inline=not permissions.can_send_other_messages,
            embed_links=not permissions.can_add_web_page_previews,
            send_polls=not permissions.can_send_polls,
            change_info=not permissions.can_change_info,
            invite_users=not permissions.can_invite_users,
            pin_messages=not permissions.can_pin_messages,
        )

        target_peer = await self.resolve_peer(chat_id)
        result = await self.invoke(
            raw.functions.messages.EditChatDefaultBannedRights(
                peer=target_peer,
                banned_rights=banned_rights,
            )
        )

        # The server answer carries the updated chat in its chats list.
        return types.Chat._parse_chat(self, result.chats[0])
PypiClean
/Braindecode-0.7.tar.gz/Braindecode-0.7/braindecode/visualization/confusion_matrices.py
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm


def plot_confusion_matrix(confusion_mat, class_names=None, figsize=None,
                          colormap=cm.bwr, textcolor='black', vmin=None,
                          vmax=None, fontweight='normal',
                          rotate_row_labels=90, rotate_col_labels=0,
                          with_f1_score=False, norm_axes=(0, 1),
                          rotate_precision=False, class_names_fontsize=12):
    """
    Generates a confusion matrix with additional precision and sensitivity
    metrics as in [1]_.

    Parameters
    ----------
    confusion_mat: 2d numpy array
        A confusion matrix, e.g. sklearn confusion matrix:
        https://scikit-learn.org/stable/modules/generated/sklearn.metrics.confusion_matrix.html
    class_names: array, optional
        List of classes/targets.
    figsize: tuple, optional
        Size of the generated confusion matrix figure.
    colormap: matplotlib cm colormap, optional
    textcolor: str, optional
        Color of the text in the figure.
    vmin, vmax: float, optional
        The data range that the colormap covers.
    fontweight: str, optional
        Weight of the font in the figure:
        [ 'normal' | 'bold' | 'heavy' | 'light' | 'ultrabold' | 'ultralight']
    rotate_row_labels: int, optional
        The rotation angle of the row labels
    rotate_col_labels: int, optional
        The rotation angle of the column labels
    with_f1_score: bool, optional
        If True, diagonal cells show the overall percentage together with
        the class F1 score (harmonic mean of precision and sensitivity)
        instead of the per-cell percentage.
    norm_axes: tuple, optional
        Axes summed over when normalizing the matrix for the color scale.
    rotate_precision: bool, optional
        If True, draw the "Precision" axis label rotated by 90 degrees.
    class_names_fontsize: int, optional
        Font size used for the class tick labels.

    Returns
    -------
    fig: matplotlib figure

    References
    ----------
    .. [1] Schirrmeister, R. T., Springenberg, J. T., Fiederer, L. D. J.,
       Glasstetter, M., Eggensperger, K., Tangermann, M., Hutter, F.
       & Ball, T. (2017).
       Deep learning with convolutional neural networks for EEG decoding
       and visualization.
       Human Brain Mapping, Aug. 2017.
       Online: http://dx.doi.org/10.1002/hbm.23730
    """
    # transpose to get confusion matrix same way as matlab
    # (rows become predictions, columns targets)
    confusion_mat = confusion_mat.T
    n_classes = confusion_mat.shape[0]
    if class_names is None:
        class_names = [str(i_class + 1) for i_class in range(n_classes)]

    # norm by all targets
    normed_conf_mat = confusion_mat / np.float32(
        np.sum(confusion_mat, axis=norm_axes, keepdims=True))

    fig = plt.figure(figsize=figsize)
    plt.clf()
    ax = fig.add_subplot(111)
    ax.set_aspect(1)
    if vmin is None:
        vmin = np.min(normed_conf_mat)
    if vmax is None:
        vmax = np.max(normed_conf_mat)

    # see http://stackoverflow.com/a/31397438/1469195
    # brighten so that black text remains readable
    # used alpha=0.6 before
    def _brighten(x, ):
        brightened_x = 1 - ((1 - np.array(x)) * 0.4)
        return brightened_x

    brightened_cmap = _cmap_map(_brighten, colormap)  # colormap
    # NOTE(review): the imshow call below ships commented out in this
    # release, so no colored background is drawn and brightened_cmap is
    # effectively unused — confirm against upstream braindecode before
    # re-enabling.
    # ax.imshow(np.array(normed_conf_mat), cmap=brightened_cmap,
    #           interpolation='nearest', vmin=vmin, vmax=vmax)

    # make space for precision and sensitivity
    plt.xlim(-0.5, normed_conf_mat.shape[0] + 0.5)
    plt.ylim(normed_conf_mat.shape[1] + 0.5, -0.5)
    width = len(confusion_mat)
    height = len(confusion_mat[0])
    for x in range(width):
        for y in range(height):
            # Bold the diagonal (correct classifications).
            if x == y:
                this_font_weight = 'bold'
            else:
                this_font_weight = fontweight
            # Absolute count for this cell.
            annotate_str = "{:d}".format(confusion_mat[x][y])
            annotate_str += "\n"
            # NOTE(review): annotate_str is already fully formatted, so the
            # .format() call below has no placeholders left — it is a no-op.
            ax.annotate(annotate_str.format(confusion_mat[x][y]),
                        xy=(y, x),
                        horizontalalignment='center',
                        verticalalignment='center', fontsize=12,
                        color=textcolor,
                        fontweight=this_font_weight)
            if x != y or (not with_f1_score):
                # Percentage of this cell w.r.t. the chosen normalization.
                ax.annotate(
                    "\n\n{:4.1f}%".format(normed_conf_mat[x][y] * 100),
                    xy=(y, x),
                    horizontalalignment='center',
                    verticalalignment='center', fontsize=10,
                    color=textcolor,
                    fontweight=this_font_weight)
            else:
                # Diagonal cell with F1 annotation requested:
                # show overall percentage plus per-class F1 score.
                assert x == y
                precision = confusion_mat[x][x] / float(np.sum(
                    confusion_mat[x, :]))
                sensitivity = confusion_mat[x][x] / float(np.sum(
                    confusion_mat[:, y]))
                f1_score = 2 * precision * sensitivity / (precision +
                                                          sensitivity)
                ax.annotate("\n{:4.1f}%\n{:4.1f}% (F)".format(
                    (confusion_mat[x][y] / float(np.sum(
                        confusion_mat))) * 100,
                    f1_score * 100),
                    xy=(y, x + 0.1),
                    horizontalalignment='center',
                    verticalalignment='center', fontsize=10,
                    color=textcolor,
                    fontweight=this_font_weight)

    # Add values for target correctness etc.
    # Extra column on the right: per-prediction-row correctness ("recall"
    # as labeled below); "-" when the row has no samples.
    for x in range(width):
        y = len(confusion_mat)
        if float(np.sum(confusion_mat[x, :])) == 0:
            annotate_str = "-"
        else:
            correctness = confusion_mat[x][x] / float(np.sum(
                confusion_mat[x, :]))
            annotate_str = ""
            annotate_str += "\n{:5.2f}%".format(correctness * 100)
        ax.annotate(annotate_str, xy=(y, x),
                    horizontalalignment='center',
                    verticalalignment='center', fontsize=12)
    # Extra row at the bottom: per-target-column correctness ("precision"
    # as labeled below).
    for y in range(height):
        x = len(confusion_mat)
        if float(np.sum(confusion_mat[:, y])) == 0:
            annotate_str = "-"
        else:
            correctness = confusion_mat[y][y] / float(np.sum(
                confusion_mat[:, y]))
            annotate_str = ""
            annotate_str += "\n{:5.2f}%".format(correctness * 100)
        ax.annotate(annotate_str, xy=(y, x),
                    horizontalalignment='center',
                    verticalalignment='center', fontsize=12)
    # Bottom-right corner: overall accuracy across all classes.
    overall_correctness = np.sum(np.diag(confusion_mat)) / np.sum(
        confusion_mat).astype(float)
    ax.annotate("{:5.2f}%".format(overall_correctness * 100),
                xy=(len(confusion_mat), len(confusion_mat)),
                horizontalalignment='center',
                verticalalignment='center', fontsize=12,
                fontweight='bold')

    plt.xticks(range(width), class_names, fontsize=class_names_fontsize,
               rotation=rotate_col_labels)
    plt.yticks(np.arange(0, height), class_names,
               va='center',
               fontsize=class_names_fontsize, rotation=rotate_row_labels)
    plt.grid(False)
    plt.ylabel('Predictions', fontsize=15)
    plt.xlabel('Targets', fontsize=15)

    # n classes is also shape of matrix/size
    ax.text(-1.2, n_classes + 0.2, "Recall", ha='center', va='center',
            fontsize=13)
    if rotate_precision:
        rotation = 90
        x_pos = -1.1
        va = 'center'
    else:
        rotation = 0
        x_pos = -0.8
        va = 'top'
    ax.text(n_classes, x_pos, "Precision", ha='center', va=va,
            rotation=rotation,  # 270,
            fontsize=13)

    return fig


# see http://stackoverflow.com/a/31397438/1469195
def _cmap_map(function, cmap, name='colormap_mod', N=None, gamma=None):
    """
    Modify a colormap using `function` which must operate on 3-element
    arrays of [r, g, b] values.

    You may specify the number of colors, `N`, and the opacity, `gamma`,
    value of the returned colormap. These values default to the ones in
    the input `cmap`. You may also specify a `name` for the colormap, so
    that it can be loaded using plt.get_cmap(name).
    """
    # NOTE(review): relies on matplotlib private attributes (_gamma,
    # _segmentdata) — may break on newer matplotlib versions.
    from matplotlib.colors import LinearSegmentedColormap as lsc
    if N is None:
        N = cmap.N
    if gamma is None:
        gamma = cmap._gamma
    cdict = cmap._segmentdata
    # Cast the steps into lists:
    step_dict = {key: list(map(lambda x: x[0], cdict[key])) for key in cdict}
    # Now get the unique steps (first column of the arrays):
    step_dicts = np.array(list(step_dict.values()))
    step_list = np.unique(step_dicts)
    # 'y0', 'y1' are as defined in LinearSegmentedColormap docstring
    # (values just below and just above each step):
    y0 = cmap(step_list)[:, :3]
    y1 = y0.copy()[:, :3]
    # Go back to catch the discontinuities, and place them into y0, y1
    for iclr, key in enumerate(['red', 'green', 'blue']):
        for istp, step in enumerate(step_list):
            try:
                ind = step_dict[key].index(step)
            except ValueError:
                # This step is not in this color
                continue
            y0[istp, iclr] = cdict[key][ind][1]
            y1[istp, iclr] = cdict[key][ind][2]
    # Map the colors to their new values:
    y0 = np.array(list(map(function, y0)))
    y1 = np.array(list(map(function, y1)))
    # Build the new colormap (overwriting step_dict):
    for iclr, clr in enumerate(['red', 'green', 'blue']):
        step_dict[clr] = np.vstack((step_list, y0[:, iclr], y1[:, iclr])).T
    # Remove alpha, otherwise crashes...
    step_dict.pop('alpha', None)
    return lsc(name, step_dict, N=N, gamma=gamma)
PypiClean
/MPoL-0.1.13.tar.gz/MPoL-0.1.13/src/mpol/utils.py
import numpy as np
import torch

from .constants import arcsec, c_ms, cc, deg, kB


def ground_cube_to_packed_cube(ground_cube):
    r"""Convert a Ground Cube to a Packed Visibility Cube.

    Args:
        ground_cube: 3D torch tensor of shape ``(nchan, npix, npix)``.

    Returns:
        torch.double: the cube with ``torch.fft.fftshift`` applied over the
        two spatial axes, i.e. a Packed Visibility Cube.
    """
    return torch.fft.fftshift(ground_cube, dim=(1, 2))


def packed_cube_to_ground_cube(packed_cube):
    r"""Convert a Packed Visibility Cube back to a Ground Cube.

    Args:
        packed_cube: 3D torch tensor of shape ``(nchan, npix, npix)``.

    Returns:
        torch.double: the cube with ``torch.fft.fftshift`` applied over the
        two spatial axes, i.e. a Ground Cube.
    """
    # fftshift moves the quadrants back to ground ordering
    return torch.fft.fftshift(packed_cube, dim=(1, 2))


def sky_cube_to_packed_cube(sky_cube):
    r"""Convert a Sky Cube (RA increasing to the left) to a Packed Image Cube.

    Args:
        sky_cube: 3D torch tensor of shape ``(nchan, npix, npix)``.

    Returns:
        torch.double: the RA axis is flipped and then ``torch.fft.fftshift``
        is applied over the two spatial axes.
    """
    # flip RA axis, then shift quadrants into packed order
    return torch.fft.fftshift(torch.flip(sky_cube, (2,)), dim=(1, 2))


def packed_cube_to_sky_cube(packed_cube):
    r"""Convert a Packed Image Cube to a Sky Cube.

    Args:
        packed_cube: 3D torch tensor of shape ``(nchan, npix, npix)``.

    Returns:
        torch.double: quadrants are fftshifted back and the RA axis is
        flipped so that east points left.
    """
    # shift quadrants back, then flip so that east points left
    return torch.flip(torch.fft.fftshift(packed_cube, dim=(1, 2)), (2,))


def get_Jy_arcsec2(T_b, nu=230e9):
    r"""Specific intensity from brightness temperature (Rayleigh-Jeans).

    Args:
        T_b: brightness temperature in [:math:`K`]
        nu: frequency (in Hz)

    Returns:
        float: specific intensity in [:math:`\mathrm{Jy}\, \mathrm{arcsec}^2`]
    """
    # RJ limit: I_nu [erg/s/cm^2/Hz/ster] -> Jy/ster (1e23) -> Jy/arcsec^2
    return T_b * 2 * nu**2 * kB / cc**2 * 1e23 * arcsec**2


def log_stretch(x):
    r"""Apply a log stretch: :math:`\ln(1 + |x|)`.

    Args:
        x (PyTorch tensor): input tensor

    Returns:
        the stretched tensor
    """
    return torch.log(torch.abs(x) + 1)


def loglinspace(start, end, N_log, M_linear=3):
    r"""Return bin edges: ``M_linear`` equal-width cells followed by
    ``N_log`` logarithmically spaced cells.

    The last linear cell doubles as the first logarithmic cell, so the
    total number of cells is ``M_linear + N_log - 1``.

    Args:
        start (float): starting cell left edge
        end (float): ending cell right edge
        N_log (int): number of logarithmically spaced bins
        M_linear (int): number of linearly (equally) spaced bins
    """
    # left edge of the linear-to-log transition cell
    transition = end / 10 ** (N_log * np.log10(M_linear / (M_linear - 1)))
    lin_width = transition / (M_linear - 1)
    # per-cell exponent increment for the log section
    log_step = np.log10(end / transition) / N_log

    linear_walls = [start + lin_width * i for i in range(M_linear)]
    log_walls = [10 ** (np.log10(transition) + log_step * j)
                 for j in range(1, N_log + 1)]
    return np.array(linear_walls + log_walls)


def fftspace(width, N):
    """Nearly-symmetric coordinate array of :math:`N` (even) elements
    spanning ``-width`` to ``+width`` with the midpoint landing on 0.

    Args:
        width (float): the width of the array
        N (int): the number of elements in the array

    Returns:
        numpy.float64 1D array: the fftspace array
    """
    assert N % 2 == 0, "N must be even."
    step = width * 2.0 / N
    return np.array([-width + i * step for i in range(N)], dtype="float")


def convert_baselines(baselines, freq):
    r"""Convert baselines in meters to kilolambda.

    Args:
        baselines (float or np.array): baselines in [m].
        freq (float or np.array): frequencies in [Hz]; shapes must be
            broadcast-able against ``baselines``.

    Returns:
        baselines in [klambda]
    """
    # divide by wavelength (c/nu, in m), then scale lambda -> klambda
    return 1e-3 * baselines / (c_ms / freq)


def broadcast_and_convert_baselines(u, v, chan_freq):
    r"""Convert baselines to kilolambda, broadcast per channel.

    Args:
        u (1D array nvis): baseline [m]
        v (1D array nvis): baseline [m]
        chan_freq (1D array nchan): frequencies [Hz]

    Returns:
        (u, v): each a (nchan, nvis) array of baselines in [klambda]
    """
    # replicate each baseline row once per channel
    ones_col = np.ones((len(chan_freq), 1))
    lam = c_ms / chan_freq[:, np.newaxis]  # wavelength [m] per channel

    uu = 1e-3 * (u * ones_col) / lam  # [klambda]
    vv = 1e-3 * (v * ones_col) / lam  # [klambda]
    return (uu, vv)


def get_max_spatial_freq(cell_size, npix):
    r"""Maximum spatial frequency the image can represent while satisfying
    the Nyquist sampling theorem.

    Args:
        cell_size (float): the pixel size in arcseconds
        npix (int): the number of pixels in the image

    Returns:
        max_freq: the maximum spatial frequency in the image (kilolambda)
    """
    # For even-sized arrays the highest *positive* frequency is
    # (npix/2 - 1) / (npix * cell_size); the most negative one reaches
    # -1/(2 * cell_size).
    return (npix / 2 - 1) / (npix * cell_size * arcsec) * 1e-3  # kilolambda


def get_maximum_cell_size(uu_vv_point):
    r"""Largest cell_size that still Nyquist-samples the given uu or vv
    point (not a q point).

    Args:
        uu_vv_point (float): a single spatial frequency
            [:math:`\mathrm{k}\lambda`].

    Returns:
        cell_size (in arcsec)
    """
    nyquist_freq = (2 - 1) * uu_vv_point * 1e3  # [lambda]
    return 1 / nyquist_freq / arcsec


def sky_gaussian_radians(l, m, a, delta_l, delta_m, sigma_l, sigma_m, Omega):
    r"""Evaluate a 2D sky-plane Gaussian with inputs in radians.

    The Gaussian is centered at ``(delta_l, delta_m)``, has widths
    ``(sigma_l, sigma_m)``, and is rotated ``Omega`` degrees East of North:
    coordinates are first translated to the center, then rotated, then

    .. math::

        f_\mathrm{g}(l,m) = a \exp \left ( - \frac{1}{2} \left [
        \left (\frac{l''}{\sigma_l} \right)^2 +
        \left( \frac{m''}{\sigma_m} \right )^2 \right ] \right )

    Args:
        l: units of [radians]
        m: units of [radians]
        a: amplitude prefactor
        delta_l: offset [radians]
        delta_m: offset [radians]
        sigma_l: width [radians]
        sigma_m: width [radians]
        Omega: position angle of ascending node [degrees] east of north.

    Returns:
        2D Gaussian evaluated at input args with peak amplitude :math:`a`
    """
    cos_w = np.cos(Omega * deg)
    sin_w = np.sin(Omega * deg)

    # translate to the Gaussian center
    dl = l - delta_l
    dm = m - delta_m

    # rotate by the position angle
    lpp = dl * cos_w - dm * sin_w
    mpp = dl * sin_w + dm * cos_w

    return a * np.exp(-0.5 * ((lpp / sigma_l) ** 2 + (mpp / sigma_m) ** 2))


def sky_gaussian_arcsec(x, y, a, delta_x, delta_y, sigma_x, sigma_y, Omega):
    r"""Evaluate a sky-plane Gaussian with inputs in arcsec.

    Convenience wrapper around :func:`~mpol.utils.sky_gaussian_radians`
    that converts arcsec arguments to radians.

    Args:
        x: equivalent to l, but in units of [arcsec]
        y: equivalent to m, but in units of [arcsec]
        a: amplitude prefactor
        delta_x: offset [arcsec]
        delta_y: offset [arcsec]
        sigma_x: width [arcsec]
        sigma_y: width [arcsec]
        Omega: position angle of ascending node [degrees] east of north.

    Returns:
        2D Gaussian evaluated at input args with peak amplitude :math:`a`
    """
    return sky_gaussian_radians(
        x * arcsec,
        y * arcsec,
        a,
        delta_x * arcsec,
        delta_y * arcsec,
        sigma_x * arcsec,
        sigma_y * arcsec,
        Omega,
    )


def fourier_gaussian_lambda_radians(u, v, a, delta_l, delta_m, sigma_l,
                                    sigma_m, Omega):
    r"""Fourier-plane Gaussian :math:`F_\mathrm{g}(u,v)` corresponding to
    the sky-plane Gaussian of :func:`~mpol.utils.sky_gaussian_radians`.

    Derived analytically from the Fourier dual pair (Bracewell, *The
    Fourier Transform and Its Applications*) via the similarity, rotation,
    and shift theorems:

    .. math::

        F_\mathrm{g}(u,v) = a \sigma_l \sigma_m 2 \pi
        \exp \left ( -2 \pi^2 \left [\sigma_l^2 u'^2 +
        \sigma_m^2 v'^2 \right]
        - 2 i \pi \left [\delta_l u + \delta_m v \right] \right)

    where :math:`(u', v')` are the coordinates rotated by :math:`\Omega`.
    The Gaussian is parameterized by the *sky-plane* centroid
    (``delta_l, delta_m``), widths (``sigma_l, sigma_m``) and rotation
    (``Omega``); ``a`` is assumed to be in units of
    :math:`\mathrm{Jy}/\mathrm{steradian}`.

    Args:
        u: spatial frequency in units of [lambda]
        v: spatial frequency in units of [lambda]
        a: amplitude prefactor, units of
            :math:`\mathrm{Jy}/\mathrm{steradian}`.
        delta_l: offset [radians]
        delta_m: offset [radians]
        sigma_l: width [radians]
        sigma_m: width [radians]
        Omega: position angle of ascending node [degrees] east of north.

    Returns:
        2D Gaussian evaluated at input args
    """
    cos_w = np.cos(Omega * deg)
    sin_w = np.sin(Omega * deg)

    # rotate (u, v) by the position angle (same sense as the sky rotation)
    up = u * cos_w - v * sin_w
    vp = u * sin_w + v * cos_w

    # amplitude scaling from the similarity theorem
    amp = a * sigma_l * sigma_m * 2 * np.pi
    # Gaussian envelope in the rotated frame
    envelope = -2 * np.pi**2 * (sigma_l**2 * up**2 + sigma_m**2 * vp**2)
    # phase ramp from the centroid offset (shift theorem)
    phase = -2.0j * np.pi * (delta_l * u + delta_m * v)

    return amp * np.exp(envelope + phase)


def fourier_gaussian_klambda_arcsec(u, v, a, delta_x, delta_y, sigma_x,
                                    sigma_y, Omega):
    r"""Fourier-plane Gaussian :math:`F_\mathrm{g}(u,v)` corresponding to
    the sky-plane Gaussian of :func:`~mpol.utils.sky_gaussian_arcsec`.

    Convenience wrapper around
    :func:`~mpol.utils.fourier_gaussian_lambda_radians` with baselines in
    [klambda] and sky parameters in [arcsec]; ``a`` is assumed to be in
    units of :math:`\mathrm{Jy}/\mathrm{arcsec}^2`.

    Args:
        u: spatial frequency in units of [klambda]
        v: spatial frequency in units of [klambda]
        a: amplitude prefactor, units of
            :math:`\mathrm{Jy}/\mathrm{arcsec}^2`.
        delta_x: offset [arcsec]
        delta_y: offset [arcsec]
        sigma_x: width [arcsec]
        sigma_y: width [arcsec]
        Omega: position angle of ascending node [degrees] east of north.

    Returns:
        2D Fourier Gaussian evaluated at input args
    """
    # convert units and delegate to the core routine
    return fourier_gaussian_lambda_radians(
        1e3 * u,
        1e3 * v,
        a / arcsec**2,
        delta_x * arcsec,
        delta_y * arcsec,
        sigma_x * arcsec,
        sigma_y * arcsec,
        Omega,
    )
PypiClean
/DeepPhysX.Sofa-22.12.1.tar.gz/DeepPhysX.Sofa-22.12.1/examples/demos/Liver/UNet/validation.py
import os
import sys

# Sofa related imports
import Sofa.Gui

# DeepPhysX related imports
from DeepPhysX.Sofa.Pipeline.SofaPrediction import SofaPrediction
from DeepPhysX.Core.Database.BaseDatabaseConfig import BaseDatabaseConfig
from DeepPhysX.Sofa.Environment.SofaEnvironmentConfig import SofaEnvironmentConfig
from DeepPhysX.Torch.UNet.UNetConfig import UNetConfig

# Session related imports
from download import LiverDownloader
# Side effect at import time: download/prepare the 'run' session assets
# before the Environment modules are imported.
LiverDownloader().get_session('run')
from Environment.LiverValidation import LiverValidation
from Environment.parameters import grid_resolution


def create_runner(dataset_dir):
    """Build the SofaPrediction runner for the liver UNet validation.

    Args:
        dataset_dir: path to an existing dataset to load samples from, or
            None to have the Environment compute samples on the fly.

    Returns:
        A configured SofaPrediction pipeline.
    """
    # Environment config: load samples from the dataset when one is given,
    # otherwise compute them inside the simulation.
    environment_config = SofaEnvironmentConfig(environment_class=LiverValidation,
                                               load_samples=dataset_dir is not None,
                                               env_kwargs={'compute_sample': dataset_dir is None})

    # UNet config (must match the architecture the network was trained with)
    network_config = UNetConfig(save_each_epoch=True,
                                input_size=grid_resolution,
                                nb_dims=3,
                                nb_input_channels=3,
                                nb_first_layer_channels=128,
                                nb_output_channels=3,
                                nb_steps=3,
                                two_sublayers=True,
                                border_mode='same',
                                skip_merge=False, )

    # Dataset config: 'validation' mode only applies when an existing
    # dataset directory is provided.
    database_config = BaseDatabaseConfig(existing_dir=dataset_dir,
                                         shuffle=True,
                                         normalize=True,
                                         mode=None if dataset_dir is None else 'validation')

    # Define trained network session
    dpx_session = 'liver_dpx'
    user_session = 'liver_training_user'
    # Take user session by default, fall back to the shipped DPX session
    session_name = user_session if os.path.exists('sessions/' + user_session) else dpx_session

    # Runner
    return SofaPrediction(environment_config=environment_config,
                          network_config=network_config,
                          database_config=database_config,
                          session_dir='sessions',
                          session_name=session_name,
                          nb_steps=500)


if __name__ == '__main__':

    # Define dataset
    dpx_session = 'sessions/liver_dpx'
    user_session = 'sessions/liver_data_user'
    # Take user dataset by default, fall back to the shipped DPX dataset
    dataset = user_session if os.path.exists(user_session) else dpx_session

    # Get option: '-e' switches to Environment-computed samples
    if len(sys.argv) > 1:

        # Check script option
        if sys.argv[1] != '-e':
            print("Script option must be '-e' for samples produced in Environment(s)."
                  "By default, samples are loaded from an existing Dataset.")
            quit(0)
        dataset = None

    # Check missing data: download the session assets if absent
    session_name = 'valid' if dataset is None else 'valid_data'
    LiverDownloader().get_session(session_name)

    # Create SOFA runner
    runner = create_runner(dataset)

    # Launch SOFA GUI (blocks in MainLoop until the window is closed)
    Sofa.Gui.GUIManager.Init("main", "qglviewer")
    Sofa.Gui.GUIManager.createGUI(runner.root, __file__)
    Sofa.Gui.GUIManager.SetDimension(1080, 1080)
    Sofa.Gui.GUIManager.MainLoop(runner.root)
    Sofa.Gui.GUIManager.closeGUI()

    # Manually close the runner (security if stuff like additional dataset need to be saved)
    runner.close()

    # Delete unwanted files (.ini / .log artifacts left next to the script)
    for file in os.listdir(os.path.dirname(os.path.abspath(__file__))):
        if '.ini' in file or '.log' in file:
            os.remove(file)
PypiClean
/FLAML-2.0.2-py3-none-any.whl/flaml/default/portfolio.py
import pandas as pd
import numpy as np
import argparse
from pathlib import Path
import json
from sklearn.preprocessing import RobustScaler
from flaml.default import greedy
from flaml.default.regret import load_result, build_regret
from flaml.version import __version__

# Maximum acceptable regret for a portfolio entry.
regret_bound = 0.01


def config_predictor_tuple(tasks, configs, meta_features, regret_matrix):
    """Config predictor represented in tuple.

    The returned tuple consists of (meta_features, preferences, proc).

    Args:
        tasks: Task names selecting the columns of the regret matrix.
        configs: Configuration names selecting the rows of the regret matrix.
        meta_features: A dataframe of metafeatures, one row per task.
        regret_matrix: A dataframe of the configuration(row)-task(column) regret matrix.

    Returns:
        meta_features_norm: A dataframe of normalized meta features, each column for a task.
        preferences: A dataframe of sorted configuration indices by their performance per task (column).
        proc: A dict with the scaler's "center" and "scale" used for normalization.
    """
    # pre-processing
    scaler = RobustScaler()
    meta_features_norm = meta_features.loc[tasks]  # this makes a copy
    meta_features_norm.loc[:, :] = scaler.fit_transform(meta_features_norm)
    proc = {
        "center": scaler.center_.tolist(),
        "scale": scaler.scale_.tolist(),
    }

    # best model for each dataset in training
    # choices = regret_matrix[tasks].loc[configs].reset_index(drop=True).idxmin()

    # break ties using the order in configs: each cell becomes (regret, row_index)
    # so argsort falls back to the row order for equal regrets.
    regret = (
        regret_matrix[tasks]
        .loc[configs]
        .reset_index(drop=True)
        .apply(lambda row: row.apply(lambda x: (x, row.name)), axis=1)
    )
    # FIX: removed leftover debug prints of the full regret/preference frames.
    preferences = pd.DataFrame(np.argsort(regret, axis=0), columns=regret.columns)
    return (meta_features_norm, preferences, proc)


def build_portfolio(meta_features, regret, strategy):
    """Build a portfolio from meta features and regret matrix.

    Args:
        meta_features: A dataframe of metafeatures matrix.
        regret: A dataframe of regret matrix.
        strategy: A str of the strategy, one of ("greedy", "greedy-feedback").

    Returns:
        A list of configuration names; "default" is appended when available.
    """
    assert strategy in ("greedy", "greedy-feedback")
    if strategy == "greedy":
        portfolio = greedy.construct_portfolio(regret, None, regret_bound)
    elif strategy == "greedy-feedback":
        portfolio = greedy.construct_portfolio(regret, meta_features, regret_bound)
    if "default" not in portfolio and "default" in regret.index:
        portfolio += ["default"]
    return portfolio


def load_json(filename):
    """Returns the contents of json file filename."""
    with open(filename, "r") as f:
        return json.load(f)


def _filter(preference, regret):
    """Remove choices after default or have NaN regret."""
    try:
        last = regret.index.get_loc("default")  # len(preference) - 1
        # truncate the preference list right after the "default" entry
        preference = preference[: preference[preference == last].index[0] + 1]
    except KeyError:  # no "default"
        pass
    finally:
        regret = regret.reset_index(drop=True)
        # drop choices whose regret is NaN (task never evaluated for them)
        preference = preference[regret[preference].notna().to_numpy()]
    # regret = regret[preference].reset_index(drop=True)
    # dup = regret[regret.duplicated()]
    # if not dup.empty:
    #     # break ties using the order in configs
    #     unique = dup.drop_duplicates()
    #     for u in unique:
    #         subset = regret == u
    #         preference[subset].sort_values(inplace=True)
    # # raise ValueError(preference)
    return preference.tolist()


def serialize(configs, regret, meta_features, output_file, config_path):
    """Store to disk all information FLAML-metalearn needs at runtime.

    Args:
        configs: names of model configs
        regret: regret matrix
        meta_features: task metafeatures
        output_file: filename
        config_path: path containing config json files

    Returns:
        The meta predictor dict that was written to output_file.
    """
    output_file = Path(output_file)
    # delete if exists
    try:
        output_file.unlink()
    except FileNotFoundError:
        pass
    meta_features_norm, preferences, proc = config_predictor_tuple(regret.columns, configs, meta_features, regret)
    portfolio = [load_json(config_path.joinpath(m + ".json")) for m in configs]
    regret = regret.loc[configs]
    meta_predictor = {
        "version": __version__,
        "meta_feature_names": list(meta_features.columns),
        "portfolio": portfolio,
        "preprocessing": proc,
        "neighbors": [
            {"features": x.tolist(), "choice": _filter(preferences[y], regret[y])}
            for x, y in zip(meta_features_norm.to_records(index=False), preferences.columns)
        ],
        "configsource": list(configs),
    }
    with open(output_file, "w+") as f:
        json.dump(meta_predictor, f, indent=4)
    return meta_predictor


# def analyze(regret_matrix, meta_predictor):
#     tasks = regret_matrix.columns
#     neighbors = meta_predictor["neighbors"]
#     from sklearn.neighbors import NearestNeighbors
#     nn = NearestNeighbors(n_neighbors=1)
#     for i, task in enumerate(neighbors):
#         other_tasks = [j for j in range(len(neighbors)) if j != i]
#         # find the nn and the regret
#         nn.fit([neighbors[j]["features"] for j in other_tasks])
#         dist, ind = nn.kneighbors(
#             np.array(task["features"]).reshape(1, -1), return_distance=True
#         )
#         ind = other_tasks[int(ind.item())]
#         choice = int(neighbors[ind]["choice"][0])
#         r = regret_matrix.iloc[choice, i]
#         if r > regret_bound:
#             label = "outlier"
#         else:
#             label = "normal"
#         print(tasks[i], label, tasks[ind], "dist", dist, "regret", r)
#         # find the best model and the regret
#         regrets = regret_matrix.iloc[other_tasks, i]
#         best = regrets.min()
#         if best > regret_bound:
#             print(tasks[i], "best_regret", best, "task", regrets.idxmin())


def main():
    """CLI entry point: build and serialize per-estimator (and merged) portfolios."""
    parser = argparse.ArgumentParser(description="Build a portfolio.")
    parser.add_argument("--strategy", help="One of {greedy, greedy-feedback}", default="greedy")
    parser.add_argument("--input", help="Input path")
    parser.add_argument("--metafeatures", help="CSV of task metafeatures")
    parser.add_argument("--exclude", help="One task name to exclude (for LOO purposes)")
    parser.add_argument("--output", help="Location to write portfolio JSON")
    parser.add_argument("--task", help="Task to merge portfolios", default="binary")
    parser.add_argument(
        "--estimator",
        help="Estimators to merge portfolios",
        default=["lgbm", "xgboost"],
        nargs="+",
    )
    args = parser.parse_args()
    meta_features = pd.read_csv(args.metafeatures, index_col=0).groupby(level=0).first()
    if args.exclude:
        meta_features.drop(args.exclude, inplace=True)

    baseline_best = None
    all_results = None
    for estimator in args.estimator:
        # produce regret
        # FIX: renamed the local "all" -> "results"; it shadowed the builtin all().
        results, baseline = load_result(f"{args.input}/{estimator}/results.csv", args.task, "result")
        regret = build_regret(results, baseline)
        regret = regret.replace(np.inf, np.nan).dropna(axis=1, how="all")

        if args.exclude:
            regret = regret.loc[[i for i in regret.index if args.exclude not in i]]
            regret = regret[[c for c in regret.columns if args.exclude not in c]]
        print(f"Regret matrix complete: {100 * regret.count().sum() / regret.shape[0] / regret.shape[1]}%")
        print(f"Num models considered: {regret.shape[0]}")

        configs = build_portfolio(meta_features, regret, args.strategy)
        meta_predictor = serialize(
            configs,
            regret,
            meta_features,
            f"{args.output}/{estimator}/{args.task}.json",
            Path(f"{args.input}/{estimator}"),
        )
        configsource = meta_predictor["configsource"]
        results = results.loc[configsource]
        results.rename({x: f"{estimator}/{x}" for x in regret.index.values}, inplace=True)
        baseline_best = baseline if baseline_best is None else pd.DataFrame({0: baseline_best, 1: baseline}).max(1)
        all_results = results if all_results is None else pd.concat([all_results, results])
        # analyze(regret, meta_predictor)
    regrets = build_regret(all_results, baseline_best)
    # only write a merged portfolio when several estimators were combined
    if len(args.estimator) > 1:
        meta_predictor = serialize(
            regrets.index,
            regrets,
            meta_features,
            f"{args.output}/all/{args.task}.json",
            Path(args.input),
        )


if __name__ == "__main__":
    # execute only if run as a script
    main()
PypiClean
/Lahap-0.2.1.tar.gz/Lahap-0.2.1/README.md
# Lahap

Lahap is a utility package for AWS Athena and AWS Glue.

<a href="https://github.com/psf/black"><img alt="Code Style: Black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>

## Usage
In order to run Lahap functions you must instantiate a Lahap session, a `boto3.Session` wrapper to manage boto3 calls.
You can provide any valid `boto3.Session` parameter to it.

```python
from lahap import create_session

lahap = create_session(region_name="us-east-1", profile_name="profile")  # using profile
lahap = create_session(region_name="us-east-1", aws_access_key_id="access-key",
                       aws_secret_access_key="secret-key")  # using explicit key credentials
```

### Truncate table
Deletes all S3 files located in a Glue Table's S3 location. Be careful.
```python
lahap.truncate_table(database="catalog-database", table="catalog-table")
```

### Drop table
Drops Glue Table with or without its respective data in S3. Be careful.
```python
lahap.drop_table(database="catalog-database", table="catalog-table", only_schema=False)
```

### Convert table to Parquet
Copies a table storing it as Parquet files through CTA.
```python
lahap.convert_table_to_parquet(
    query_database="source-database",
    query_table="source-table",
    compression="parquet-compression",  # "UNCOMPRESSED", "SNAPPY", "LZO", "GZIP"
    result_database="result-database",
    result_table="result-table-parquet",
    external_location="s3://my-bucket/path",
)
```

### Convert query to Parquet
Create a new table from query and storing it as Parquet files through CTA.
```python
lahap.convert_query_to_parquet(
    query="SELECT * FROM database.table",
    compression="parquet-compression",  # "UNCOMPRESSED", "SNAPPY", "LZO", "GZIP"
    result_database="result-database",
    result_table="result-table-parquet",
    external_location="s3://my-bucket/path",
)
```

## References
### Amazon Athena CTA
https://docs.aws.amazon.com/athena/latest/ug/create-table-as.html
PypiClean
/KL_Audit_supportV2.8-1.0-py3-none-any.whl/AuditModule/core/applications/AuditManagement.py
import inspect
from AuditModule.core.applications import Annotations
from AuditModule.util import Logging as LOGG
from AuditModule.common import AppConstants
# from bin.util import CommonUtils
from AuditModule.core.applications import AuditManagementModules
from AuditModule.core.persistences import PersistenceAdaptor
import traceback
import socket
import json
from datetime import datetime
# FIX: import the single name actually used instead of star-importing _thread.
from _thread import start_new_thread
from AuditModule.common.configuration_settings import config
from time import gmtime, strftime

# FIX: __all__ must be a sequence (list) of names; it was a set literal.
__all__ = ["start_new_thread"]

Logger = LOGG.get_logger()
# Persistence backend used to store audit records (Cassandra adaptor).
audit_management_obj = PersistenceAdaptor.get_instance('CassandraDButility')


class Audit:
    """Captures audit information from the caller's stack frame and persists it
    asynchronously (via a worker thread) through the Cassandra adaptor."""

    def save_audit_entry(self, op_type):
        """
        This method is for saving audit entry in the database.

        :param op_type: Operation type; also selects the target table via
            config['AUDIT_MAPPING'].
        """
        try:
            Logger.debug('Initializing the auditing')
            function_call_stack_frame_data = Annotations.fetch_function_stack_frame(inspect.stack())
            application_type = self.fetch_application_type(function_call_stack_frame_data)
            content_type = self.fetch_content_type(function_call_stack_frame_data)
            # persist in the background so the audited call is not blocked
            start_new_thread(self.save_audit_entry_impl,
                             (application_type, content_type, function_call_stack_frame_data, op_type))
        except Exception as e:
            print(traceback.format_exc())
            # FIX: logging uses lazy %-formatting; without a %s placeholder the
            # extra argument made the logging module raise a formatting error.
            Logger.error('Error in auditing: %s', str(e))

    @staticmethod
    def fetch_application_type(function_call_stack_frame_data):
        """
        This method fetches the application type based on the application context
        :param function_call_stack_frame_data: Function call stack frame data
        :return: Application type (empty string when the context is unknown)
        """
        try:
            application_context = function_call_stack_frame_data.get("application_context", "")
            application_type = AppConstants.AuditLogsConstants.application_type_json.get(
                application_context, {}).get("application_type", "")
            # FIX: removed leftover debug print statements.
            Logger.debug('Application type is {}'.format(application_type))
            return application_type
        except Exception as e:
            print(traceback.format_exc())
            Logger.error('Error in fetching application type: %s', str(e))

    @staticmethod
    def fetch_content_type(function_call_stack_frame_data):
        """
        This method fetches the content type based on the application context
        :param function_call_stack_frame_data: Function call stack frame data
        :return: Content type (empty string when the context is unknown)
        """
        try:
            application_context = function_call_stack_frame_data.get("application_context", "")
            application_type = AppConstants.AuditLogsConstants.application_type_json.get(
                application_context, {}).get("type", "")
            Logger.debug('Content type is {}'.format(application_type))
            return application_type
        except Exception as e:
            print(traceback.format_exc())
            Logger.error('Error in fetching content type: %s', str(e))

    @staticmethod
    def save_audit_entry_impl(application_type, content_type, function_call_stack_frame_data, op_type):
        """Worker executed on a background thread: build the audit record and insert it."""
        try:
            data = dict()
            user_name, client_id, user_role_name, operations, parameter_lable, status = \
                AuditManagementModules.audit_logs_modules(application_type, content_type,
                                                          function_call_stack_frame_data)
            table = config['AUDIT_MAPPING'][op_type]
            table_id = "Audit_id"
            # FIX: renamed local "id" -> "record_id"; it shadowed the builtin id().
            record_id = AuditManagementModules.generate_id(table_id, op_type)
            data['id'] = "audit_" + str(record_id)
            data["user_name"] = user_name
            data['client_id'] = client_id
            data['client_address'] = function_call_stack_frame_data['ip_address']
            data['operations'] = operations
            data['parameter_lable'] = json.dumps(parameter_lable)
            data['status'] = status
            data['module'] = op_type
            data['host_name'] = socket.gethostname()
            data['server_address'] = socket.gethostbyname(socket.gethostname())
            data['time'] = datetime.utcnow()
            data['time_zone'] = strftime("%Z", gmtime())
            audit_management_obj.insert_record(data, table)
            Logger.debug('Auditing completed')
        except Exception as ex:
            Logger.error(str(ex))
PypiClean
/Draugr-1.0.9.tar.gz/Draugr-1.0.9/.github/CODE_OF_CONDUCT.md
# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at detervistnogetmed@hotmail.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] [homepage]: http://contributor-covenant.org [version]: http://contributor-covenant.org/version/1/4/
PypiClean
/OASYS1_HALF_SRW-0.0.3-py3-none-any.whl/orangecontrib/srw/util/python_script.py
__author__ = 'labx'

import sys
import code
import keyword
import itertools

from PyQt5 import QtGui, QtWidgets
from PyQt5.QtCore import QItemSelectionModel
from PyQt5.QtGui import ( QTextCursor, QFont, QColor, QPalette )
from PyQt5.QtCore import Qt, QRegExp


def text_format(foreground=Qt.black, weight=QFont.Normal):
    """Build a QTextCharFormat with the given foreground color and font weight."""
    fmt = QtGui.QTextCharFormat()
    fmt.setForeground(QtGui.QBrush(foreground))
    fmt.setFontWeight(weight)
    return fmt


class PythonSyntaxHighlighter(QtGui.QSyntaxHighlighter):
    """Regex-based Python syntax highlighter for a QTextDocument.

    Single-line constructs (keywords, defs, strings, comments, decorators) are
    matched per rule; triple-quoted strings are tracked across blocks via the
    block state (state 1 = inside a multiline string).
    """

    def __init__(self, parent=None):
        # one rule per Python keyword, plus structural rules below
        self.keywordFormat = text_format(Qt.blue, QFont.Bold)
        self.stringFormat = text_format(Qt.darkGreen)
        self.defFormat = text_format(Qt.black, QFont.Bold)
        self.commentFormat = text_format(Qt.lightGray)
        self.decoratorFormat = text_format(Qt.darkGray)

        self.keywords = list(keyword.kwlist)

        self.rules = [(QRegExp(r"\b%s\b" % kwd), self.keywordFormat)
                      for kwd in self.keywords] + \
                     [(QRegExp(r"\bdef\s+([A-Za-z_]+[A-Za-z0-9_]+)\s*\("),
                       self.defFormat),
                      (QRegExp(r"\bclass\s+([A-Za-z_]+[A-Za-z0-9_]+)\s*\("),
                       self.defFormat),
                      (QRegExp(r"'.*'"), self.stringFormat),
                      (QRegExp(r'".*"'), self.stringFormat),
                      (QRegExp(r"#.*"), self.commentFormat),
                      (QRegExp(r"@[A-Za-z_]+[A-Za-z0-9_]+"),
                       self.decoratorFormat)]

        # both ''' and """ open/close a multiline string
        self.multilineStart = QRegExp(r"(''')|" + r'(""")')
        self.multilineEnd = QRegExp(r"(''')|" + r'(""")')

        super().__init__(parent)

    def highlightBlock(self, text):
        """Apply all single-line rules, then handle multiline string state."""
        for pattern, format in self.rules:
            exp = QRegExp(pattern)
            index = exp.indexIn(text)
            while index >= 0:
                length = exp.matchedLength()
                if exp.numCaptures() > 0:
                    # format only the captured group (e.g. the name after def/class)
                    self.setFormat(exp.pos(1), len(str(exp.cap(1))), format)
                else:
                    self.setFormat(exp.pos(0), len(str(exp.cap(0))), format)
                index = exp.indexIn(text, index + length)

        # Multi line strings
        start = self.multilineStart
        end = self.multilineEnd

        self.setCurrentBlockState(0)
        startIndex, skip = 0, 0
        if self.previousBlockState() != 1:
            # not already inside a multiline string: look for an opener
            startIndex, skip = start.indexIn(text), 3
        while startIndex >= 0:
            endIndex = end.indexIn(text, startIndex + skip)
            if endIndex == -1:
                # string continues past this block
                self.setCurrentBlockState(1)
                commentLen = len(text) - startIndex
            else:
                commentLen = endIndex - startIndex + 3
            self.setFormat(startIndex, commentLen, self.stringFormat)
            startIndex, skip = (start.indexIn(text,
                                              startIndex + commentLen + 3),
                                3)


class PythonScriptEditor(QtWidgets.QPlainTextEdit):
    """Plain-text editor with simple Python auto-indentation.

    Enter keeps/adjusts the previous line's indent (dedent after "pass"/"return",
    indent after a trailing ":"); Tab inserts INDENT spaces; Backspace on a
    whitespace-only prefix removes up to INDENT spaces.
    """
    INDENT = 4

    def lastLine(self):
        # text from the start of the current line up to the cursor
        text = str(self.toPlainText())
        pos = self.textCursor().position()
        index = text.rfind("\n", 0, pos)
        text = text[index: pos].lstrip("\n")
        return text

    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Return:
            text = self.lastLine()
            indent = len(text) - len(text.lstrip())
            if text.strip() == "pass" or text.strip().startswith("return "):
                indent = max(0, indent - self.INDENT)
            elif text.strip().endswith(":"):
                indent += self.INDENT
            super().keyPressEvent(event)
            self.insertPlainText(" " * indent)
        elif event.key() == Qt.Key_Tab:
            self.insertPlainText(" " * self.INDENT)
        elif event.key() == Qt.Key_Backspace:
            text = self.lastLine()
            if text and not text.strip():
                # only indentation before the cursor: delete one indent level
                cursor = self.textCursor()
                for i in range(min(self.INDENT, len(text))):
                    cursor.deletePreviousChar()
            else:
                super().keyPressEvent(event)
        else:
            super().keyPressEvent(event)


class PythonConsole(QtWidgets.QPlainTextEdit, code.InteractiveConsole):
    """Interactive Python console widget.

    The REPL is driven by a generator (self.loop = interact()); each Return key
    press advances the generator one step, which reads the line typed after the
    prompt and pushes it to the InteractiveConsole.
    """

    def __init__(self, locals=None, parent=None):
        QtWidgets.QPlainTextEdit.__init__(self, parent)
        code.InteractiveConsole.__init__(self, locals)
        self.history, self.historyInd = [""], 0
        self.loop = self.interact()
        next(self.loop)
        self.setStyleSheet("background-color:black; color: white; font-family: Courier, monospace;")

    def setLocals(self, locals):
        self.locals = locals

    def flush(self):
        # file-like API; output is written synchronously in write()
        pass

    def interact(self, banner=None):
        # generator-based reimplementation of InteractiveConsole.interact():
        # it yields after printing each prompt and resumes on the next Return.
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = ('Type "help", "copyright", "credits" or "license" '
                'for more information.')
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        else:
            self.write("%s\n" % str(banner))
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                self.new_prompt(prompt)
                yield
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0

    def raw_input(self, prompt):
        # the line the user just finished (block before the newline inserted on Return)
        input = str(self.document().lastBlock().previous().text())
        return input[len(prompt):]

    def new_prompt(self, prompt):
        self.write(prompt)
        # everything at/after this position is editable user input
        self.newPromptPos = self.textCursor().position()

    def write(self, data):
        cursor = QTextCursor(self.document())
        cursor.movePosition(QTextCursor.End, QTextCursor.MoveAnchor)
        cursor.insertText(data)
        self.setTextCursor(cursor)
        self.ensureCursorVisible()

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def push(self, line):
        if self.history[0] != line:
            self.history.insert(0, line)
        self.historyInd = 0

        # redirect stdout/stderr into the console while executing
        saved = sys.stdout, sys.stderr
        try:
            sys.stdout, sys.stderr = self, self
            return code.InteractiveConsole.push(self, line)
        finally:
            sys.stdout, sys.stderr = saved

    def setLine(self, line):
        # replace the current input line (after the prompt) with `line`
        cursor = QTextCursor(self.document())
        cursor.movePosition(QTextCursor.End)
        cursor.setPosition(self.newPromptPos, QTextCursor.KeepAnchor)
        cursor.removeSelectedText()
        cursor.insertText(line)
        self.setTextCursor(cursor)

    def keyPressEvent(self, event):
        if event.key() == Qt.Key_Return:
            self.write("\n")
            next(self.loop)
        elif event.key() == Qt.Key_Up:
            self.historyUp()
        elif event.key() == Qt.Key_Down:
            self.historyDown()
        elif event.key() == Qt.Key_Tab:
            self.complete()
        elif event.key() in [Qt.Key_Left, Qt.Key_Backspace]:
            # never move/delete into the prompt or earlier output
            if self.textCursor().position() > self.newPromptPos:
                QtWidgets.QPlainTextEdit.keyPressEvent(self, event)
        else:
            QtWidgets.QPlainTextEdit.keyPressEvent(self, event)

    def historyUp(self):
        self.setLine(self.history[self.historyInd])
        self.historyInd = min(self.historyInd + 1, len(self.history) - 1)

    def historyDown(self):
        self.setLine(self.history[self.historyInd])
        self.historyInd = max(self.historyInd - 1, 0)

    def complete(self):
        # completion not implemented
        pass

    def _moveCursorToInputLine(self):
        """
        Move the cursor to the input line if not already there. If the cursor
        if already in the input line (at position greater or equal to
        `newPromptPos`) it is left unchanged, otherwise it is moved at the
        end.

        """
        cursor = self.textCursor()
        pos = cursor.position()
        if pos < self.newPromptPos:
            cursor.movePosition(QTextCursor.End)
            self.setTextCursor(cursor)

    def pasteCode(self, source):
        """
        Paste source code into the console.
        """
        self._moveCursorToInputLine()

        # feed each pasted line through the REPL loop, as if typed
        for line in interleave(source.splitlines(), itertools.repeat("\n")):
            if line != "\n":
                self.insertPlainText(line)
            else:
                self.write("\n")
                next(self.loop)

    def insertFromMimeData(self, source):
        """
        Reimplemented from QPlainTextEdit.insertFromMimeData.
        """
        if source.hasText():
            self.pasteCode(str(source.text()))
            return


def interleave(seq1, seq2):
    """
    Interleave elements of `seq2` between consecutive elements of `seq1`.

    >>> list(interleave([1, 3, 5], [2, 4]))
    [1, 2, 3, 4, 5]

    """
    iterator1, iterator2 = iter(seq1), iter(seq2)
    leading = next(iterator1)
    for element in iterator1:
        yield leading
        yield next(iterator2)
        leading = element
    yield leading


class Script(object):
    """A named script plus status flags and an optional backing file."""
    Modified = 1
    MissingFromFilesystem = 2

    def __init__(self, name, script, flags=0, filename=None):
        self.name = name
        self.script = script
        self.flags = flags
        self.filename = filename


class ScriptItemDelegate(QtWidgets.QStyledItemDelegate):
    """Item delegate that renders Script items (modified ones in red, with a
    '*' prefix) and provides a line-edit editor for renaming."""

    def __init__(self, parent):
        super().__init__(parent)

    def displayText(self, script, locale):
        if script.flags & Script.Modified:
            return "*" + script.name
        else:
            return script.name

    def paint(self, painter, option, index):
        script = index.data(Qt.DisplayRole)

        if script.flags & Script.Modified:
            option = QtWidgets.QStyleOptionViewItem(option)
            option.palette.setColor(QPalette.Text, QColor(Qt.red))
            option.palette.setColor(QPalette.Highlight, QColor(Qt.darkRed))
        super().paint(painter, option, index)

    def createEditor(self, parent, option, index):
        return QtWidgets.QLineEdit(parent)

    def setEditorData(self, editor, index):
        script = index.data(Qt.DisplayRole)
        editor.setText(script.name)

    def setModelData(self, editor, model, index):
        model[index.row()].name = str(editor.text())


def select_row(view, row):
    """
    Select a `row` in an item view
    """
    selmodel = view.selectionModel()
    selmodel.select(view.model().index(row, 0),
                    QItemSelectionModel.ClearAndSelect)
/Lasagne-0.1.tar.gz/Lasagne-0.1/docs/modules/layers/conv.rst
Convolutional layers -------------------- .. automodule:: lasagne.layers.conv .. currentmodule:: lasagne.layers .. autoclass:: Conv1DLayer :members: .. autoclass:: Conv2DLayer :members: .. note:: For experts: ``Conv2DLayer`` will create a convolutional layer using ``T.nnet.conv2d``, Theano's default convolution. On compilation for GPU, Theano replaces this with a `cuDNN`_-based implementation if available, otherwise falls back to a gemm-based implementation. For details on this, please see the `Theano convolution documentation`_. Lasagne also provides convolutional layers directly enforcing a specific implementation: :class:`lasagne.layers.dnn.Conv2DDNNLayer` to enforce cuDNN, :class:`lasagne.layers.corrmm.Conv2DMMLayer` to enforce the gemm-based one, :class:`lasagne.layers.cuda_convnet.Conv2DCCLayer` for Krizhevsky's `cuda-convnet`_. .. _cuda-convnet: https://code.google.com/p/cuda-convnet/ .. _cuDNN: https://developer.nvidia.com/cudnn .. _Theano convolution documentation: http://deeplearning.net/software/theano/library/tensor/nnet/conv.html
PypiClean
/BayesASE-21.1.13.1.tar.gz/BayesASE-21.1.13.1/scripts/check_lost_reads.py
import csv
import argparse
import os
# NOTE(review): removed unused imports (sys, itertools, operator, collections,
# pandas, numpy); the complete script uses none of them.


def get_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(description="""check total reads column in alignment file match the sum of the other columns""")
    parser.add_argument('-a1', '--alnSum1', dest='alnSum1', action='store', required=True,
                        help='The G1 alignment summary file containing all read types [Required]')
    parser.add_argument('-a2', '--alnSum2', dest='alnSum2', action='store', required=True,
                        help='The G2 alignment summary file containing all read types [Required]')
    parser.add_argument('-fq', '--fq', dest='fq', action='store', required=True,
                        help='FQ file used in alignment [Required]')
    parser.add_argument('-o', '--out', dest='out', action='store', required=True,
                        help='Output file containing check info [Required]')
    args = parser.parse_args()
    return args


def _count_fq_reads(fq_path):
    """Count lines starting with '@' in the FQ file, streaming (O(1) memory).

    FIX: the original buffered every header line into a list just to take its
    length. NOTE(review): in real FASTQ a quality line may also start with '@';
    this keeps the original '@'-prefix counting semantics — confirm upstream
    that headers are the only '@' lines.
    """
    with open(fq_path, 'r') as fq:
        return sum(1 for line in fq if line.startswith("@"))


def _summary_read_total(sum_path):
    """Return opposite + unmapped + mapped + ambiguous of the last data row.

    Mirrors the original loop, which kept only the values from the final row;
    returns 0 (instead of raising NameError) for a summary with no data rows.
    """
    total = 0
    with open(sum_path, 'r') as sum_table:
        reader = csv.reader(sum_table, delimiter='\t')
        next(reader)  # skip header row
        for row in reader:
            total = int(row[2]) + int(row[3]) + int(row[4]) + int(row[5])
    return total


def main():
    """Compare read counts in the FQ file against both alignment summaries."""
    args = get_args()

    start_reads = _count_fq_reads(args.fq)

    bname = os.path.basename(args.fq)
    name = os.path.splitext(bname)[0]

    end_reads1 = _summary_read_total(args.alnSum1)
    end_reads2 = _summary_read_total(args.alnSum2)

    # 1 when the FQ read count matches the summary total, else 0
    flag_start_readNum_eq_end_readNum_G1 = int(start_reads == end_reads1)
    flag_start_readNum_eq_end_readNum_G2 = int(start_reads == end_reads2)

    with open(args.out, 'w') as outfile:
        csvwriter = csv.writer(outfile, delimiter='\t')
        header = ['fqName', 'start_read_num', 'readNum_G1', 'readNum_G2',
                  'flag_start_readNum_eq_readNum_G1', 'flag_start_readNum_eq_readNum_G2']
        csvwriter.writerow(header)
        row_items = [name, start_reads, end_reads1, end_reads2,
                     flag_start_readNum_eq_end_readNum_G1,
                     flag_start_readNum_eq_end_readNum_G2]
        csvwriter.writerow(row_items)


if __name__ == '__main__':
    main()
PypiClean
/Homevee_Dev-0.0.0.0-py3-none-any.whl/Homevee/Item/HomeBudgetItem.py
import traceback

from Homevee.Exception import DatabaseSaveFailedException, InvalidParametersException
from Homevee.Helper import Logger
from Homevee.Item import Item
from Homevee.Utils.Database import Database


class HomeBudgetItem(Item):
    """A single home-budget ledger entry (date, label and amount), persisted in
    the HOME_BUDGET_DATA table."""

    def __init__(self, date, info, amount, id=None):
        super(HomeBudgetItem, self).__init__()
        self.date = date        # entry date
        self.info = info        # free-text label
        self.amount = amount    # monetary amount
        self.id = id            # None/"" until first saved

    def delete(self, db=None):
        """Delete this entry from the database; True on success, False otherwise."""
        try:
            Database.delete("DELETE FROM HOME_BUDGET_DATA WHERE ID == :id", {'id': self.id}, db)
            return True
        # FIX: narrowed the bare except (it also swallowed KeyboardInterrupt/SystemExit)
        except Exception:
            return False

    def save_to_db(self, db=None):
        """Insert this entry (when it has no id yet) or update the existing row.

        Raises:
            DatabaseSaveFailedException: if the insert/update fails.
        """
        try:
            if (self.id is None or self.id == ""):
                new_id = Database.insert(
                    """INSERT INTO HOME_BUDGET_DATA (DATE, INFO, AMOUNT) VALUES (:date, :info, :amount)""",
                    {'date': self.date, 'info': self.info, 'amount': self.amount}, db)
                self.id = new_id
            # update
            else:
                Database.update(
                    """UPDATE HOME_BUDGET_DATA SET DATE = :date, INFO = :info, AMOUNT = :amount WHERE ID = :id""",
                    {'date': self.date, 'info': self.info, 'amount': self.amount, 'id': self.id}, db)
        # FIX: narrowed the bare except
        except Exception:
            if (Logger.IS_DEBUG):
                traceback.print_exc()
            raise DatabaseSaveFailedException("Could not save homebudgetitem to database")

    def build_dict(self):
        """Return this entry as a plain dict (id, info, date, amount)."""
        # FIX: renamed the local "dict" -> "data"; it shadowed the builtin.
        data = {
            'id': self.id,
            'info': self.info,
            'date': self.date,
            'amount': self.amount
        }
        return data

    @staticmethod
    def load_all_from_db(query, params, db=None):
        """Run `query` and map each row (FORMATTED_DATE, INFO, AMOUNT, ID) to an item."""
        items = []
        for result in Database.select_all(query, params, db):
            item = HomeBudgetItem(result['FORMATTED_DATE'], result['INFO'],
                                  result['AMOUNT'], result['ID'])
            items.append(item)
        return items

    @staticmethod
    def load_all(db=None):
        """Load every budget entry.

        FIX: this query targeted HOME_BUDGET_ITEM although every other statement
        in this class uses HOME_BUDGET_DATA — presumably a typo; verify against
        the actual schema.
        """
        return HomeBudgetItem.load_all_from_db(
            """SELECT INFO, AMOUNT, ID, strftime(\"%d.%m.%Y\", DATE) as FORMATTED_DATE
            FROM HOME_BUDGET_DATA""", {}, db)

    @staticmethod
    def load_home_budget_items_by_date(date, db):
        """Load the entries recorded on the given date.

        FIX: the SQL literal started with four quote characters (\"\"\"\"SELECT),
        which injected a stray '\"' into the statement and would break it.
        """
        return HomeBudgetItem.load_all_from_db(
            """SELECT *, strftime(\"%d.%m.%Y\", DATE) as FORMATTED_DATE FROM HOME_BUDGET_DATA
            WHERE DATE = :date GROUP BY DATE ORDER BY DATE ASC""",
            {'date': date}, db)

    @staticmethod
    def create_from_dict(dict):
        """Build a HomeBudgetItem from a dict produced by build_dict().

        Raises:
            InvalidParametersException: if a required key is missing.
        """
        try:
            id = dict['id']
            date = dict['date']
            info = dict['info']
            amount = dict['amount']
            item = HomeBudgetItem(date, info, amount, id)
            return item
        except Exception:
            raise InvalidParametersException("HomeBudgetItem.create_from_dict(): invalid dict")
PypiClean
/FlaskWarts-0.1a5.tar.gz/FlaskWarts-0.1a5/README.rst
===========
FlaskWarts
===========

FlaskWarts are a set of utility classes and functions for making it easier to
overcome some of the warts that one always seem to encounter when developing
Flask applications. It's called 'warts' because 'utils' was taken.

The main package is still called ``utils``, because that's what it was called
in a few apps author was using the code in, and was too lazy to refactor them
all.

Overview
========

The library is not too generic. In fact, it's quite opinionated. It's made
available as is if you care to use it, but it's mainly there for the author's
use cases.

While not an immediate goal, it is author's intention to make this a
fully-compliant Flask extension at some point, and far more flexible. It may
be broken down into multiple separate libraries as well. Currently, it is a
multi-purpose library that expects you to organize your site as a single
application and uses ``flask.current_app`` extensively. While this is not a
good pattern in general, it's a pattern that works (for the time being). It
also expects that you place your configuration in ``app.config``.

Note that not all code is fully developed and tested. Some of the code even
misses unit tests, and documentation doesn't exist. So this is pretty much
pre-alpha software. Also, don't expect anything in the way of API stability
in any form.

Features
========

The utils have following features:

+ Class-based route handlers (``utils.routes``)
+ Form-handling with Formencode (``utils.forms``)
+ CSRF middleware (``utils.middlewares``)
+ Basic timezone manipulation support (``utils.timezone``)
+ One decorator for denying non-XHR requests (``utils.decorators``)

Installation
============

Either::

    easy_install FlaskWarts

Or::

    pip install FlaskWarts

Class-based route handling
==========================

Please see the sources for now. More detailed docs are planned for future
releases. It's similar to Django's CBVs but not quite the same.
Simple example::

    from flask import render_template
    from utils.routes import Route

    class MyRoute(Route):
        path = '/my'

        def GET(self):
            return render_template('foo.html')

Method names correspond to HTTP methods, and any positional or keyword
arguments in the paths are passed as positional and keyword arguments to the
method. In addition, the positional and keyword arguments are accessible as
``self.args`` and ``self.kwargs``, a list and dict respectively. Methods are
expected to return a response just like normal flask route handler functions.

By default, HTTP method overrides are supported via the ``_method`` request
parameter for all POST requests, and the ``Route`` class intelligently maps
them to the correct instance method. This is handled transparently behind the
scenes so you don't have to worry about it. You can disable this behavior, by
setting the ``allow_overrides`` property to ``False`` in your subclass.

There are many subclasses of the ``Route`` class which implement different
interfaces for common tasks like form processing and template rendering. Some
of them implement multiple HTTP methods. For now you will have to look at the
sources to find out more.

Routes are registered by calling the ``register()`` class method, and passing
it an application object::

    MyRoute.register(app)

The route name can be explicitly specified using the ``name`` property.
Otherwise, it is derived from the class name by decamelizing it. For
instance, ``MyRoute`` has a name of ``my_route``. ``Foo`` has a name of
``foo``, and so on.

If you organize routes into modules (e.g., have multiple route classes in a
single module), you can register them in batches. ::

    from utils.routes import register_module
    register_module(app, 'myapp.routes')

The ``register_module()`` function will register any object that has
``register()`` and ``get_route_name()`` attributes, and whose path attribute
is not ``None``.
This is generally a good enough set of rules to catch all routes, but if you
have objects that accidentally fulfill the conditions, the function will try
to register it, so be careful.

If you want to explicitly exclude routes, you can use the ``exclude``
argument. This argument should be an iterable containing names of classes or
route names. For instance::

    register_module(app, 'myapp.routes', exclude=['Test', 'mickey_mouse'])

The above excludes classes ``Test`` and ``MickeyMouse``.

Form-handling with Formencode
=============================

Allows basic form validation using Formencode's ``htmlfill_schemabuilder``.
Docs on the way, so stay tuned.

CSRF middleware
===============

A bit rough at the moment, but works. Uses ``os.urandom`` for random number
generation, you've been warned. It also makes Jinja2 a requirement.

Basic usage is simply::

    from utils.middlewares import csrf
    csrf(app)

In template::

    <form method="POST">
        {{ csrf_tag }}
        ...
    </form>

Docs? Who said anything about docs? :D
PypiClean
/Av1an-6.1.post2.tar.gz/Av1an-6.1.post2/av1an/chunk/Chunk.py
from pathlib import Path
from typing import Any, Dict, List

try:
    from av1an.commandtypes import Command
except ImportError:
    # Fallback so this pure data module can be imported standalone (e.g. in
    # unit tests) without the full av1an package. A Command is an argv-style,
    # JSON-serializable command (see the note in __init__) — presumably a
    # list of strings; confirm against av1an.commandtypes.
    Command = List[str]


class Chunk:
    """
    Chunk class. Stores information relating to a chunk: the ffmpeg command
    that generates the chunk and the encoding outputs derived from its index.
    """

    def __init__(self, temp: Path, index: int, ffmpeg_gen_cmd: Command,
                 output_ext: str, size: int, frames: int):
        """
        Chunk class constructor.

        Note: while ffmpeg_gen_cmd is a Command, it must be serializable,
        so Path can't be used.

        :param temp: The temp directory
        :param index: the index of this chunk
        :param ffmpeg_gen_cmd: the ffmpeg command that will generate this chunk
        :param output_ext: the output extension after encoding
        :param size: the size of this chunk. used for sorting
        :param frames: the number of frames in this chunk
        """
        self.index: int = index
        self.ffmpeg_gen_cmd: Command = ffmpeg_gen_cmd
        self.size: int = size
        self.temp: Path = temp
        self.frames: int = frames
        self.output_ext: str = output_ext
        # Filled in later by target-quality passes; unknown at construction.
        self.per_shot_target_quality_cq = None
        self.per_frame_target_quality_q_list = None

    def to_dict(self) -> Dict[str, Any]:
        """
        Converts this chunk to a dictionary for easy json serialization.

        Note: per_frame_target_quality_q_list is not serialized here, and
        create_from_dict does not restore it either.

        :return: A dictionary
        """
        return {
            'index': self.index,
            'ffmpeg_gen_cmd': self.ffmpeg_gen_cmd,
            'size': self.size,
            'frames': self.frames,
            'output_ext': self.output_ext,
            'per_shot_target_quality_cq': self.per_shot_target_quality_cq,
        }

    @property
    def fake_input_path(self) -> Path:
        """
        Returns the mkv chunk file that would have been used for this chunk
        in the old chunk system.

        Ex: .temp/split/00000.mkv

        :return: a path
        """
        return (self.temp / 'split') / f'{self.name}.mkv'

    @property
    def output_path(self) -> Path:
        """
        Gets the path of this chunk after being encoded, with an extension.

        Ex: Path('.temp/encode/00000.ivf')

        :return: the Path of this encoded chunk
        """
        return (self.temp / 'encode') / f'{self.name}.{self.output_ext}'

    @property
    def output(self) -> str:
        """
        Gets the posix string of this chunk's output_path (with extension).

        See: Chunk.output_path
        Ex: '.temp/encode/00000.ivf'

        :return: the string of this chunk's output path
        """
        return self.output_path.as_posix()

    @property
    def fpf(self) -> str:
        """
        Gets the posix string of this chunk's first pass file, no extension.

        Ex: '.temp/split/00000_fpf'

        :return: the string of this chunk's first pass file (no extension)
        """
        fpf_file = (self.temp / 'split') / f'{self.name}_fpf'
        return fpf_file.as_posix()

    @property
    def name(self) -> str:
        """
        Gets the name of this chunk: the index zero padded to length 5.

        Ex: '00000'

        :return: the name of this chunk as a string
        """
        return str(self.index).zfill(5)

    @staticmethod
    def create_from_dict(d: dict, temp):
        """
        Creates a chunk from a dictionary.

        See: Chunk.to_dict

        :param d: the dictionary
        :param temp: the temp directory
        :return: A Chunk from the dictionary
        """
        chunk = Chunk(temp, d['index'], d['ffmpeg_gen_cmd'], d['output_ext'],
                      d['size'], d['frames'])
        chunk.per_shot_target_quality_cq = d['per_shot_target_quality_cq']
        return chunk

    def make_q_file(self, q_list) -> Path:
        """
        Writes one q value per line to a text file next to the fake input path.

        :param q_list: iterable of q values
        :return: the Path of the written q file
        """
        qfile = self.fake_input_path.with_name(
            f'q_file_{self.name}').with_suffix('.txt')
        with open(qfile, 'w') as fl:
            # single join instead of quadratic string concatenation
            fl.write(''.join(f'{q}\n' for q in q_list))
        return qfile
PypiClean
/IPython-Dashboard-0.1.5.tar.gz/IPython-Dashboard-0.1.5/dashboard/static/js/gridstack-0.2.4.min.js
!function(t){"function"==typeof define&&define.amd?define(["jquery","lodash","jquery-ui/core","jquery-ui/widget","jquery-ui/mouse","jquery-ui/draggable","jquery-ui/resizable"],t):t(jQuery,_)}(function(t,e){var i=window,n={is_intercepted:function(t,e){return!(t.x+t.width<=e.x||e.x+e.width<=t.x||t.y+t.height<=e.y||e.y+e.height<=t.y)},sort:function(t,i,n){return n=n||e.chain(t).map(function(t){return t.x+t.width}).max().value(),i=-1!=i?1:-1,e.sortBy(t,function(t){return i*(t.x+t.y*n)})},create_stylesheet:function(t){var e=document.createElement("style");return e.setAttribute("type","text/css"),e.setAttribute("data-gs-id",t),e.styleSheet?e.styleSheet.cssText="":e.appendChild(document.createTextNode("")),document.getElementsByTagName("head")[0].appendChild(e),e.sheet},insert_css_rule:function(t,e,i,n){"function"==typeof t.insertRule?t.insertRule(e+"{"+i+"}",n):"function"==typeof t.addRule&&t.addRule(e,i,n)},toBool:function(t){return"boolean"==typeof t?t:"string"==typeof t?(t=t.toLowerCase(),!(""==t||"no"==t||"false"==t||"0"==t)):Boolean(t)}},o=0,s=function(t,e,i,n,o){this.width=t,this["float"]=i||!1,this.height=n||0,this.nodes=o||[],this.onchange=e||function(){},this._update_counter=0,this._float=this["float"]};s.prototype.batch_update=function(){this._update_counter=1,this["float"]=!0},s.prototype.commit=function(){this._update_counter=0,0==this._update_counter&&(this["float"]=this._float,this._pack_nodes(),this._notify())},s.prototype._fix_collisions=function(t){this._sort_nodes(-1);var i=t,o=Boolean(e.find(this.nodes,function(t){return t.locked}));for(this["float"]||o||(i={x:0,y:t.y,width:this.width,height:t.height});;){var s=e.find(this.nodes,function(e){return e!=t&&n.is_intercepted(e,i)},this);if("undefined"==typeof s)return;this.move_node(s,s.x,t.y+t.height,s.width,s.height,!0)}},s.prototype.is_area_empty=function(t,i,o,s){var a={x:t||0,y:i||0,width:o||1,height:s||1},h=e.find(this.nodes,function(t){return n.is_intercepted(t,a)},this);return 
null==h},s.prototype._sort_nodes=function(t){this.nodes=n.sort(this.nodes,t,this.width)},s.prototype._pack_nodes=function(){this._sort_nodes(),this["float"]?e.each(this.nodes,function(t){if(!t._updating&&"undefined"!=typeof t._orig_y&&t.y!=t._orig_y)for(var i=t.y;i>=t._orig_y;){var o=e.chain(this.nodes).find(function(e){return t!=e&&n.is_intercepted({x:t.x,y:i,width:t.width,height:t.height},e)}).value();o||(t._dirty=!0,t.y=i),--i}},this):e.each(this.nodes,function(t,i){if(!t.locked)for(;t.y>0;){var o=t.y-1,s=0==i;if(i>0){var a=e.chain(this.nodes).take(i).find(function(e){return n.is_intercepted({x:t.x,y:o,width:t.width,height:t.height},e)}).value();s="undefined"==typeof a}if(!s)break;t._dirty=t.y!=o,t.y=o}},this)},s.prototype._prepare_node=function(t,i){return t=e.defaults(t||{},{width:1,height:1,x:0,y:0}),t.x=parseInt(""+t.x),t.y=parseInt(""+t.y),t.width=parseInt(""+t.width),t.height=parseInt(""+t.height),t.auto_position=t.auto_position||!1,t.no_resize=t.no_resize||!1,t.no_move=t.no_move||!1,t.width>this.width?t.width=this.width:t.width<1&&(t.width=1),t.height<1&&(t.height=1),t.x<0&&(t.x=0),t.x+t.width>this.width&&(i?t.width=this.width-t.x:t.x=this.width-t.width),t.y<0&&(t.y=0),t},s.prototype._notify=function(){if(!this._update_counter){var t=Array.prototype.slice.call(arguments,1).concat(this.get_dirty_nodes());t=t.concat(this.get_dirty_nodes()),this.onchange(t)}},s.prototype.clean_nodes=function(){e.each(this.nodes,function(t){t._dirty=!1})},s.prototype.get_dirty_nodes=function(){return e.filter(this.nodes,function(t){return t._dirty})},s.prototype.add_node=function(t){if(t=this._prepare_node(t),"undefined"!=typeof t.max_width&&(t.width=Math.min(t.width,t.max_width)),"undefined"!=typeof t.max_height&&(t.height=Math.min(t.height,t.max_height)),"undefined"!=typeof t.min_width&&(t.width=Math.max(t.width,t.min_width)),"undefined"!=typeof t.min_height&&(t.height=Math.max(t.height,t.min_height)),t._id=++o,t._dirty=!0,t.auto_position){this._sort_nodes();for(var 
i=0;;++i){var s=i%this.width,a=Math.floor(i/this.width);if(!(s+t.width>this.width||e.find(this.nodes,function(e){return n.is_intercepted({x:s,y:a,width:t.width,height:t.height},e)}))){t.x=s,t.y=a;break}}}return this.nodes.push(t),this._fix_collisions(t),this._pack_nodes(),this._notify(),t},s.prototype.remove_node=function(t){t._id=null,this.nodes=e.without(this.nodes,t),this._pack_nodes(),this._notify(t)},s.prototype.can_move_node=function(i,n,o,a,h){var d=Boolean(e.find(this.nodes,function(t){return t.locked}));if(!this.height&&!d)return!0;var r,_=new s(this.width,null,this["float"],0,e.map(this.nodes,function(e){return e==i?r=t.extend({},e):t.extend({},e)}));_.move_node(r,n,o,a,h);var l=!0;return d&&(l&=!Boolean(e.find(_.nodes,function(t){return t!=r&&Boolean(t.locked)&&Boolean(t._dirty)}))),this.height&&(l&=_.get_grid_height()<=this.height),l},s.prototype.can_be_placed_with_respect_to_height=function(i){if(!this.height)return!0;var n=new s(this.width,null,this["float"],0,e.map(this.nodes,function(e){return t.extend({},e)}));return n.add_node(i),n.get_grid_height()<=this.height},s.prototype.move_node=function(t,e,i,n,o,s){if("number"!=typeof e&&(e=t.x),"number"!=typeof i&&(i=t.y),"number"!=typeof n&&(n=t.width),"number"!=typeof o&&(o=t.height),"undefined"!=typeof t.max_width&&(n=Math.min(n,t.max_width)),"undefined"!=typeof t.max_height&&(o=Math.min(o,t.max_height)),"undefined"!=typeof t.min_width&&(n=Math.max(n,t.min_width)),"undefined"!=typeof t.min_height&&(o=Math.max(o,t.min_height)),t.x==e&&t.y==i&&t.width==n&&t.height==o)return t;var a=t.width!=n;return t._dirty=!0,t.x=e,t.y=i,t.width=n,t.height=o,t=this._prepare_node(t,a),this._fix_collisions(t),s||(this._pack_nodes(),this._notify()),t},s.prototype.get_grid_height=function(){return e.reduce(this.nodes,function(t,e){return 
Math.max(t,e.y+e.height)},0)},s.prototype.begin_update=function(t){e.each(this.nodes,function(t){t._orig_y=t.y}),t._updating=!0},s.prototype.end_update=function(){e.each(this.nodes,function(t){t._orig_y=t.y});var t=e.find(this.nodes,function(t){return t._updating});t&&(t._updating=!1)};var a=function(i,n){var o,a=this;this.container=t(i),n.item_class=n.item_class||"grid-stack-item";var h=this.container.closest("."+n.item_class).size()>0;if(this.opts=e.defaults(n||{},{width:parseInt(this.container.attr("data-gs-width"))||12,height:parseInt(this.container.attr("data-gs-height"))||0,item_class:"grid-stack-item",placeholder_class:"grid-stack-placeholder",handle:".grid-stack-item-content",cell_height:60,vertical_margin:20,auto:!0,min_width:768,"float":!1,_class:"grid-stack-"+(1e4*Math.random()).toFixed(0),animate:Boolean(this.container.attr("data-gs-animate"))||!1,always_show_resize_handle:n.always_show_resize_handle||!1,resizable:e.defaults(n.resizable||{},{autoHide:!n.always_show_resize_handle,handles:"se"}),draggable:e.defaults(n.draggable||{},{handle:".grid-stack-item-content",scroll:!1,appendTo:"body"})}),this.opts.is_nested=h,this.container.addClass(this.opts._class),h&&this.container.addClass("grid-stack-nested"),this._init_styles(),this.grid=new s(this.opts.width,function(t){var i=0;e.each(t,function(t){null==t._id?t.el.remove():(t.el.attr("data-gs-x",t.x).attr("data-gs-y",t.y).attr("data-gs-width",t.width).attr("data-gs-height",t.height),i=Math.max(i,t.y+t.height))}),a._update_styles(i+10)},this.opts["float"],this.opts.height),this.opts.auto){var d=[],r=this;this.container.children("."+this.opts.item_class).each(function(e,i){i=t(i),d.push({el:i,i:parseInt(i.attr("data-gs-x"))+parseInt(i.attr("data-gs-y"))*r.opts.width})}),e.chain(d).sortBy(function(t){return t.i}).each(function(t){a._prepare_element(t.el)}).value()}this.set_animation(this.opts.animate),this.placeholder=t('<div class="'+this.opts.placeholder_class+" "+this.opts.item_class+'"><div 
class="placeholder-content" /></div>').hide(),this.container.height(this.grid.get_grid_height()*(this.opts.cell_height+this.opts.vertical_margin)-this.opts.vertical_margin);var _=function(){if(a._is_one_column_mode()){if(o)return;o=!0,a.grid._sort_nodes(),e.each(a.grid.nodes,function(t){a.container.append(t.el),t.no_move||t.el.draggable("disable"),t.no_resize||t.el.resizable("disable")})}else{if(!o)return;o=!1,e.each(a.grid.nodes,function(t){t.no_move||t.el.draggable("enable"),t.no_resize||t.el.resizable("enable")})}};t(window).resize(_),_()};return a.prototype._init_styles=function(){this._styles_id&&t('[data-gs-id="'+this._styles_id+'"]').remove(),this._styles_id="gridstack-style-"+(1e5*Math.random()).toFixed(),this._styles=n.create_stylesheet(this._styles_id),null!=this._styles&&(this._styles._max=0)},a.prototype._update_styles=function(t){if(null!=this._styles){var e="."+this.opts._class+" ."+this.opts.item_class;if("undefined"==typeof t&&(t=this._styles._max,this._init_styles(),this._update_container_height()),0==this._styles._max&&n.insert_css_rule(this._styles,e,"min-height: "+this.opts.cell_height+"px;",0),t>this._styles._max){for(var i=this._styles._max;t>i;++i)n.insert_css_rule(this._styles,e+'[data-gs-height="'+(i+1)+'"]',"height: "+(this.opts.cell_height*(i+1)+this.opts.vertical_margin*i)+"px;",i),n.insert_css_rule(this._styles,e+'[data-gs-min-height="'+(i+1)+'"]',"min-height: "+(this.opts.cell_height*(i+1)+this.opts.vertical_margin*i)+"px;",i),n.insert_css_rule(this._styles,e+'[data-gs-max-height="'+(i+1)+'"]',"max-height: "+(this.opts.cell_height*(i+1)+this.opts.vertical_margin*i)+"px;",i),n.insert_css_rule(this._styles,e+'[data-gs-y="'+i+'"]',"top: 
"+(this.opts.cell_height*i+this.opts.vertical_margin*i)+"px;",i);this._styles._max=t}}},a.prototype._update_container_height=function(){this.grid._update_counter||this.container.height(this.grid.get_grid_height()*(this.opts.cell_height+this.opts.vertical_margin)-this.opts.vertical_margin)},a.prototype._is_one_column_mode=function(){return(window.innerWidth||document.documentElement.clientWidth||document.body.clientWidth)<=this.opts.min_width},a.prototype._prepare_element=function(i){var o=this;i=t(i),i.addClass(this.opts.item_class);var s=o.grid.add_node({x:i.attr("data-gs-x"),y:i.attr("data-gs-y"),width:i.attr("data-gs-width"),height:i.attr("data-gs-height"),max_width:i.attr("data-gs-max-width"),min_width:i.attr("data-gs-min-width"),max_height:i.attr("data-gs-max-height"),min_height:i.attr("data-gs-min-height"),auto_position:n.toBool(i.attr("data-gs-auto-position")),no_resize:n.toBool(i.attr("data-gs-no-resize")),no_move:n.toBool(i.attr("data-gs-no-move")),locked:n.toBool(i.attr("data-gs-locked")),el:i});i.data("_gridstack_node",s);var a,h,d=function(){o.container.append(o.placeholder);var e=t(this);o.grid.clean_nodes(),o.grid.begin_update(s),a=Math.ceil(e.outerWidth()/e.attr("data-gs-width")),h=o.opts.cell_height+o.opts.vertical_margin,o.placeholder.attr("data-gs-x",e.attr("data-gs-x")).attr("data-gs-y",e.attr("data-gs-y")).attr("data-gs-width",e.attr("data-gs-width")).attr("data-gs-height",e.attr("data-gs-height")).show(),s.el=o.placeholder,i.resizable("option","minWidth",a*(s.min_width||1)),i.resizable("option","minHeight",o.opts.cell_height*(s.min_height||1))},r=function(){o.placeholder.detach();var e=t(this);s.el=e,o.placeholder.hide(),e.attr("data-gs-x",s.x).attr("data-gs-y",s.y).attr("data-gs-width",s.width).attr("data-gs-height",s.height).removeAttr("style"),o._update_container_height();var 
i=o.grid.get_dirty_nodes();i&&i.length&&o.container.trigger("change",[i]),o.grid.end_update()};i.draggable(e.extend(this.opts.draggable,{start:d,stop:r,drag:function(t,e){var i=Math.round(e.position.left/a),n=Math.floor((e.position.top+h/2)/h);o.grid.can_move_node(s,i,n,s.width,s.height)&&(o.grid.move_node(s,i,n),o._update_container_height())},containment:this.opts.is_nested?this.container.parent():null})).resizable(e.extend(this.opts.resizable,{start:d,stop:r,resize:function(t,e){var i=Math.round(e.position.left/a),n=Math.floor((e.position.top+h/2)/h),d=Math.round(e.size.width/a),r=Math.round(e.size.height/h);o.grid.can_move_node(s,i,n,d,r)&&(o.grid.move_node(s,i,n,d,r),o._update_container_height())}})),(s.no_move||this._is_one_column_mode())&&i.draggable("disable"),(s.no_resize||this._is_one_column_mode())&&i.resizable("disable"),i.attr("data-gs-locked",s.locked?"yes":null)},a.prototype.set_animation=function(t){t?this.container.addClass("grid-stack-animate"):this.container.removeClass("grid-stack-animate")},a.prototype.add_widget=function(e,i,n,o,s,a){return e=t(e),"undefined"!=typeof i&&e.attr("data-gs-x",i),"undefined"!=typeof n&&e.attr("data-gs-y",n),"undefined"!=typeof o&&e.attr("data-gs-width",o),"undefined"!=typeof s&&e.attr("data-gs-height",s),"undefined"!=typeof a&&e.attr("data-gs-auto-position",a?"yes":null),this.container.append(e),this._prepare_element(e),this._update_container_height(),e},a.prototype.will_it_fit=function(t,e,i,n,o){var s={x:t,y:e,width:i,height:n,auto_position:o};return this.grid.can_be_placed_with_respect_to_height(s)},a.prototype.remove_widget=function(e,i){i="undefined"==typeof i?!0:i,e=t(e);var n=e.data("_gridstack_node");this.grid.remove_node(n),e.removeData("_gridstack_node"),this._update_container_height(),i&&e.remove()},a.prototype.remove_all=function(t){e.each(this.grid.nodes,function(e){this.remove_widget(e.el,t)},this),this.grid.nodes=[],this._update_container_height()},a.prototype.resizable=function(e,i){return 
e=t(e),e.each(function(e,n){n=t(n);var o=n.data("_gridstack_node");"undefined"!=typeof o&&null!=o&&(o.no_resize=!i,n.resizable(o.no_resize?"disable":"enable"))}),this},a.prototype.movable=function(e,i){return e=t(e),e.each(function(e,n){n=t(n);var o=n.data("_gridstack_node");"undefined"!=typeof o&&null!=o&&(o.no_move=!i,n.draggable(o.no_move?"disable":"enable"))}),this},a.prototype.disable=function(){this.movable(this.container.children("."+this.opts.item_class),!1),this.resizable(this.container.children("."+this.opts.item_class),!1)},a.prototype.enable=function(){this.movable(this.container.children("."+this.opts.item_class),!0),this.resizable(this.container.children("."+this.opts.item_class),!0)},a.prototype.locked=function(e,i){return e=t(e),e.each(function(e,n){n=t(n);var o=n.data("_gridstack_node");"undefined"!=typeof o&&null!=o&&(o.locked=i||!1,n.attr("data-gs-locked",o.locked?"yes":null))}),this},a.prototype._update_element=function(e,i){e=t(e).first();var n=e.data("_gridstack_node");if("undefined"!=typeof n&&null!=n){var o=this;o.grid.clean_nodes(),o.grid.begin_update(n),i.call(this,e,n),o._update_container_height();var s=o.grid.get_dirty_nodes();s&&s.length&&o.container.trigger("change",[s]),o.grid.end_update()}},a.prototype.resize=function(t,e,i){this._update_element(t,function(t,n){e=null!=e&&"undefined"!=typeof e?e:n.width,i=null!=i&&"undefined"!=typeof i?i:n.height,this.grid.move_node(n,n.x,n.y,e,i)})},a.prototype.move=function(t,e,i){this._update_element(t,function(t,n){e=null!=e&&"undefined"!=typeof e?e:n.x,i=null!=i&&"undefined"!=typeof i?i:n.y,this.grid.move_node(n,e,i,n.width,n.height)})},a.prototype.update=function(t,e,i,n,o){this._update_element(t,function(t,s){e=null!=e&&"undefined"!=typeof e?e:s.x,i=null!=i&&"undefined"!=typeof i?i:s.y,n=null!=n&&"undefined"!=typeof n?n:s.width,o=null!=o&&"undefined"!=typeof o?o:s.height,this.grid.move_node(s,e,i,n,o)})},a.prototype.cell_height=function(t){return"undefined"==typeof 
t?this.opts.cell_height:(t=parseInt(t),void(t!=this.opts.cell_height&&(this.opts.cell_height=t||this.opts.cell_height,this._update_styles())))},a.prototype.cell_width=function(){var t=this.container.children("."+this.opts.item_class).first();return Math.ceil(t.outerWidth()/t.attr("data-gs-width"))},a.prototype.get_cell_from_pixel=function(t){var e=this.container.position(),i=t.left-e.left,n=t.top-e.top,o=Math.floor(this.container.width()/this.opts.width),s=this.opts.cell_height+this.opts.vertical_margin;return{x:Math.floor(i/o),y:Math.floor(n/s)}},a.prototype.batch_update=function(){this.grid.batch_update()},a.prototype.commit=function(){this.grid.commit(),this._update_container_height()},a.prototype.is_area_empty=function(t,e,i,n){return this.grid.is_area_empty(t,e,i,n)},i.GridStackUI=a,i.GridStackUI.Utils=n,t.fn.gridstack=function(e){return this.each(function(){t(this).data("gridstack")||t(this).data("gridstack",new a(this,e))})},i.GridStackUI}); //# sourceMappingURL=gridstack.min.map
PypiClean
/B9gemyaeix-4.14.1.tar.gz/B9gemyaeix-4.14.1/docs/admin/install/venv-macos.rst
Installing on macOS =================== .. include:: steps/hw.rst .. include:: steps/install-system-devel.rst .. code-block:: sh brew install python pango cairo gobject-introspection libffi glib libyaml pip install virtualenv Make sure pip will be able to find the ``libffi`` version provided by homebrew — this will be needed during the installation build step. .. code-block:: sh export PKG_CONFIG_PATH="/usr/local/opt/libffi/lib/pkgconfig" .. include:: steps/install-system-optional.rst .. code-block:: sh brew install tesseract .. include:: steps/install-system-server.rst .. code-block:: sh # Web server option 1: NGINX and uWSGI brew install nginx uwsgi # Web server option 2: Apache with ``mod_wsgi`` brew install httpd # Caching backend: Redis brew install redis # Database server: PostgreSQL brew install postgresql .. include:: steps/install-python.rst .. include:: steps/install-configure.rst .. include:: steps/install-after.rst
PypiClean
/OASYS1-SYNED-1.0.45.tar.gz/OASYS1-SYNED-1.0.45/orangecontrib/syned/util/diaboloid_tools.py
# Yashchuk, V. V., “An analytical solution for shape of diaboloid mirror,” Light # Source Beam Line Note LSBL-1436, Advanced Light Source, Berkeley (January 8, 2020). # Yashchuk, V. V., “Shape of diaboloid mirror in laboratory coordinate system,” # Light Source Beam Line Note LSBL-1437c, Advanced Light Source, Berkeley (January 13, 2020). # K. Goldberg and Manuel Sanchez del Rio, “Direct Solution of # Diaboloid Mirrors,” Light Source Beam Line Note LSBL- 1440 (ALS, Berkeley, February 18, 2020). # Lacey, I., Sanchez del Rio, M., and Yashchuk, V. V., “Analytical expression for # the diaboloid shape in laboratory mirror coordinates verified by ray-tracing simulations,” Light # Source Beam Line Note LSBL-1445, Advanced Light Source, Berkeley (March 24, 2020). # Yashchuk, V. V., “Diaboloid shape approximation with a sagittal conical cylinder # bent to a tangential parabola: Analytical consideration,” Light Source Beam Line Note LSBL-1451, # Advanced Light Source, Berkeley (April 14, 2020). # Yashchuk, V. V., “Explicit algebraic derivation of an expression for the exact # shape of diaboloid mirror in laboratory coordinate system,” # Light Source Beam Line Note LSBL˗1462, Advanced Light Source, Berkeley (May 17, 2020). # H. A. Padmore, “Sagittal shape difference between a cylinder and a diaboloid at 2:1 # demagnification,” Light Source Beam Line Note LSBL-1465, Advanced Light Source, Berkeley # (June 03, 2020). ################## Papers ################################################################# # W. R. McKinney, J. M. Glossinger, H. A. Padmore, and M. R. Howells, # "Optical path function calculation for an incoming cylindrical wave," Proc. SPIE. 7448, 744809/1-8 # (2009); https://doi.org/10.1117/12.828490. # V. V. Yashchuk, I. Lacey, and M. Sanchez del Rio, “Analytical expressions # of the surface shape of ‘diaboloid’ mirrors,” Proc. SPIE 11493, 114930N/1-13 (2020); # https://doi.org/10.1117/12.2568332. 
import numpy

try:
    from orangecontrib.syned.util.fqs import single_quartic
    from orangecontrib.syned.util.fqs import quartic_roots
except ImportError:
    # The quartic solvers are only needed by the two "exact" functions below;
    # degrade gracefully so the approximated/toroid meshes remain usable when
    # the fqs module cannot be imported.
    single_quartic = None
    quartic_roots = None


def diaboloid_approximated_point_to_segment(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001),
        detrend=0):
    """
    Create a numeric mesh of the mirror using the approximated equation of the
    diaboloid (point to segment focusing).

    References:
    Kenneth Goldberg & M. Sanchez del Rio, "Direct Solution of Diaboloid
    Mirrors", LSBL-1440 (February 2020).
    M. Sanchez del Rio, K. Goldberg, V. Yashchuk, I. Lacey and H. Padmore,
    "Simulations of applications using diaboloid mirrors", Journal of
    Synchrotron Radiation (submitted 2021).

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :param detrend: 0: no detrend; 1: subtract zfit = -theta * y;
                    2: subtract a linear fit of the central profile near y=0
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    X = numpy.outer(x, numpy.ones_like(y))
    Y = numpy.outer(numpy.ones_like(x), y)

    s = p * numpy.cos(2 * theta)
    z0 = p * numpy.sin(2 * theta)
    c = p + q

    # this is Eq. 12 in LSBL-1440 (a bit reorganised)
    Z = - numpy.sqrt(c ** 2 + q ** 2 - s ** 2 - 2 * Y * (s + q)
                     - 2 * c * numpy.sqrt(X ** 2 + (q - Y) ** 2))
    Z += z0

    if detrend == 0:
        zfit = 0
    elif detrend == 1:
        zfit = -theta * y
    elif detrend == 2:
        # straight-line fit through the 20 central points of the y=0 profile
        zcentral = Z[Z.shape[0] // 2, :]
        zcoeff = numpy.polyfit(y[(y.size // 2 - 10):(y.size // 2 + 10)],
                               zcentral[(y.size // 2 - 10):(y.size // 2 + 10)], 1)
        zfit = zcoeff[1] + y * zcoeff[0]
    else:
        # previously any other value crashed later with a NameError on zfit
        raise ValueError("detrend must be 0, 1 or 2")

    for i in range(Z.shape[0]):
        Z[i, :] = Z[i, :] - zfit

    return Z, X, Y


def diaboloid_approximated_segment_to_point(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001),
        detrend=0):
    """
    Create a numeric mesh of the mirror using the approximated equation of the
    diaboloid (segment to point focusing).

    Implemented by swapping p and q in the point-to-segment solution and
    reversing the tangential axis.

    References: see diaboloid_approximated_point_to_segment.

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :param detrend: detrend flag, passed through (see point-to-segment version)
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    Z, X, Y = diaboloid_approximated_point_to_segment(p=q, q=p, theta=theta,
                                                      x=x, y=y, detrend=detrend)
    for i in range(x.size):
        Z[i, :] = numpy.flip(Z[i, :])
    return Z, X, Y


def toroid_point_to_segment(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numeric mesh of the toroid (point to segment).

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, the surface heights, shape (x.size, y.size). Note this
             function returns only Z (no X, Y meshes).
    """
    Rt = 2.0 / numpy.sin(theta) / (1 / p)           # tangential radius (uses only p)
    Rs = 2.0 * numpy.sin(theta) / (1 / p + 1 / q)   # sagittal radius
    print("Toroid Rt: %9.6f m, Rs: %9.6f m" % (Rt, Rs))

    height_tangential = Rt - numpy.sqrt(Rt ** 2 - y ** 2)
    height_sagittal = Rs - numpy.sqrt(Rs ** 2 - x ** 2)

    Z = numpy.zeros((x.size, y.size))
    for i in range(x.size):
        Z[i, :] = height_tangential
    for i in range(y.size):
        Z[:, i] += height_sagittal
    return Z


def toroid_segment_to_point(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numeric mesh of the toroid (segment to point): the
    point-to-segment toroid with p and q swapped and the tangential
    axis reversed.

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, the surface heights, shape (x.size, y.size). Note this
             function returns only Z (no X, Y meshes).
    """
    Z = toroid_point_to_segment(p=q, q=p, theta=theta, x=x, y=y)
    for i in range(x.size):
        Z[i, :] = numpy.flip(Z[i, :])
    return Z


def parabolic_cone_point_to_segment(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numerical mesh for an approximated diaboloid (point to segment
    focusing) as calculated by Valeriy Yashchuk.

    Reference:
    Valeriy V. Yashchuk, "Diaboloid shape approximation with a sagittal
    conical cylinder bent to a tangential parabola: Analytical consideration",
    LSBL-1451, Advanced Light Source, Berkeley (April 14, 2020).

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    X = numpy.outer(x, numpy.ones_like(y))
    Y = numpy.outer(numpy.ones_like(x), y)

    c = numpy.cos(theta)
    s = numpy.sin(theta)
    s2 = numpy.sin(2 * theta)
    pq = p + q

    # Equation 15 in V. Yashchuk LSBL 1451
    k1 = p * q * c * s2 / pq
    k2 = s2 * (q - 2 * p * c**2) / 2 / pq
    Z = Y * s / c - \
        2 * s / c**2 * numpy.sqrt(Y * p * c + p**2) + \
        2 * p * s / c**2 + \
        k1 + k2 * Y \
        - numpy.sqrt((k1 + k2 * Y)**2 - X**2)
    return Z, X, Y


def parabolic_cone_segment_to_point(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numerical mesh for an approximated diaboloid (segment to point
    focusing): the point-to-segment solution with p and q swapped and the
    tangential axis reversed.

    Reference: see parabolic_cone_point_to_segment.

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    Z, X, Y = parabolic_cone_point_to_segment(p=q, q=p, theta=theta, x=x, y=y)
    for i in range(x.size):
        Z[i, :] = numpy.flip(Z[i, :])
    return Z, X, Y


def parabolic_cone_linearized_point_to_segment(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numerical mesh for the linearized approximated diaboloid (point
    to segment focusing) as calculated by Valeriy Yashchuk.

    Reference:
    Valeriy V. Yashchuk, "Diaboloid shape approximation with a sagittal
    conical cylinder bent to a tangential parabola: Analytical consideration",
    LSBL-1451, Advanced Light Source, Berkeley (April 14, 2020).

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    X = numpy.outer(x, numpy.ones_like(y))
    Y = numpy.outer(numpy.ones_like(x), y)

    c = numpy.cos(theta)
    s = numpy.sin(theta)
    s2 = numpy.sin(2 * theta)
    pq = p + q  # BUGFIX: pq was used below but never defined (NameError)

    # use the central meridional profile like in parabolic_cone_point_to_segment()
    Z = Y * s / c - 2 * s / c**2 * numpy.sqrt(Y * p * c + p**2) + 2 * p * s / c**2 \
        - numpy.sqrt((p * q * c * s2 / pq + s2 * (q - 2 * p * c**2) / 2 / pq * Y)**2 - (X * 0)**2) + \
        p * q * c * s2 / pq + s2 * (q - 2 * p * c**2) / 2 / pq * Y

    # we add now the sagittal profile with radius calculated using Eq 11 in
    # LSBL 1451 (an earlier version of Rs that missed a cos(theta) factor was
    # kept here commented out; the corrected expression below includes it)
    for j in range(y.size):
        Rs = p * q * c * numpy.sin(2 * theta) / (p + q)
        Rs += numpy.sin(2 * theta) * (q - 2 * p * c**2) / 2 / (p + q) * y[j]
        height_sagittal = Rs - numpy.sqrt(Rs ** 2 - x ** 2)
        print("y=%f Rs=%f" % (y[j], Rs))
        Z[:, j] += height_sagittal

    return Z, X, Y


def parabolic_cone_linearized_segment_to_point(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001)):
    """
    Create a numerical mesh for the linearized approximated diaboloid (segment
    to point focusing): the point-to-segment solution with p and q swapped and
    the tangential axis reversed.

    Reference: see parabolic_cone_linearized_point_to_segment.

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    """
    Z, X, Y = parabolic_cone_linearized_point_to_segment(p=q, q=p, theta=theta,
                                                         x=x, y=y)
    for i in range(x.size):
        Z[i, :] = numpy.flip(Z[i, :])
    return Z, X, Y


def diaboloid_exact_point_to_segment(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001),
        ):
    """
    Create a numerical mesh for the exact diaboloid (point to segment
    focusing), solving a fourth degree equation, as calculated by
    Valeriy Yashchuk.

    References:
    Valeriy V. Yashchuk, "Explicit algebraic derivation of an expression for
    the exact shape of diaboloid mirror in laboratory coordinate system",
    LSBL-1462, Advanced Light Source, Berkeley (May 17, 2020).
    Yashchuk, V. V., Goldberg, K., Lacey, I., McKinney, W. R.,
    Sanchez del Rio, M. & Padmore, H., Journal of Synchrotron Radiation,
    submitted (2021).

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    :raises ImportError: if the fqs quartic solvers are not available
    """
    if single_quartic is None or quartic_roots is None:
        raise ImportError("orangecontrib.syned.util.fqs is required for the "
                          "exact diaboloid solution")

    X = numpy.outer(x, numpy.ones_like(y))
    Y = numpy.outer(numpy.ones_like(x), y)

    c = numpy.cos(theta)
    s = numpy.sin(theta)
    c2 = numpy.cos(2 * theta)
    s2 = numpy.sin(2 * theta)

    # Quartic coefficients A z^4 + B z^3 + C z^2 + D z + E after
    # Valeriy V. Yashchuk LSBL-1462 (May 2020), with r1=p, r2=q.
    A = -c**4 * numpy.ones_like(X)
    B = 4 * (p - q) * c**2 * s \
        + 4 * c**3 * s * Y
    C = 4 * q * ((p + q) * c**2 + 4 * p * s**2) \
        + 2 * c * (q - 3 * p + (p - 3 * q) * c2) * Y \
        - 6 * c**2 * s**2 * Y**2
    D = -16 * p * q * (p + q) * s \
        + 4 * (p + q) * (2 * p - q) * s2 * Y \
        + 2 * (3 * p + q + (3 * q + p) * c2) * s * Y**2 \
        + 4 * c * s**3 * Y**3
    E = 4 * (p + q)**2 * X**2 \
        + 4 * q * (p + q) * s**2 * Y**2 \
        - 4 * (p + q) * c * s**2 * Y**3 \
        - s**4 * Y**4

    # get good solution: the one that is zero at (0,0)
    ix = x.size // 2
    iy = y.size // 2
    solutions = single_quartic(A[ix, iy], B[ix, iy], C[ix, iy], D[ix, iy], E[ix, iy])
    aa = []
    for sol in solutions:
        if numpy.abs(sol.imag) < 1e-15:
            aa.append(numpy.abs(sol.real))
        else:
            # complex root: exclude from the minimum search
            aa.append(1e10)
    isel = numpy.argmin(aa)

    # calculate solutions array (one quartic per mesh point, vectorized)
    P = numpy.zeros((A.size, 5))
    P[:, 0] = A.flatten()
    P[:, 1] = B.flatten()
    P[:, 2] = C.flatten()
    P[:, 3] = D.flatten()
    P[:, 4] = E.flatten()
    SOLUTION = quartic_roots(P)

    # return the root branch selected at the mirror center
    SOLUTION_GOOD = (SOLUTION[:, isel]).flatten()
    SOLUTION_GOOD.shape = A.shape
    Z = SOLUTION_GOOD.real
    return Z, X, Y


def diaboloid_exact_segment_to_point(
        p=29.3, q=19.53, theta=4.5e-3,
        x=numpy.linspace(-0.01, 0.01, 101),
        y=numpy.linspace(-0.1, 0.1, 1001),
        ):
    """
    Create a numerical mesh for the exact diaboloid (segment to point
    focusing): the exact point-to-segment solution with p and q swapped and
    the tangential axis reversed.

    References: see diaboloid_exact_point_to_segment.

    :param p: distance source to mirror [m]
    :param q: distance mirror to focus [m]
    :param theta: grazing incidence angle [rad]
    :param x: x (sagittal) array
    :param y: y (tangential) array
    :return: Z, X, Y (2D arrays of shape (x.size, y.size))
    :raises ImportError: if the fqs quartic solvers are not available
    """
    Z, X, Y = diaboloid_exact_point_to_segment(p=q, q=p, theta=theta, x=x, y=y)
    for i in range(x.size):
        Z[i, :] = numpy.flip(Z[i, :])
    return Z, X, Y
PypiClean
/IST411Team1AduDarko-1.0.0.tar.gz/IST411Team1AduDarko-1.0.0/IST411Team1/node4.py
import Pyro4, sys, subprocess, pika, zlib, serpent, time, p2p
from Crypto.Cipher import AES
from Crypto.Hash import SHA256
from node5 import Node5

beginTime = p2p.start()


class node4:
    """Pipeline node that pulls a compressed JSON payload over Pyro4,
    verifies its CRC32 checksum, AES-encrypts it, and forwards the
    ciphertext to Node 1 through a RabbitMQ queue."""

    def __init__(self, name):
        self.json = None         # decompressed JSON payload (bytes) set by receiveJSON()
        self.name = name         # display name used in console output and log records
        self.payloadHash = None  # reserved for a payload digest (currently unused)
        self.cipherText = None   # AES ciphertext produced by aesEncrypt()

    def receiveJSON(self, argv):
        """Fetch the compressed JSON payload from the remote Pyro4 object.

        :param argv: Pyro4 URI string of the remote JSON-getter object.
        :return: decompressed payload bytes when the CRC32 checksum matches,
                 otherwise None.
        """
        try:
            print(self.name + " is pulling in the remote object's URI.")
            self.uri = argv
            log = {"Node": self.name, "URI Received from Node 3": str(self.uri)}
            Node5.log(log)
            print(self.name + " is creating a remote object with the URI.")
            json_getter = Pyro4.Proxy(self.uri)
            print(self.name + " is getting the JSON from the remote object's get_json().")
            compJson = json_getter.get_json()
            print("Decompressing received payload...")
            self.json = zlib.decompress(serpent.tobytes(compJson))
            checksum = json_getter.getChecksum()
            # Best effort: the remote daemon may already be shutting down.
            try:
                json_getter.shutDown()
            except Exception:
                pass
            log = {"Node": self.name,
                   "JSON Received via Pyro4": str(self.json),
                   "Checksum received via Pyro4": str(checksum)}
            Node5.log(log)
            print(self.name, " JSON received: ", self.json)
            print(self.name, " Verifying data integrity with checksum match...")
            genChecksum = zlib.crc32(self.json)
            if checksum == genChecksum:
                print(self.name, ": checksum match indicates that received data is intact.")
                log = {"Node": self.name,
                       "Checksum received via Pyro4 matches": "True",
                       "Checksum received via Pyro4": str(checksum),
                       "Checksum generated for matching": str(genChecksum)}
                Node5.log(log)
                return self.json
            else:
                print(self.name, ": checksum non-match indicates that received data has been compromised.")
        except Exception as e:
            print(e)

    def sendJSON(self):
        """Publish the AES ciphertext to the 'node4to1' RabbitMQ queue.

        :return: True when the message was published, False otherwise.
        """
        sent = False
        connection = None
        try:
            print(self.name, " connecting to localhost")
            connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
            channel = connection.channel()
            channel.queue_declare('node4to1')
            print(self.name, " channel connected.")
            channel.basic_publish(exchange='', routing_key='node4to1', body=self.cipherText)
            log = {"Node": self.name, "AES Encrypted message sent": "True"}
            Node5.log(log)
            sent = True
        except Exception as e:
            # BUG FIX: was 'Sent = False', a typo that only created an unused
            # variable instead of updating the result flag.
            sent = False
            print(e)
        finally:
            # BUG FIX: connection.close() previously sat after 'return sent'
            # and never ran, leaking the AMQP connection on every call.
            if connection is not None:
                connection.close()
        return sent

    def aesEncrypt(self):
        """AES-CBC encrypt self.json, space-padded to a 16-byte boundary.

        :return: ciphertext bytes (also stored on self.cipherText).
        """
        try:
            pad = b' '
            # NOTE(review): hard-coded key/IV — acceptable for this exercise,
            # never for production use.
            key = 'This is a key123'
            obj = AES.new(key, AES.MODE_CBC, 'This is an IV456')
            payload = self.json
            length = 16 - (len(payload) % 16)
            payload += length * pad  # pad to the AES block size
            print(self.name, " is encrypting the JSON with AES")
            self.cipherText = obj.encrypt(payload)
            print(self.name, "'s encrypted payload: ", self.cipherText)
            log = {"Node": self.name, "AES Encrypted Payload": str(self.cipherText)}
            Node5.log(log)
            return self.cipherText
        except Exception as e:
            print(e)


if __name__ == "__main__":
    print("Starting Node 4...")
    node4 = node4("Node 4")
    node4.receiveJSON(sys.argv[1])
    node4.aesEncrypt()
    p2pTime = p2p.end(beginTime)
    Node5.log({"Node": node4.name, "P2P payload time in seconds": p2pTime})
    print(node4.name, " to Node 1 payload time: ", p2pTime, " seconds")
    node4.sendJSON()
PypiClean
/Bluebook-0.0.1.tar.gz/Bluebook-0.0.1/pylot/component/__init__.py
import re
import warnings

import humanize
import jinja2
import mistune
from pylot import (Pylot, utils)
from flask_kvsession import KVSessionExtension
import ses_mailer
import flask_mail
from flask_cloudstorage import Storage
from flask_recaptcha import ReCaptcha
from flask_seasurf import SeaSurf
import flask_cache
from six.moves.urllib.parse import urlparse


class Mailer(object):
    """
    A simple wrapper to switch between SES-Mailer and Flask-Mail
    based on config ("MAILER_PROVIDER": "SES" or "SMTP").
    """
    mail = None       # backend mailer: ses_mailer.Mail or flask_mail.Mail
    provider = None   # upper-cased provider name, "SES" or "SMTP"
    app = None        # the Flask app passed to init_app()

    def init_app(self, app):
        """Configure the mail backend from app.config.

        :param app: Flask application whose MAILER_* config keys are read.
        :raises ValueError: on a missing/malformed MAILER_SMTP_URI or an
                            unrecognized MAILER_PROVIDER.
        """
        self.app = app
        provider = app.config.get("MAILER_PROVIDER", None)
        if provider:
            self.provider = provider.upper()

        if self.provider == "SES":
            # ses_mailer expects an app-like object exposing SES_* keys.
            class _App(object):
                config = {
                    "SES_AWS_ACCESS_KEY": app.config.get("MAILER_SES_ACCESS_KEY"),
                    "SES_AWS_SECRET_KEY": app.config.get("MAILER_SES_SECRET_KEY"),
                    "SES_SENDER": app.config.get("MAILER_SENDER"),
                    "SES_REPLY_TO": app.config.get("MAILER_REPLY_TO"),
                    "SES_TEMPLATE": app.config.get("MAILER_TEMPLATE"),
                    "SES_TEMPLATE_CONTEXT": app.config.get("MAILER_TEMPLATE_CONTEXT")
                }
            _app = _App()
            self.mail = ses_mailer.Mail(app=_app)

        elif self.provider == "SMTP":
            uri = app.config.get("MAILER_SMTP_URI", None)
            if uri is None:
                raise ValueError("<Pylot:Component:Mailer: MAILER_SMTP_URI is empty'")
            parse_uri = urlparse(uri)
            if "smtp" not in parse_uri.scheme:
                raise ValueError("<Pylot:Component:Mailer: MAILER_SMTP_URI must start with 'smtp://'")

            # flask_mail expects an app-like object exposing MAIL_* keys.
            class _App(object):
                config = {
                    "MAIL_SERVER": parse_uri.hostname,
                    "MAIL_USERNAME": parse_uri.username,
                    "MAIL_PASSWORD": parse_uri.password,
                    "MAIL_PORT": parse_uri.port,
                    "MAIL_USE_TLS": True if "tls" in parse_uri.scheme else False,
                    "MAIL_USE_SSL": True if "ssl" in parse_uri.scheme else False,
                    "MAIL_DEFAULT_SENDER": app.config.get("MAILER_SENDER"),
                    "TESTING": app.config.get("TESTING"),
                    "DEBUG": app.config.get("DEBUG")
                }
            _app = _App()
            self.mail = flask_mail.Mail(app=_app)
        else:
            # BUG FIX: this branch previously executed a bare `raise` with no
            # active exception (a RuntimeError at runtime) and left the
            # warnings.warn() call below it unreachable. Raise the intended
            # message as a proper ValueError instead.
            raise ValueError("<Pylot:Component:Mailer invalid provider '%s'>" % provider)

    def send(self, to, subject, body, reply_to=None, **kwargs):
        """
        Send simple message
        """
        if self.provider == "SES":
            self.mail.send(to=to,
                           subject=subject,
                           body=body,
                           reply_to=reply_to,
                           **kwargs)
        elif self.provider == "SMTP":
            # (removed a stray py2 `print body` debug statement)
            msg = flask_mail.Message(recipients=[to] if not isinstance(to, list) else to,
                                     subject=subject,
                                     body=body,
                                     reply_to=reply_to,
                                     sender=self.app.config.get("MAILER_SENDER"))
            self.mail.send(msg)

    def send_template(self, template, to, reply_to=None, **context):
        """
        Send Template message
        """
        if self.provider == "SES":
            self.mail.send_template(template=template,
                                    to=to,
                                    reply_to=reply_to,
                                    **context)
        elif self.provider == "SMTP":
            _template = self.app.config.get("MAILER_TEMPLATE", None)
            # BUG FIX: the config key was misspelled "MAILER_TEMPLATER_CONTEXT",
            # so the template context always resolved to None. init_app() reads
            # "MAILER_TEMPLATE_CONTEXT" for the SES provider.
            template_context = self.app.config.get("MAILER_TEMPLATE_CONTEXT")
            ses_mail = ses_mailer.Mail(template=_template,
                                       template_context=template_context)
            data = ses_mail.parse_template(template=template, **context)
            msg = flask_mail.Message(recipients=[to] if not isinstance(to, list) else to,
                                     subject=data["subject"],
                                     body=data["body"],
                                     reply_to=reply_to,
                                     sender=self.app.config.get("MAILER_SENDER")
                                     )
            self.mail.send(msg)


class Session(object):
    """
    Use KVSession: pick a simplekv store from the SESSION_URI scheme
    (redis / s3 / google_storage / memcache / sql) and attach it to the app.
    """
    def __init__(self, app):
        store = None
        uri = app.config.get("SESSION_URI")
        if uri:
            parse_uri = urlparse(uri)
            scheme = parse_uri.scheme
            username = parse_uri.username
            password = parse_uri.password
            hostname = parse_uri.hostname
            port = parse_uri.port
            bucket = parse_uri.path.strip("/")

            # Backend-specific imports stay local so only the configured
            # backend's dependency needs to be installed.
            if "redis" in scheme:
                import redis
                from simplekv.memory.redisstore import RedisStore
                conn = redis.StrictRedis.from_url(url=uri)
                store = RedisStore(conn)
            elif "s3" in scheme or "google_storage" in scheme:
                from simplekv.net.botostore import BotoStore
                import boto
                if "s3" in scheme:
                    _con_fn = boto.connect_s3
                else:
                    _con_fn = boto.connect_gs
                conn = _con_fn(username, password)
                _bucket = conn.create_bucket(bucket)
                store = BotoStore(_bucket)
            elif "memcache" in scheme:
                import memcache
                from simplekv.memory.memcachestore import MemcacheStore
                host_port = "%s:%s" % (hostname, port)
                conn = memcache.Client(servers=[host_port])
                store = MemcacheStore(conn)
            elif "sql" in scheme:
                from simplekv.db.sql import SQLAlchemyStore
                from sqlalchemy import create_engine, MetaData
                engine = create_engine(uri)
                metadata = MetaData(bind=engine)
                store = SQLAlchemyStore(engine, metadata, 'kvstore')
                metadata.create_all()
            else:
                raise ValueError("Invalid Session Store")
        if store:
            KVSessionExtension(store, app)


class SocialAuth(object):
    # Placeholder component: not implemented yet.
    def init_app(self, app):
        pass


# Component singletons bound into the Pylot app factory.
mailer = Mailer()
cache = flask_cache.Cache()
storage = Storage()
recaptcha = ReCaptcha()
csrf = SeaSurf()

Pylot.bind(Session)
Pylot.bind(mailer.init_app)
Pylot.bind(storage.init_app)
Pylot.bind(cache.init_app)
Pylot.bind(recaptcha.init_app)
Pylot.bind(csrf.init_app)


# ------------------------------------------------------------------------------
# Jinja2 template filters

def to_date(dt, format="%m/%d/%Y"):
    """Format a date/datetime; empty string for falsy input."""
    return "" if not dt else dt.strftime(format)


def strip_decimal(amount):
    """Drop the fractional part of a numeric string: '12.50' -> '12'."""
    return amount.split(".")[0]


def bool_to_yes(b):
    return "Yes" if b is True else "No"


def bool_to_int(b):
    return 1 if b is True else 0


def nl2br(s):
    """
    {{ s|nl2br }}

    Convert newlines into <p> and <br />s.
    """
    # NOTE(review): `basestring` is Python-2-only, consistent with the rest
    # of this (py2) module.
    if not isinstance(s, basestring):
        s = str(s)
    s = re.sub(r'\r\n|\r|\n', '\n', s)
    paragraphs = re.split('\n{2,}', s)
    paragraphs = ['<p>%s</p>' % p.strip().replace('\n', '<br />') for p in paragraphs]
    return '\n\n'.join(paragraphs)


jinja2.filters.FILTERS.update({
    "currency": utils.to_currency,
    "strip_decimal": strip_decimal,
    "date": to_date,
    "int": int,
    "slug": utils.slug,
    "intcomma": humanize.intcomma,
    "intword": humanize.intword,
    "naturalday": humanize.naturalday,
    "naturaldate": humanize.naturaldate,
    "naturaltime": humanize.naturaltime,
    "naturalsize": humanize.naturalsize,
    "bool_to_yes": bool_to_yes,
    "bool_to_int": bool_to_int,
    "nl2br": nl2br,
    "markdown": mistune.markdown
})
PypiClean
/KayPacha-0.0.3.tar.gz/KayPacha-0.0.3/README.md
[![Python package](https://github.com/colav/KayPacha/actions/workflows/python-package.yml/badge.svg)](https://github.com/colav/KayPacha/actions/workflows/python-package.yml) <center><img src="https://raw.githubusercontent.com/colav/colav.github.io/master/img/Logo.png"/></center> # KayPacha SQL data extraction for Scienti and Colav partners Oracle databases # Description This package extracts data from Oracle SQL databases belonging to Scienti or Colav partners. Models, filters, etc. are defined here. # Dependencies needed before installation Before installing the package, you need to install Graphviz. ## Ubuntu and Debian `sudo apt-get install graphviz graphviz-dev` If you need more information about the installation, visit: https://pygraphviz.github.io/documentation/stable/install.html # Installation ## Package `pip install kaypacha` # Usage ## Scienti Oracle DB The Colav docker database for scienti has to be loaded already, [take a look here](https://github.com/colav/oracle-docker) Remember you can only use at most 2 threads due to an Oracle XE version limitation [more information here](https://docs.oracle.com/en/database/oracle/oracle-database/18/xeinl/licensing-restrictions.html) Saving the model product for scienti on MongoDB (default users are UDEA_CV,UDEA_GR,UDEA_IN) ` kaypacha_scienti --mongo_dbname scienti_udea_2022 --model product --max_threads 2 --checkpoint ` Saving all models for scienti on MongoDB ` kaypacha_scienti --mongo_dbname scienti_udea_2022 --max_threads 2 --checkpoint ` Getting a JSON file sample for the model product for scienti (**WARNING**: getting the full DB in a file requires a huge amount of RAM, use it with care.)
` kaypacha_scienti --mongo_dbname scienti_udea_2022 --model product --json prod.json --max_threads 2 --sample ` ### Example: University of Externado ` kaypacha_scienti --mongo_dbname scienti_uec_2022 --model product --max_threads 2 --cvlac_user UEC_CV --gruplac_user UEC_GR --institulac_user UEC_IN --checkpoint ` or ` kaypacha_scienti --mongo_dbname scienti_uec_2022 --model endorsement --max_threads 2 --cvlac_user UEC_CV --gruplac_user UEC_GR --institulac_user UEC_IN --checkpoint ` ### Entity models supported for Scienti * product (EN_PRODCUTO) * network (EN_RED) * project (EN_PROYECTO) * event (EN_EVENTO) * patent (EN_PATENTE) ### TODO * implement all the main tables for Scienti: * register "EN_REGISTRO" * industrial_secret "EN_SECRETO_INDUSTRIAL" * recognition "EN_RECONOCIMIENTO" * art_product "EN_PROD_ARTISTICA_DETALLE" ## SIIU Oracle DB The Colav docker database for siiu has to be loaded already, [take a look here](https://github.com/colav/oracle-docker) Remember you can only use at most 2 threads due to an Oracle XE version limitation [more information here](https://docs.oracle.com/en/database/oracle/oracle-database/18/xeinl/licensing-restrictions.html) Saving the model project for siiu on MongoDB ` kaypacha_siiu --model project --max_threads 2 --checkpoint ` Saving all models for siiu on MongoDB ` kaypacha_siiu --max_threads 2 --checkpoint ` Getting a JSON file sample for the model product for scienti (**WARNING**: getting the full DB in a file requires a huge amount of RAM, use it with care.)
Getting the first 100 records ` kaypacha_siiu --model project --json project.json --max_threads 2 --sample ` Getting a random sample, 5.5% of the total number of records ` kaypacha_siiu --model project --json project.json --max_threads 2 --rand_sample --sample_percent 5.5 ` Making a graph of the model (two output file types are supported: svg and png) ` kaypacha_siiu --make_diagram project svg ` ### Entity models supported for SIIU * project (SIIU_PROYECTO) #### Some errors ` [WARNING] ORA-12504: TNS:listener was not given the SERVICE_NAME in CONNECT_DATA ` A possible solution is to use --ora_dburi 0.0.0.0:1521/XE # Generating Diagrams with BlockDiag Example for patent on scienti.<br> It also supports formats such as SVG and PDF. ` kaypacha_blockdiag --model scienti --submodel patent --filename patent.png ` # License BSD-3-Clause License # Links http://colav.udea.edu.co/
PypiClean
/BoxKit-2023.6.7.tar.gz/BoxKit-2023.6.7/boxkit/api/_create.py
from types import SimpleNamespace

import pymorton

from .. import library


def _bounds_from(dataset, attributes, caller):
    """Build a SimpleNamespace of region bounds.

    Defaults to *dataset*'s bounds, overridden by *attributes*; unknown
    attribute names raise ValueError tagged with *caller* so the error
    message matches the public entry point.
    """
    bounds = SimpleNamespace(
        xmin=dataset.xmin,
        ymin=dataset.ymin,
        zmin=dataset.zmin,
        xmax=dataset.xmax,
        ymax=dataset.ymax,
        zmax=dataset.zmax,
    )
    for key, value in attributes.items():
        if not hasattr(bounds, key):
            raise ValueError(f"[boxkit.{caller}]: Invalid attributes {key}")
        setattr(bounds, key, value)
    return bounds


def _leaf_blocks(dataset):
    """Return the leaf blocks of *dataset* (the blocks regions/slices use)."""
    return [block for block in dataset.blocklist if block.leaf]


def create_dataset(storage=None, **attributes):
    """
    Create a dataset from scratch on a uniform block-structured grid.

    Parameters
    ----------
    storage : str, optional
        Backend for block data; defaults to "numpy-memmap".
    attributes : keyword arguments
        Grid description: bounds (xmin/xmax, ymin/ymax, zmin/zmax),
        cells per block (nxb, nyb, nzb) and blocks per direction
        (nblockx, nblocky, nblockz).

    Returns
    -------
    Dataset object

    Raises
    ------
    ValueError
        For an unrecognized attribute name.
    """
    if not storage:
        storage = "numpy-memmap"

    self = SimpleNamespace(
        xmin=0.0,
        ymin=0.0,
        zmin=0.0,
        xmax=0.0,
        ymax=0.0,
        zmax=0.0,
        nxb=1,
        nyb=1,
        nzb=1,
        nblockx=1,
        nblocky=1,
        nblockz=1,
    )

    for key, value in attributes.items():
        if hasattr(self, key):
            setattr(self, key, value)
        else:
            raise ValueError(f"[boxkit.create_dataset]: Invalid attributes {key}")

    # Create data_attributes
    data_attributes = {
        "nblocks": int(self.nblockx * self.nblocky * self.nblockz),
        "nxb": int(self.nxb),
        "nyb": int(self.nyb),
        "nzb": int(self.nzb),
    }

    data = library.Data(storage=storage, **data_attributes)

    # Cell sizes per direction; degenerate (zero-extent) directions get a
    # unit spacing so downstream arithmetic stays finite.
    self.dx, self.dy, self.dz = [
        (self.xmax - self.xmin) / (self.nblockx * self.nxb),
        (self.ymax - self.ymin) / (self.nblocky * self.nyb),
        (self.zmax - self.zmin) / (self.nblockz * self.nzb),
    ]

    if self.dx == 0:
        self.dx = 1
    if self.dy == 0:
        self.dy = 1
    if self.dz == 0:
        self.dz = 1

    # Blocks are laid out on a Morton (Z-order) curve: deinterleave the
    # linear block index to recover the (i, j[, k]) block coordinates.
    blocklist = []
    for lblock in range(self.nblockx * self.nblocky * self.nblockz):
        block_attributes = {}
        block_attributes["dx"] = self.dx
        block_attributes["dy"] = self.dy
        block_attributes["dz"] = self.dz

        if self.nblockz == 1:
            # 2D layout: 2-way Morton deinterleave.
            block_attributes["xmin"] = (
                self.xmin + pymorton.deinterleave2(lblock)[0] * self.nxb * self.dx
            )
            block_attributes["ymin"] = (
                self.ymin + pymorton.deinterleave2(lblock)[1] * self.nyb * self.dy
            )
            block_attributes["zmin"] = self.zmin
        else:
            # 3D layout: 3-way Morton deinterleave.
            block_attributes["xmin"] = (
                self.xmin + pymorton.deinterleave3(lblock)[0] * self.nxb * self.dx
            )
            block_attributes["ymin"] = (
                self.ymin + pymorton.deinterleave3(lblock)[1] * self.nyb * self.dy
            )
            block_attributes["zmin"] = (
                self.zmin + pymorton.deinterleave3(lblock)[2] * self.nzb * self.dz
            )

        block_attributes["xmax"] = block_attributes["xmin"] + self.nxb * self.dx
        block_attributes["ymax"] = block_attributes["ymin"] + self.nyb * self.dy
        block_attributes["zmax"] = block_attributes["zmin"] + self.nzb * self.dz
        block_attributes["tag"] = lblock

        blocklist.append(library.Block(data, **block_attributes))

    return library.Dataset(blocklist, data)


def create_region(dataset, **attributes):
    """
    Create a region from a dataset

    Parameters
    ----------
    dataset    : Dataset object
    attributes : dictionary of attributes
                 { 'xmin' : low  x bound
                   'ymin' : low  y bound
                   'zmin' : low  z bound
                   'xmax' : high x bound
                   'ymax' : high y bound
                   'zmax' : high z bound }
                 Unspecified bounds default to the dataset's bounds.

    Returns
    -------
    Region object
    """
    bounds = _bounds_from(dataset, attributes, "create_region")
    return library.Region(_leaf_blocks(dataset), **vars(bounds))


def create_slice(dataset, **attributes):
    """
    Create a slice from a dataset

    Parameters
    ----------
    dataset    : Dataset object
    attributes : dictionary of attributes
                 { 'xmin' : low  x bound
                   'ymin' : low  y bound
                   'zmin' : low  z bound
                   'xmax' : high x bound
                   'ymax' : high y bound
                   'zmax' : high z bound }
                 Unspecified bounds default to the dataset's bounds.

    Returns
    -------
    Slice object
    """
    bounds = _bounds_from(dataset, attributes, "create_slice")
    return library.Slice(_leaf_blocks(dataset), **vars(bounds))
PypiClean
/OASYS1-ALS-ShadowOui-0.0.60.tar.gz/OASYS1-ALS-ShadowOui-0.0.60/orangecontrib/wofry/als/widgets/extensions/ow_reflector_grazing_1D.py
import numpy import sys from scipy import interpolate from PyQt5.QtGui import QPalette, QColor, QFont from PyQt5.QtWidgets import QMessageBox from orangewidget import gui from orangewidget.settings import Setting from oasys.widgets import gui as oasysgui from oasys.widgets import congruence from oasys.util.oasys_util import TriggerIn, EmittingStream from orangecontrib.wofry.util.wofry_objects import WofryData from orangecontrib.wofry.widgets.gui.ow_wofry_widget import WofryWidget from orangecontrib.wofry.widgets.gui.python_script import PythonScript from syned.widget.widget_decorator import WidgetDecorator from wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D from shadow4.optical_surfaces.s4_conic import S4Conic from numba import jit, prange @jit(nopython=True, parallel=True) def goFromToSequential(field1, x1, y1, x2, y2, wavelength=1e-10, normalize_intensities=False): field2 = x2 * 0j wavenumber = numpy.pi * 2 / wavelength for i in prange(field2.size): r = numpy.sqrt(numpy.power(x1 - x2[i], 2) + numpy.power(y1 - y2[i], 2)) field2[i] = (field1 * numpy.exp(1.j * wavenumber * r)).sum() if normalize_intensities: field2 *= numpy.sqrt((numpy.abs(field1) ** 2).sum() / (numpy.abs(field2) ** 2).sum()) return field2 class OWReflectorGrazing1D(WofryWidget): name = "Grazing Reflector 1D" id = "WofryReflectorGrazing1D" description = "ALS Grazing Reflector 1D" icon = "icons/reflector_grazing1D.png" priority = 4 category = "Wofry Wavefront Propagation" keywords = ["data", "file", "load", "read", "grazing"] outputs = [{"name":"WofryData", "type":WofryData, "doc":"WofryData", "id":"WofryData"}, {"name":"Trigger", "type": TriggerIn, "doc":"Feedback signal to start a new beam simulation", "id":"Trigger"}] inputs = [("WofryData", WofryData, "set_input"), ("GenericWavefront1D", GenericWavefront1D, "set_input"), ("DABAM 1D Profile", numpy.ndarray, "receive_dabam_profile"), WidgetDecorator.syned_input_data()[0]] grazing_angle_in = Setting(1.5e-3) p_distance = 
Setting(1.0) q_distance = Setting(1.0) zoom_factor = Setting(1.0) shape = Setting(1) p_focus = Setting(1.0) q_focus = Setting(1.0) error_flag = Setting(0) error_file = Setting("<none>") mirror_length = Setting(0.2) mirror_points = Setting(500) write_profile = Setting(0) write_input_wavefront = Setting(0) input_data = None titles = ["Wavefront 1D Intensity", "Wavefront 1D Phase","Wavefront Real(Amplitude)","Wavefront Imag(Amplitude)","O.E. Profile"] def __init__(self): super().__init__(is_automatic=True, show_view_options=True) # # add script tab to tabs panel # script_tab = oasysgui.createTabPage(self.main_tabs, "Script") self.wofry_script = PythonScript() self.wofry_script.code_area.setFixedHeight(400) script_box = gui.widgetBox(script_tab, "Python script", addSpace=True, orientation="horizontal") script_box.layout().addWidget(self.wofry_script) # # build control panel # button_box = oasysgui.widgetBox(self.controlArea, "", addSpace=False, orientation="horizontal") button = gui.button(button_box, self, "Propagate Wavefront", callback=self.propagate_wavefront) font = QFont(button.font()) font.setBold(True) button.setFont(font) palette = QPalette(button.palette()) # make a copy of the palette palette.setColor(QPalette.ButtonText, QColor('Dark Blue')) button.setPalette(palette) # assign new palette button.setFixedHeight(45) gui.separator(self.controlArea) self.controlArea.setFixedWidth(self.CONTROL_AREA_WIDTH) tabs_setting = oasysgui.tabWidget(self.controlArea) tabs_setting.setFixedHeight(self.TABS_AREA_HEIGHT + 50) tabs_setting.setFixedWidth(self.CONTROL_AREA_WIDTH-5) self.tab_sou = oasysgui.createTabPage(tabs_setting, "Generic Reflector 1D Settings") box_reflector = oasysgui.widgetBox(self.tab_sou, "Reflector", addSpace=False, orientation="vertical") oasysgui.lineEdit(box_reflector, self, "grazing_angle_in", "Grazing incidence angle [rad]", labelWidth=300, valueType=float, orientation="horizontal") gui.comboBox(box_reflector, self, "shape", label="Reflector shape", 
items=["Flat","Circle","Ellipse","Parabola"], sendSelectedValue=False, orientation="horizontal",callback=self.set_visible) self.box_focal_id = oasysgui.widgetBox(box_reflector, "", addSpace=True, orientation="vertical") oasysgui.lineEdit(self.box_focal_id, self, "p_focus", "Focal entrance arm [m]", labelWidth=300, valueType=float, orientation="horizontal") oasysgui.lineEdit(self.box_focal_id, self, "q_focus", "Focal exit arm [m]", labelWidth=300, valueType=float, orientation="horizontal") gui.comboBox(box_reflector, self, "error_flag", label="Add profile deformation", items=["No","Yes (from file)"], callback=self.set_visible, sendSelectedValue=False, orientation="horizontal") self.file_box_id = oasysgui.widgetBox(box_reflector, "", addSpace=True, orientation="horizontal") self.error_file_id = oasysgui.lineEdit(self.file_box_id, self, "error_file", "Error file X[m] Y[m]", labelWidth=120, valueType=str, orientation="horizontal") gui.button(self.file_box_id, self, "...", callback=self.set_error_file) self.mirror_box_id = oasysgui.widgetBox(box_reflector, "", addSpace=True, orientation="vertical") oasysgui.lineEdit(self.mirror_box_id, self, "mirror_length", "Mirror length [m]", labelWidth=300, valueType=float, orientation="horizontal") oasysgui.lineEdit(self.mirror_box_id, self, "mirror_points", "Points on mirror", labelWidth=300, valueType=int, orientation="horizontal") gui.comboBox(box_reflector, self, "write_profile", label="Dump profile to file", items=["No","Yes [reflector_profile1D.dat]"], sendSelectedValue=False, orientation="horizontal") box_propagator = oasysgui.widgetBox(self.tab_sou, "Propagator", addSpace=False, orientation="vertical") oasysgui.lineEdit(box_propagator, self, "p_distance", "Entrance arm [m]", valueType=float, orientation="horizontal") oasysgui.lineEdit(box_propagator, self, "q_distance", "Exit arm [m]", valueType=float, orientation="horizontal") oasysgui.lineEdit(box_propagator, self, "zoom_factor", "Zoom factor", valueType=float, 
orientation="horizontal") gui.comboBox(box_propagator, self, "write_input_wavefront", label="Input wf to file (for script)", items=["No","Yes [wavefront_input.h5]"], sendSelectedValue=False, orientation="horizontal") self.set_visible() def set_visible(self): self.file_box_id.setVisible(self.error_flag) self.box_focal_id.setVisible(self.shape) self.mirror_box_id.setVisible(self.error_flag == 0) def set_error_file(self): self.error_file_id.setText(oasysgui.selectFileFromDialog(self, self.error_file, "Open file with profile error")) def initializeTabs(self): size = len(self.tab) indexes = range(0, size) for index in indexes: self.tabs.removeTab(size-1-index) self.tab = [] self.plot_canvas = [] for index in range(0, len(self.titles)): self.tab.append(gui.createTabPage(self.tabs, self.titles[index])) self.plot_canvas.append(None) for tab in self.tab: tab.setFixedHeight(self.IMAGE_HEIGHT) tab.setFixedWidth(self.IMAGE_WIDTH) def check_fields(self): self.grazing_angle_in = congruence.checkStrictlyPositiveNumber(self.grazing_angle_in, "Grazing incidence angle") self.p_distance = congruence.checkNumber(self.p_distance, "Entrance arm") self.q_distance = congruence.checkNumber(self.q_distance, "Exit arm") self.zoom_factor = congruence.checkStrictlyPositiveNumber(self.zoom_factor, "Zoom factor") self.p_focus = congruence.checkNumber(self.p_focus, "p focus") self.q_focus = congruence.checkNumber(self.q_focus, "q focus") self.error_file = congruence.checkFileName(self.error_file) def receive_syned_data(self): raise Exception(NotImplementedError) def set_input(self, wofry_data): if not wofry_data is None: if isinstance(wofry_data, WofryData): self.input_data = wofry_data else: self.input_data = WofryData(wavefront=wofry_data) if self.is_automatic_execution: self.propagate_wavefront() def receive_dabam_profile(self, dabam_profile): if not dabam_profile is None: try: file_name = "dabam_profile_" + str(id(self)) + ".dat" file = open(file_name, "w") for element in dabam_profile: 
file.write(str(element[0]) + " " + str(element[1]) + "\n") file.flush() file.close() self.error_flag = 1 self.error_file = file_name self.set_visible() except Exception as exception: QMessageBox.critical(self, "Error", exception.args[0], QMessageBox.Ok) if self.IS_DEVELOP: raise exception def propagate_wavefront(self): self.progressBarInit() self.wofry_output.setText("") sys.stdout = EmittingStream(textWritten=self.writeStdOut) self.check_fields() if self.input_data.get_wavefront() is None: raise Exception("No Input Wavefront") if self.error_flag == 0: error_file = "" mirror_length = self.mirror_length mirror_points = self.mirror_points else: error_file = self.error_file mirror_length = 0 mirror_points = 0 output_wavefront, abscissas_on_mirror, height = self.calculate_output_wavefront_after_grazing_reflector1D( self.input_data.get_wavefront(), shape=self.shape, p_focus=self.p_focus, q_focus=self.q_focus, grazing_angle_in=self.grazing_angle_in, p_distance=self.p_distance, q_distance=self.q_distance, zoom_factor=self.zoom_factor, error_flag=self.error_flag, error_file=error_file, mirror_length=mirror_length, mirror_points=mirror_points, write_profile=self.write_profile) if self.write_input_wavefront: self.input_data.get_wavefront().save_h5_file("wavefront_input.h5",subgroupname="wfr",intensity=True,phase=True,overwrite=True,verbose=True) # script dict_parameters = {"grazing_angle_in": self.grazing_angle_in, "p_distance": self.p_distance, "q_distance": self.q_distance, "zoom_factor": self.zoom_factor, "shape": self.shape, "p_focus": self.p_focus, "q_focus": self.q_focus, "error_flag":self.error_flag, "error_file":error_file, "mirror_length":mirror_length, "mirror_points":mirror_points, "write_profile":self.write_profile} script_template = self.script_template_output_wavefront() self.wofry_script.set_code(script_template.format_map(dict_parameters)) if self.view_type > 0: self.do_plot_wavefront(output_wavefront, abscissas_on_mirror, height) self.progressBarFinished() # 
send beamline = self.input_data.get_beamline().duplicate() # TODO add element here self.send("WofryData", WofryData(beamline=beamline, wavefront=output_wavefront)) self.send("Trigger", TriggerIn(new_object=True)) @classmethod def propagator1D_offaxis(cls, input_wavefront, x2_oe, y2_oe, p, q, theta_grazing_in, theta_grazing_out=None, zoom_factor=1.0, normalize_intensities=False): from wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D if theta_grazing_out is None: theta_grazing_out = theta_grazing_in x1 = input_wavefront.get_abscissas() field1 = input_wavefront.get_complex_amplitude() wavelength = input_wavefront.get_wavelength() x1_oe = -p * numpy.cos(theta_grazing_in) + x1 * numpy.sin(theta_grazing_in) y1_oe = p * numpy.sin(theta_grazing_in) + x1 * numpy.cos(theta_grazing_in) # field2 is the electric field in the mirror field2 = goFromToSequential(field1, x1_oe, y1_oe, x2_oe, y2_oe, wavelength=wavelength, normalize_intensities=normalize_intensities) x3 = x1 * zoom_factor x3_oe = q * numpy.cos(theta_grazing_out) + x3 * numpy.sin(theta_grazing_out) y3_oe = q * numpy.sin(theta_grazing_out) + x3 * numpy.cos(theta_grazing_out) # field2 is the electric field in the image plane field3 = goFromToSequential(field2, x2_oe, y2_oe, x3_oe, y3_oe, wavelength=wavelength, normalize_intensities=normalize_intensities) output_wavefront = GenericWavefront1D.initialize_wavefront_from_arrays(x3, field3 / numpy.sqrt(zoom_factor), wavelength=wavelength) return output_wavefront @classmethod def calculate_output_wavefront_after_grazing_reflector1D(cls,input_wavefront, shape=1, p_focus=1.0, q_focus=1.0, grazing_angle_in=1.5e-3, p_distance=1.0, q_distance=1.0, zoom_factor=1.0, error_flag=0, error_file="", mirror_length=0.1, mirror_points=1000, write_profile=0): x1 = input_wavefront.get_abscissas() field1 = input_wavefront.get_complex_amplitude() if error_flag == 0: # no profile file x2_oe = numpy.linspace(-0.5 * mirror_length, 0.5 * mirror_length, mirror_points) # x1 / 
numpy.sin(grazing_angle_in) y2_oe = numpy.zeros_like(x2_oe) else: a = numpy.loadtxt(error_file) x2_oe = a[:, 0] y2_oe = a[:, 1] if shape == 0: height = numpy.zeros_like(x2_oe) elif shape == 1: ccc = S4Conic.initialize_as_sphere_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = ccc.height(x2_oe) print(ccc.info()) y2_oe += height elif shape == 2: ccc = S4Conic.initialize_as_ellipsoid_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = ccc.height(x2_oe) print(ccc.info()) y2_oe += height elif shape == 3: ccc = S4Conic.initialize_as_paraboloid_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = ccc.height(x2_oe) print(ccc.info()) y2_oe += height else: raise Exception("Wrong shape") output_wavefront = cls.propagator1D_offaxis(input_wavefront, x2_oe, y2_oe, p_distance,q_distance, grazing_angle_in, zoom_factor=zoom_factor,normalize_intensities=True) # output files if write_profile: f = open("reflector_profile1D.dat","w") for i in range(height.size): f.write("%g %g\n"%(x2_oe[i],y2_oe[i])) f.close() print("File reflector_profile1D.dat written to disk.") return output_wavefront, x2_oe, y2_oe # warning: pay attention to the double backslash in \\n def script_template_output_wavefront(self): return \ """ import numpy from shadow4.optical_surfaces.s4_conic import S4Conic from numba import jit, prange @jit(nopython=True, parallel=True) def goFromToSequential(field1, x1, y1, x2, y2, wavelength=1e-10, normalize_intensities=False): field2 = x2 * 0j wavenumber = numpy.pi * 2 / wavelength for i in prange(field2.size): r = numpy.sqrt(numpy.power(x1 - x2[i], 2) + numpy.power(y1 - y2[i], 2)) field2[i] = (field1 * numpy.exp(1.j * wavenumber * r)).sum() if normalize_intensities: field2 *= numpy.sqrt((numpy.abs(field1) ** 2).sum() / (numpy.abs(field2) ** 2).sum()) return field2 def propagator1D_offaxis(input_wavefront, x2_oe, y2_oe, p, q, theta_grazing_in, theta_grazing_out=None, zoom_factor=1.0, normalize_intensities=False): from 
wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D if theta_grazing_out is None: theta_grazing_out = theta_grazing_in x1 = input_wavefront.get_abscissas() field1 = input_wavefront.get_complex_amplitude() wavelength = input_wavefront.get_wavelength() x1_oe = -p * numpy.cos(theta_grazing_in) + x1 * numpy.sin(theta_grazing_in) y1_oe = p * numpy.sin(theta_grazing_in) + x1 * numpy.cos(theta_grazing_in) # field2 is the electric field in the mirror field2 = goFromToSequential(field1, x1_oe, y1_oe, x2_oe, y2_oe, wavelength=wavelength, normalize_intensities=normalize_intensities) x3 = x1 * zoom_factor x3_oe = q * numpy.cos(theta_grazing_out) + x3 * numpy.sin(theta_grazing_out) y3_oe = q * numpy.sin(theta_grazing_out) + x3 * numpy.cos(theta_grazing_out) # field2 is the electric field in the image plane field3 = goFromToSequential(field2, x2_oe, y2_oe, x3_oe, y3_oe, wavelength=wavelength, normalize_intensities=normalize_intensities) output_wavefront = GenericWavefront1D.initialize_wavefront_from_arrays(x3, field3 / numpy.sqrt(zoom_factor), wavelength=wavelength) return output_wavefront def calculate_output_wavefront_after_grazing_reflector1D(input_wavefront,shape=1, p_focus=1.0, q_focus=1.0, grazing_angle_in=1.5e-3, p_distance=1.0, q_distance=1.0, mirror_length=0.1, mirror_points=1000, zoom_factor=1.0, error_flag=0, error_file="", write_profile=0): x1 = input_wavefront.get_abscissas() field1 = input_wavefront.get_complex_amplitude() if error_flag == 0: # no profile file x2_oe = numpy.linspace(-0.5 * mirror_length, 0.5 * mirror_length, mirror_points) y2_oe = numpy.zeros_like(x2_oe) else: a = numpy.loadtxt(error_file) x2_oe = a[:, 0] y2_oe = a[:, 1] if shape == 0: pass elif shape == 1: ccc = S4Conic.initialize_as_sphere_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = ccc.height(x2_oe) print(ccc.info()) y2_oe += height elif shape == 2: ccc = S4Conic.initialize_as_ellipsoid_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = 
ccc.height(x2_oe) print(ccc.info()) y2_oe += height elif shape == 3: ccc = S4Conic.initialize_as_paraboloid_from_focal_distances(p_focus, q_focus, grazing_angle_in) height = ccc.height(x2_oe) print(ccc.info()) y2_oe += height else: raise Exception("Wrong shape") output_wavefront = propagator1D_offaxis(input_wavefront, x2_oe, y2_oe, p_distance,q_distance, grazing_angle_in, zoom_factor=zoom_factor,normalize_intensities=True) # output files if write_profile: f = open("reflector_profile1D.dat","w") for i in range(height.size): f.write("%g %g\\n"%(x2_oe[i],y2_oe[i])) f.close() print("File reflector_profile1D.dat written to disk.") return output_wavefront, x2_oe, y2_oe # # main # from wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D input_wavefront = GenericWavefront1D.load_h5_file("wavefront_input.h5","wfr") output_wavefront, abscissas_on_mirror, height = calculate_output_wavefront_after_grazing_reflector1D(input_wavefront,shape={shape},p_focus={p_focus},q_focus={q_focus},grazing_angle_in={grazing_angle_in},p_distance={p_distance},q_distance={q_distance},zoom_factor={zoom_factor},error_flag={error_flag},error_file="{error_file}",write_profile={write_profile}) from srxraylib.plot.gol import plot plot(output_wavefront.get_abscissas(),output_wavefront.get_intensity()) """ def do_plot_results(self, progressBarValue): # required by parent pass def do_plot_wavefront(self, wavefront1D, abscissas_on_mirror, height, progressBarValue=80): if not self.input_data is None: self.progressBarSet(progressBarValue) self.plot_data1D(x=1e6*wavefront1D.get_abscissas(), y=wavefront1D.get_intensity(), progressBarValue=progressBarValue, tabs_canvas_index=0, plot_canvas_index=0, calculate_fwhm=True, title=self.titles[0], xtitle="Spatial Coordinate [$\mu$m]", ytitle="Intensity") self.plot_data1D(x=1e6*wavefront1D.get_abscissas(), y=wavefront1D.get_phase(from_minimum_intensity=0.1,unwrap=1), progressBarValue=progressBarValue + 10, tabs_canvas_index=1, plot_canvas_index=1, 
calculate_fwhm=False, title=self.titles[1], xtitle="Spatial Coordinate [$\mu$m]", ytitle="Phase [unwrapped, for intensity > 10% of peak] (rad)") self.plot_data1D(x=1e6*wavefront1D.get_abscissas(), y=numpy.real(wavefront1D.get_complex_amplitude()), progressBarValue=progressBarValue + 10, tabs_canvas_index=2, plot_canvas_index=2, calculate_fwhm=False, title=self.titles[2], xtitle="Spatial Coordinate [$\mu$m]", ytitle="Real(Amplitude)") self.plot_data1D(x=1e6*wavefront1D.get_abscissas(), y=numpy.imag(wavefront1D.get_complex_amplitude()), progressBarValue=progressBarValue + 10, tabs_canvas_index=3, plot_canvas_index=3, calculate_fwhm=False, title=self.titles[3], xtitle="Spatial Coordinate [$\mu$m]", ytitle="Imag(Amplitude)") self.plot_data1D(x=abscissas_on_mirror, y=1e6*height, progressBarValue=progressBarValue + 10, tabs_canvas_index=4, plot_canvas_index=4, calculate_fwhm=False, title=self.titles[4], xtitle="Spatial Coordinate along o.e. [m]", ytitle="Profile Height [$\mu$m]") self.plot_canvas[0].resetZoom() if __name__ == '__main__': from PyQt5.QtWidgets import QApplication def create_wavefront(): # # create input_wavefront # from wofry.propagator.wavefront1D.generic_wavefront import GenericWavefront1D input_wavefront = GenericWavefront1D.initialize_wavefront_from_range(x_min=-0.00147, x_max=0.00147, number_of_points=1000) input_wavefront.set_photon_energy(250) input_wavefront.set_spherical_wave(radius=13.73, center=0, complex_amplitude=complex(1, 0)) return input_wavefront app = QApplication([]) ow = OWReflectorGrazing1D() ow.set_input(create_wavefront()) # ow.receive_dabam_profile(numpy.array([[1,2],[3,4]])) ow.propagate_wavefront() ow.show() app.exec_() ow.saveSettings()
PypiClean
/CSUMMDET-1.0.23.tar.gz/CSUMMDET-1.0.23/mmdet/ops/nms/nms_wrapper.py
import numpy as np
import torch

from . import nms_cpu, nms_cuda
from .soft_nms_cpu import soft_nms_cpu


def nms(dets, iou_thr, device_id=None):
    """Dispatch to either CPU or GPU NMS implementations.

    Accepts either a torch tensor or a numpy array. The GPU kernel is
    used when the input already lives on a GPU, or when ``device_id``
    is given for a numpy input; otherwise the CPU kernel runs. The
    return values always match the input's type.

    Arguments:
        dets (torch.Tensor or np.ndarray): bboxes with scores.
        iou_thr (float): IoU threshold for NMS.
        device_id (int, optional): when `dets` is a numpy array, if `device_id`
            is None, then cpu nms is used, otherwise gpu_nms will be used.

    Returns:
        tuple: kept bboxes and indice, which is always the same data type as
            the input.

    Example:
        >>> dets = np.array([[49.1, 32.4, 51.0, 35.9, 0.9],
        >>>                  [49.3, 32.9, 51.0, 35.3, 0.9],
        >>>                  [49.2, 31.8, 51.0, 35.4, 0.5],
        >>>                  [35.1, 11.5, 39.1, 15.7, 0.5],
        >>>                  [35.6, 11.8, 39.3, 14.2, 0.5],
        >>>                  [35.3, 11.5, 39.9, 14.5, 0.4],
        >>>                  [35.2, 11.7, 39.7, 15.7, 0.3]], dtype=np.float32)
        >>> iou_thr = 0.7
        >>> supressed, inds = nms(dets, iou_thr)
        >>> assert len(inds) == len(supressed) == 3
    """
    # Normalise the input to a tensor, remembering whether a numpy
    # result is expected on the way out.
    if isinstance(dets, torch.Tensor):
        came_from_numpy = False
        tensor_dets = dets
    elif isinstance(dets, np.ndarray):
        came_from_numpy = True
        target = 'cpu' if device_id is None else 'cuda:{}'.format(device_id)
        tensor_dets = torch.from_numpy(dets).to(target)
    else:
        raise TypeError(
            'dets must be either a Tensor or numpy array, but got {}'.format(
                type(dets)))

    # Run the suppression kernel matching the tensor's device. An empty
    # input short-circuits to an empty index tensor.
    if tensor_dets.shape[0] == 0:
        keep = tensor_dets.new_zeros(0, dtype=torch.long)
    elif tensor_dets.is_cuda:
        keep = nms_cuda.nms(tensor_dets, iou_thr)
    else:
        keep = nms_cpu.nms(tensor_dets, iou_thr)

    if came_from_numpy:
        keep = keep.cpu().numpy()
    return dets[keep, :], keep


def soft_nms(dets, iou_thr, method='linear', sigma=0.5, min_score=1e-3):
    """Soft-NMS on CPU, preserving the input's type (tensor or numpy).

    Example:
        >>> dets = np.array([[4., 3., 5., 3., 0.9],
        >>>                  [4., 3., 5., 4., 0.9],
        >>>                  [3., 1., 3., 1., 0.5],
        >>>                  [3., 1., 3., 1., 0.5],
        >>>                  [3., 1., 3., 1., 0.4],
        >>>                  [3., 1., 3., 1., 0.0]], dtype=np.float32)
        >>> iou_thr = 0.7
        >>> supressed, inds = soft_nms(dets, iou_thr, sigma=0.5)
        >>> assert len(inds) == len(supressed) == 3
    """
    # The C implementation only understands numpy, so detach/copy a
    # tensor input down to the host first.
    if isinstance(dets, torch.Tensor):
        was_tensor = True
        dets_np = dets.detach().cpu().numpy()
    elif isinstance(dets, np.ndarray):
        was_tensor = False
        dets_np = dets
    else:
        raise TypeError(
            'dets must be either a Tensor or numpy array, but got {}'.format(
                type(dets)))

    method_codes = {'linear': 1, 'gaussian': 2}
    if method not in method_codes:
        raise ValueError('Invalid method for SoftNMS: {}'.format(method))

    new_dets, inds = soft_nms_cpu(
        dets_np,
        iou_thr,
        method=method_codes[method],
        sigma=sigma,
        min_score=min_score)

    # Mirror the input type on the way out.
    if was_tensor:
        return dets.new_tensor(new_dets), dets.new_tensor(
            inds, dtype=torch.long)
    return new_dets.astype(np.float32), inds.astype(np.int64)
PypiClean
/GaiaXPy-2.1.0.tar.gz/GaiaXPy-2.1.0/gaiaxpy/spectrum/xp_continuous_spectrum.py
import numpy as np

from gaiaxpy.core.generic_functions import array_to_symmetric_matrix
from gaiaxpy.core.satellite import BANDS
from .utils import _list_to_array, get_covariance_matrix
from .xp_spectrum import XpSpectrum


class XpContinuousSpectrum(XpSpectrum):
    """
    A Gaia BP/RP spectrum represented as a continuous function defined as the sum of a set of
    bases functions multiplied by a set of coefficient. This definition is the result of the
    least squares fit. Covariance and standard deviation for the least square solution are
    also part of the continuous spectrum definition to allow estimating errors.
    """

    def __init__(self, source_id, xp, coefficients, covariance, standard_deviation):
        """
        Initialise XP continuous spectrum.

        Args:
            source_id (str): Source identifier.
            xp (str): Gaia photometer, can be either 'bp' or 'rp'.
            coefficients (ndarray): 1D array containing the coefficients
                multiplying the basis functions.
            covariance (ndarray): 2D array containing the covariance of the
                least squares solution.
            standard_deviation (float): Standard deviation of the least
                squares solution.
        """
        XpSpectrum.__init__(self, source_id, xp)
        self.coefficients = coefficients
        self.covariance = covariance
        self.standard_deviation = standard_deviation
        # Fixed archive identifiers of the basis-function sets per photometer.
        self.basis_function_id = {BANDS.bp: 56, BANDS.rp: 57}

    @classmethod
    def from_data_frame(cls, df, band):
        """
        Initialise XP continuous spectrum from a Pandas DataFrame.

        Args:
            df (DataFrame): DataFrame containing at least the fields source_id,
                BAND_n_parameters, BAND_coefficients, BAND_coefficient_correlations,
                BAND_standard_deviation, where BAND is either 'bp' or 'rp'. The same
                structure as used in the archive for the correlation matrix is expected.
            band (str): Gaia photometer, can be either 'bp' or 'rp'.
        """
        corr = array_to_symmetric_matrix(df[f'{band}_coefficient_correlations'],
                                         df[f'{band}_n_parameters'])
        # NOTE(review): this overwrites the caller's DataFrame/row in place so that
        # get_covariance_matrix sees the symmetric matrix — confirm callers expect
        # the input to be mutated.
        df[f'{band}_coefficient_correlations'] = corr
        cov = get_covariance_matrix(df, band)
        return cls(df['source_id'], band, df[f'{band}_coefficients'], cov,
                   df[f'{band}_standard_deviation'])

    @classmethod
    def get_units(cls):
        # Continuous spectra are dimensionless coefficient sets: no units to report.
        return dict()

    def get_coefficients(self):
        """
        Get the coefficients associated with the spectrum.

        Returns:
            ndarray: The 1D array of the coefficients multiplying the basis functions.
        """
        return self.coefficients

    def get_covariance(self):
        """
        Get the covariance associated with the spectrum.

        Returns:
            ndarray: The 2D array of the covariance matrix.
        """
        return self.covariance

    def get_standard_deviation(self):
        """
        Get the standard deviation associated with the spectrum.

        Returns:
            float: The standard deviation of the least squares solution.
        """
        return self.standard_deviation

    def spectrum_to_dict(self):
        """
        Represent spectrum as dictionary.

        Returns:
            dict: A dictionary populated with the minimum set of parameters that
                need to be stored for this object. This is optimised for writing
                large number of sampled spectra and for this reason the array of
                positions is NOT included as it is expected to be the same for a
                batch of spectra. The array fo positions can be retrieved calling
                the sampling_to_dict method.
        """
        # Convert the covariance into a correlation matrix:
        # corr = D^-1 . cov . D^-1 with D = diag(std errors).
        diagonal = np.sqrt(np.diag(self.covariance))
        diagonal_inv = np.diag(1.0 / diagonal)
        correlation_matrix = np.matmul(np.matmul(diagonal_inv, self.covariance), diagonal_inv)
        return {
            'source_id': self.source_id,
            'xp': self.xp.upper(),
            'standard_deviation': self.standard_deviation,
            'coefficients': _list_to_array(self.coefficients),
            'coefficient_correlations': _list_to_array(_extract_lower_triangle(correlation_matrix)),
            'coefficient_errors': _list_to_array(diagonal),
            'n_parameters': len(self.coefficients),
            'basis_function_id': self.basis_function_id[self.xp]
        }


def _extract_lower_triangle(matrix):
    """
    Extract the lower triangle of the matrix without including the diagonal.

    Returns:
        ndarray: 1D array of the strictly-lower-triangular elements in
            row-major order (same ordering the archive uses).
    """
    # Fancy indexing with the index pair does the extraction in one C-level
    # operation; it yields exactly the same elements, in the same order, as
    # looping over tril_indices pairs in Python.
    return matrix[np.tril_indices(matrix.shape[0], k=-1)]
PypiClean
/BotEXBotBase-3.1.3.tar.gz/BotEXBotBase-3.1.3/redbot/cogs/permissions/permissions.py
import asyncio
import io
import textwrap
from copy import copy
from typing import Union, Optional, Dict, List, Tuple, Any, Iterator, ItemsView, cast

import discord
import yaml
from schema import And, Or, Schema, SchemaError, Optional as UseOptional

from redbot.core import checks, commands, config
from redbot.core.bot import Red
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import box
from redbot.core.utils.menus import start_adding_reactions
from redbot.core.utils.predicates import ReactionPredicate, MessagePredicate

from .converters import (
    CogOrCommand,
    RuleType,
    ClearableRuleType,
    GuildUniqueObjectFinder,
    GlobalUniqueObjectFinder,
)

_ = Translator("Permissions", __file__)

COG = "COG"
COMMAND = "COMMAND"
GLOBAL = 0

# The strings in the schema are constants and should get extracted, but not translated until
# runtime.
translate = _
_ = lambda s: s

# NOTE(review): the COMMAND branch wraps its per-name mapping in And(...) while the
# COG branch uses Or({...}, {}, ...), so an *empty* rule mapping under a command name
# fails validation but an empty mapping under a cog name passes — confirm whether
# that asymmetry is intentional.
YAML_SCHEMA = Schema(
    Or(
        {
            UseOptional(COMMAND): Or(
                {
                    str: And(
                        {
                            Or(int, "default"): And(
                                bool, error=_("Rules must be either `true` or `false`.")
                            )
                        },
                        error=_("Keys under command names must be IDs (numbers) or `default`."),
                    )
                },
                {},
                error=_("Keys under `COMMAND` must be command names (strings)."),
            ),
            UseOptional(COG): Or(
                {
                    str: Or(
                        {
                            Or(int, "default"): And(
                                bool, error=_("Rules must be either `true` or `false`.")
                            )
                        },
                        {},
                        error=_("Keys under cog names must be IDs or `default`."),
                    )
                },
                {},
                error=_("Keys under `COG` must be cog names (strings)."),
            ),
        },
        {},
        error=_("Top-level keys must be either `COG` or `COMMAND`."),
    )
)

_ = translate

__version__ = "1.0.0"


@cog_i18n(_)
class Permissions(commands.Cog):
    """Customise permissions for commands and cogs."""

    def __init__(self, bot: Red):
        super().__init__()
        self.bot = bot

        # Config Schema:
        # "COG"
        # -> Cog names...
        # -> Guild IDs...
        # -> Model IDs...
        # -> True|False
        # -> "default"
        # -> True|False
        # "COMMAND"
        # -> Command names...
        # -> Guild IDs...
        # -> Model IDs...
        # -> True|False
        # -> "default"
        # -> True|False
        # Note that GLOBAL rules are denoted by an ID of 0.
        self.config = config.Config.get_conf(self, identifier=78631113035100160)
        self.config.register_global(version="")
        self.config.register_custom(COG)
        self.config.register_custom(COMMAND)

    @commands.group(aliases=["p"])
    async def permissions(self, ctx: commands.Context):
        """Command permission management tools."""
        pass

    @permissions.command(name="explain")
    async def permissions_explain(self, ctx: commands.Context):
        """Explain how permissions works."""
        # Apologies in advance for the translators out there...
        message = _(
            "This cog extends the default permission model of the bot. By default, many commands "
            "are restricted based on what the command can do.\n"
            "This cog allows you to refine some of those restrictions. You can allow wider or "
            "narrower access to most commands using it. You cannot, however, change the "
            "restrictions on owner-only commands.\n\n"
            "When additional rules are set using this cog, those rules will be checked prior to "
            "checking for the default restrictions of the command.\n"
            "Global rules (set by the owner) are checked first, then rules set for servers. If "
            "multiple global or server rules apply to the case, the order they are checked is:\n"
            " 1. Rules about a user.\n"
            " 2. Rules about the voice channel a user is in.\n"
            " 3. Rules about the text channel a command was issued in.\n"
            " 4. Rules about a role the user has (The highest role they have with a rule will be "
            "used).\n"
            " 5. Rules about the server a user is in (Global rules only).\n\n"
            "For more details, please read the [official documentation]"
            "(https://red-discordbot.readthedocs.io/en/v3-develop/cog_permissions.html)."
        )
        await ctx.maybe_send_embed(message)

    @permissions.command(name="canrun")
    async def permissions_canrun(
        self, ctx: commands.Context, user: discord.Member, *, command: str
    ):
        """Check if a user can run a command.

        This will take the current context into account, such as the
        server and text channel.
        """
        if not command:
            return await ctx.send_help()

        # Run the permission check against a faked message pretending to be
        # the target user issuing the command in the current channel.
        fake_message = copy(ctx.message)
        fake_message.author = user
        fake_message.content = "{}{}".format(ctx.prefix, command)

        com = ctx.bot.get_command(command)
        if com is None:
            out = _("No such command")
        else:
            fake_context = await ctx.bot.get_context(fake_message)
            try:
                can = await com.can_run(
                    fake_context, check_all_parents=True, change_permission_state=False
                )
            except commands.CommandError:
                can = False

            out = (
                _("That user can run the specified command.")
                if can
                else _("That user can not run the specified command.")
            )
        await ctx.send(out)

    @checks.guildowner_or_permissions(administrator=True)
    @permissions.group(name="acl", aliases=["yaml"])
    async def permissions_acl(self, ctx: commands.Context):
        """Manage permissions with YAML files."""
        if ctx.invoked_subcommand is None or ctx.invoked_subcommand == self.permissions_acl:
            # Send a little guide on YAML formatting
            await ctx.send(
                _("Example YAML for setting rules:\n")
                + box(
                    textwrap.dedent(
                        """\
                        COMMAND:
                            ping:
                                12345678901234567: true
                                56789012345671234: false
                        COG:
                            General:
                                56789012345671234: true
                                12345678901234567: false
                                default: false
                        """
                    ),
                    lang="yaml",
                )
            )

    @checks.is_owner()
    @permissions_acl.command(name="setglobal")
    async def permissions_acl_setglobal(self, ctx: commands.Context):
        """Set global rules with a YAML file.

        **WARNING**: This will override reset *all* global rules
        to the rules specified in the uploaded file.

        This does not validate the names of commands and cogs before
        setting the new rules.
        """
        await self._permissions_acl_set(ctx, guild_id=GLOBAL, update=False)

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions_acl.command(name="setserver", aliases=["setguild"])
    async def permissions_acl_setguild(self, ctx: commands.Context):
        """Set rules for this server with a YAML file.

        **WARNING**: This will override reset *all* rules in this
        server to the rules specified in the uploaded file.
        """
        await self._permissions_acl_set(ctx, guild_id=ctx.guild.id, update=False)

    @checks.is_owner()
    @permissions_acl.command(name="getglobal")
    async def permissions_acl_getglobal(self, ctx: commands.Context):
        """Get a YAML file detailing all global rules."""
        file = await self._yaml_get_acl(guild_id=GLOBAL)
        try:
            await ctx.author.send(file=file)
        except discord.Forbidden:
            await ctx.send(_("I'm not allowed to DM you."))
        else:
            await ctx.send(_("I've just sent the file to you via DM."))
        finally:
            file.close()

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions_acl.command(name="getserver", aliases=["getguild"])
    async def permissions_acl_getguild(self, ctx: commands.Context):
        """Get a YAML file detailing all rules in this server."""
        file = await self._yaml_get_acl(guild_id=ctx.guild.id)
        try:
            await ctx.author.send(file=file)
        except discord.Forbidden:
            await ctx.send(_("I'm not allowed to DM you."))
        else:
            await ctx.send(_("I've just sent the file to you via DM."))
        finally:
            file.close()

    @checks.is_owner()
    @permissions_acl.command(name="updateglobal")
    async def permissions_acl_updateglobal(self, ctx: commands.Context):
        """Update global rules with a YAML file.

        This won't touch any rules not specified in the YAML
        file.
        """
        await self._permissions_acl_set(ctx, guild_id=GLOBAL, update=True)

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions_acl.command(name="updateserver", aliases=["updateguild"])
    async def permissions_acl_updateguild(self, ctx: commands.Context):
        """Update rules for this server with a YAML file.

        This won't touch any rules not specified in the YAML
        file.
        """
        await self._permissions_acl_set(ctx, guild_id=ctx.guild.id, update=True)

    @checks.is_owner()
    @permissions.command(name="addglobalrule")
    async def permissions_addglobalrule(
        self,
        ctx: commands.Context,
        allow_or_deny: RuleType,
        cog_or_command: CogOrCommand,
        who_or_what: GlobalUniqueObjectFinder,
    ):
        """Add a global rule to a command.

        `<allow_or_deny>` should be one of "allow" or "deny".

        `<cog_or_command>` is the cog or command to add the rule to.
        This is case sensitive.

        `<who_or_what>` is the user, channel, role or server the rule
        is for.
        """
        await self._add_rule(
            rule=cast(bool, allow_or_deny),
            cog_or_cmd=cog_or_command,
            model_id=who_or_what.id,
            guild_id=0,
        )
        await ctx.send(_("Rule added."))

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions.command(name="addserverrule", aliases=["addguildrule"])
    async def permissions_addguildrule(
        self,
        ctx: commands.Context,
        allow_or_deny: RuleType,
        cog_or_command: CogOrCommand,
        who_or_what: GuildUniqueObjectFinder,
    ):
        """Add a rule to a command in this server.

        `<allow_or_deny>` should be one of "allow" or "deny".

        `<cog_or_command>` is the cog or command to add the rule to.
        This is case sensitive.

        `<who_or_what>` is the user, channel or role the rule
        is for.
        """
        await self._add_rule(
            rule=cast(bool, allow_or_deny),
            cog_or_cmd=cog_or_command,
            model_id=who_or_what.id,
            guild_id=ctx.guild.id,
        )
        await ctx.send(_("Rule added."))

    @checks.is_owner()
    @permissions.command(name="removeglobalrule")
    async def permissions_removeglobalrule(
        self,
        ctx: commands.Context,
        cog_or_command: CogOrCommand,
        who_or_what: GlobalUniqueObjectFinder,
    ):
        """Remove a global rule from a command.

        `<cog_or_command>` is the cog or command to remove the rule
        from. This is case sensitive.

        `<who_or_what>` is the user, channel, role or server the rule
        is for.
        """
        await self._remove_rule(
            cog_or_cmd=cog_or_command, model_id=who_or_what.id, guild_id=GLOBAL
        )
        await ctx.send(_("Rule removed."))

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions.command(name="removeserverrule", aliases=["removeguildrule"])
    async def permissions_removeguildrule(
        self,
        ctx: commands.Context,
        cog_or_command: CogOrCommand,
        *,
        who_or_what: GuildUniqueObjectFinder,
    ):
        """Remove a server rule from a command.

        `<cog_or_command>` is the cog or command to remove the rule
        from. This is case sensitive.

        `<who_or_what>` is the user, channel or role the rule
        is for.
        """
        await self._remove_rule(
            cog_or_cmd=cog_or_command, model_id=who_or_what.id, guild_id=ctx.guild.id
        )
        await ctx.send(_("Rule removed."))

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions.command(name="setdefaultserverrule", aliases=["setdefaultguildrule"])
    async def permissions_setdefaultguildrule(
        self, ctx: commands.Context, allow_or_deny: ClearableRuleType, cog_or_command: CogOrCommand
    ):
        """Set the default rule for a command in this server.

        This is the rule a command will default to when no other rule
        is found.

        `<allow_or_deny>` should be one of "allow", "deny" or "clear".
        "clear" will reset the default rule.

        `<cog_or_command>` is the cog or command to set the default
        rule for. This is case sensitive.
        """
        await self._set_default_rule(
            rule=cast(Optional[bool], allow_or_deny),
            cog_or_cmd=cog_or_command,
            guild_id=ctx.guild.id,
        )
        await ctx.send(_("Default set."))

    @checks.is_owner()
    @permissions.command(name="setdefaultglobalrule")
    async def permissions_setdefaultglobalrule(
        self, ctx: commands.Context, allow_or_deny: ClearableRuleType, cog_or_command: CogOrCommand
    ):
        """Set the default global rule for a command.

        This is the rule a command will default to when no other rule
        is found.

        `<allow_or_deny>` should be one of "allow", "deny" or "clear".
        "clear" will reset the default rule.

        `<cog_or_command>` is the cog or command to set the default
        rule for. This is case sensitive.
        """
        await self._set_default_rule(
            rule=cast(Optional[bool], allow_or_deny), cog_or_cmd=cog_or_command, guild_id=GLOBAL
        )
        await ctx.send(_("Default set."))

    @checks.is_owner()
    @permissions.command(name="clearglobalrules")
    async def permissions_clearglobalrules(self, ctx: commands.Context):
        """Reset all global rules."""
        agreed = await self._confirm(ctx)
        if agreed:
            await self._clear_rules(guild_id=GLOBAL)
            await ctx.tick()

    @commands.guild_only()
    @checks.guildowner_or_permissions(administrator=True)
    @permissions.command(name="clearserverrules", aliases=["clearguildrules"])
    async def permissions_clearguildrules(self, ctx: commands.Context):
        """Reset all rules in this server."""
        agreed = await self._confirm(ctx)
        if agreed:
            await self._clear_rules(guild_id=ctx.guild.id)
            await ctx.tick()

    async def cog_added(self, cog: commands.Cog) -> None:
        """Event listener for `cog_add`.

        This loads rules whenever a new cog is added.
        """
        # FIX: cog rules are stored under the COG category (see _load_all_rules
        # and _yaml_set_acl); this previously read from COMMAND, so rules for a
        # newly loaded cog were looked up in the wrong config category.
        self._load_rules_for(
            cog_or_command=cog,
            rule_dict=await self.config.custom(COG, cog.__class__.__name__).all(),
        )

    async def command_added(self, command: commands.Command) -> None:
        """Event listener for `command_add`.

        This loads rules whenever a new command is added.
        """
        self._load_rules_for(
            cog_or_command=command,
            rule_dict=await self.config.custom(COMMAND, command.qualified_name).all(),
        )

    async def _add_rule(
        self, rule: bool, cog_or_cmd: CogOrCommand, model_id: int, guild_id: int
    ) -> None:
        """Add a rule.

        Guild ID should be 0 for global rules.

        Handles config.
        """
        if rule is True:
            cog_or_cmd.obj.allow_for(model_id, guild_id=guild_id)
        else:
            cog_or_cmd.obj.deny_to(model_id, guild_id=guild_id)
        async with self.config.custom(cog_or_cmd.type, cog_or_cmd.name).all() as rules:
            rules.setdefault(str(guild_id), {})[str(model_id)] = rule

    async def _remove_rule(self, cog_or_cmd: CogOrCommand, model_id: int, guild_id: int) -> None:
        """Remove a rule.

        Guild ID should be 0 for global rules.

        Handles config.
        """
        cog_or_cmd.obj.clear_rule_for(model_id, guild_id=guild_id)
        guild_id, model_id = str(guild_id), str(model_id)
        async with self.config.custom(cog_or_cmd.type, cog_or_cmd.name).all() as rules:
            # NOTE(review): this raises KeyError if the model has no stored rule
            # in a non-empty guild entry — confirm callers guarantee the rule exists.
            if guild_id in rules and rules[guild_id]:
                del rules[guild_id][model_id]

    async def _set_default_rule(
        self, rule: Optional[bool], cog_or_cmd: CogOrCommand, guild_id: int
    ) -> None:
        """Set the default rule.

        Guild ID should be 0 for the global default.

        Handles config.
        """
        cog_or_cmd.obj.set_default_rule(rule, guild_id)
        async with self.config.custom(cog_or_cmd.type, cog_or_cmd.name).all() as rules:
            rules.setdefault(str(guild_id), {})["default"] = rule

    async def _clear_rules(self, guild_id: int) -> None:
        """Clear all global rules or rules for a guild.

        Guild ID should be 0 for global rules.

        Handles config.
        """
        self.bot.clear_permission_rules(guild_id)
        for category in (COG, COMMAND):
            async with self.config.custom(category).all() as all_rules:
                for name, rules in all_rules.items():
                    rules.pop(str(guild_id), None)

    async def _permissions_acl_set(
        self, ctx: commands.Context, guild_id: int, update: bool
    ) -> None:
        """Set rules from a YAML file and handle response to users too."""
        if not ctx.message.attachments:
            await ctx.send(_("You must upload a file."))
            return

        try:
            await self._yaml_set_acl(ctx.message.attachments[0], guild_id=guild_id, update=update)
        except yaml.MarkedYAMLError as e:
            await ctx.send(_("Invalid syntax: ") + str(e))
        except SchemaError as e:
            await ctx.send(
                _("Your YAML file did not match the schema: ") + translate(e.errors[-1])
            )
        else:
            await ctx.send(_("Rules set."))

    async def _yaml_set_acl(self, source: discord.Attachment, guild_id: int, update: bool) -> None:
        """Set rules from a YAML file."""
        with io.BytesIO() as fp:
            await source.save(fp)
            rules = yaml.safe_load(fp)

        if rules is None:
            rules = {}
        YAML_SCHEMA.validate(rules)

        if update is False:
            await self._clear_rules(guild_id)

        for category, getter in ((COG, self.bot.get_cog), (COMMAND, self.bot.get_command)):
            rules_dict = rules.get(category)
            if not rules_dict:
                continue
            conf = self.config.custom(category)
            for cmd_name, cmd_rules in rules_dict.items():
                cmd_rules = {str(model_id): rule for model_id, rule in cmd_rules.items()}
                await conf.set_raw(cmd_name, str(guild_id), value=cmd_rules)
                cmd_obj = getter(cmd_name)
                if cmd_obj is not None:
                    self._load_rules_for(cmd_obj, {guild_id: cmd_rules})

    async def _yaml_get_acl(self, guild_id: int) -> discord.File:
        """Get a YAML file for all rules set in a guild."""
        guild_rules = {}
        for category in (COG, COMMAND):
            guild_rules.setdefault(category, {})
            rules_dict = await self.config.custom(category).all()
            for cmd_name, cmd_rules in rules_dict.items():
                model_rules = cmd_rules.get(str(guild_id))
                if model_rules is not None:
                    guild_rules[category][cmd_name] = dict(_int_key_map(model_rules.items()))

        fp = io.BytesIO(yaml.dump(guild_rules, default_flow_style=False).encode("utf-8"))
        return discord.File(fp, filename="acl.yaml")

    @staticmethod
    async def _confirm(ctx: commands.Context) -> bool:
        """Ask "Are you sure?" and get the response as a bool."""
        # Prefer reaction-based confirmation; fall back to a typed (y/n)
        # answer when the bot can't add reactions in this channel.
        if ctx.guild is None or ctx.guild.me.permissions_in(ctx.channel).add_reactions:
            msg = await ctx.send(_("Are you sure?"))
            # noinspection PyAsyncCall
            task = start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS, ctx.bot.loop)
            pred = ReactionPredicate.yes_or_no(msg, ctx.author)
            try:
                await ctx.bot.wait_for("reaction_add", check=pred, timeout=30)
            except asyncio.TimeoutError:
                await ctx.send(_("Response timed out."))
                return False
            else:
                task.cancel()
                agreed = pred.result
            finally:
                await msg.delete()
        else:
            await ctx.send(_("Are you sure? (y/n)"))
            pred = MessagePredicate.yes_or_no(ctx)
            try:
                await ctx.bot.wait_for("message", check=pred, timeout=30)
            except asyncio.TimeoutError:
                await ctx.send(_("Response timed out."))
                return False
            else:
                agreed = pred.result
        if agreed is False:
            await ctx.send(_("Action cancelled."))
        return agreed

    async def initialize(self) -> None:
        """Initialize this cog.

        This will load all rules from config onto every currently
        loaded command.
        """
        await self._maybe_update_schema()
        await self._load_all_rules()

    async def _maybe_update_schema(self) -> None:
        """Maybe update rules set by config prior to permissions 1.0.0."""
        if await self.config.version():
            return
        old_config = await self.config.all_guilds()
        old_config[GLOBAL] = await self.config.all()
        new_cog_rules, new_cmd_rules = self._get_updated_schema(old_config)
        await self.config.custom(COG).set(new_cog_rules)
        await self.config.custom(COMMAND).set(new_cmd_rules)
        await self.config.version.set(__version__)

    # Type aliases for the pre-1.0.0 and current config layouts.
    _OldConfigSchema = Dict[int, Dict[str, Dict[str, Dict[str, Dict[str, List[int]]]]]]
    _NewConfigSchema = Dict[str, Dict[int, Dict[str, Dict[int, bool]]]]

    @staticmethod
    def _get_updated_schema(
        old_config: _OldConfigSchema
    ) -> Tuple[_NewConfigSchema, _NewConfigSchema]:
        # Prior to 1.0.0, the schema was in this form for both global
        # and guild-based rules:
        # "owner_models"
        # -> "cogs"
        # -> Cog names...
        # -> "allow"
        # -> [Model IDs...]
        # -> "deny"
        # -> [Model IDs...]
        # -> "default"
        # -> "allow"|"deny"
        # -> "commands"
        # -> Command names...
        # -> "allow"
        # -> [Model IDs...]
        # -> "deny"
        # -> [Model IDs...]
        # -> "default"
        # -> "allow"|"deny"

        new_cog_rules = {}
        new_cmd_rules = {}
        for guild_id, old_rules in old_config.items():
            if "owner_models" not in old_rules:
                continue
            old_rules = old_rules["owner_models"]
            for category, new_rules in zip(("cogs", "commands"), (new_cog_rules, new_cmd_rules)):
                if category in old_rules:
                    for name, rules in old_rules[category].items():
                        these_rules = new_rules.setdefault(name, {})
                        guild_rules = these_rules.setdefault(str(guild_id), {})
                        # Since allow rules would take precedence if the same model ID
                        # sat in both the allow and deny list, we add the deny entries
                        # first and let any conflicting allow entries overwrite.
                        for model_id in rules.get("deny", []):
                            guild_rules[str(model_id)] = False
                        for model_id in rules.get("allow", []):
                            guild_rules[str(model_id)] = True
                        if "default" in rules:
                            default = rules["default"]
                            if default == "allow":
                                guild_rules["default"] = True
                            elif default == "deny":
                                guild_rules["default"] = False
        return new_cog_rules, new_cmd_rules

    async def _load_all_rules(self):
        """Load all of this cog's rules into loaded commands and cogs."""
        for category, getter in ((COG, self.bot.get_cog), (COMMAND, self.bot.get_command)):
            all_rules = await self.config.custom(category).all()
            for name, rules in all_rules.items():
                obj = getter(name)
                if obj is None:
                    continue
                self._load_rules_for(obj, rules)

    @staticmethod
    def _load_rules_for(
        cog_or_command: Union[commands.Command, commands.Cog],
        rule_dict: Dict[Union[int, str], Dict[Union[int, str], bool]],
    ) -> None:
        """Load the rules into a command or cog object.

        rule_dict should be a dict mapping Guild IDs to Model IDs to
        rules.
        """
        for guild_id, guild_dict in _int_key_map(rule_dict.items()):
            for model_id, rule in _int_key_map(guild_dict.items()):
                if model_id == "default":
                    cog_or_command.set_default_rule(rule, guild_id=guild_id)
                elif rule is True:
                    cog_or_command.allow_for(model_id, guild_id=guild_id)
                elif rule is False:
                    cog_or_command.deny_to(model_id, guild_id=guild_id)

    def __unload(self) -> None:
        self.bot.remove_listener(self.cog_added, "on_cog_add")
        self.bot.remove_listener(self.command_added, "on_command_add")
        self.bot.loop.create_task(self._unload_all_rules())

    async def _unload_all_rules(self) -> None:
        """Unload all rules set by this cog.

        This is done instead of just clearing all rules, which could
        clear rules set by other cogs.
        """
        for category, getter in ((COG, self.bot.get_cog), (COMMAND, self.bot.get_command)):
            all_rules = await self.config.custom(category).all()
            for name, rules in all_rules.items():
                obj = getter(name)
                if obj is None:
                    continue
                self._unload_rules_for(obj, rules)

    @staticmethod
    def _unload_rules_for(
        cog_or_command: Union[commands.Command, commands.Cog],
        rule_dict: Dict[Union[int, str], Dict[Union[int, str], bool]],
    ) -> None:
        """Unload the rules from a command or cog object.

        rule_dict should be a dict mapping Guild IDs to Model IDs to
        rules.
        """
        for guild_id, guild_dict in _int_key_map(rule_dict.items()):
            for model_id in guild_dict.keys():
                if model_id == "default":
                    cog_or_command.set_default_rule(None, guild_id=guild_id)
                else:
                    cog_or_command.clear_rule_for(int(model_id), guild_id=guild_id)


def _int_key_map(items_view: ItemsView[str, Any]) -> Iterator[Tuple[Union[str, int], Any]]:
    # Config stores all keys as strings; convert ID keys back to ints while
    # passing the special "default" key through unchanged.
    for k, v in items_view:
        if k == "default":
            yield k, v
        else:
            yield int(k), v
PypiClean
/LEPL-5.1.3.zip/LEPL-5.1.3/src/lepl/regexp/_test/unicode.py
"""Tests for LEPL's Unicode regexp compiler, NFA and DFA back-ends."""

from unittest import TestCase
#from logging import basicConfig, DEBUG

from lepl import RegexpError, DEFAULT_STREAM_FACTORY
from lepl.regexp.core import NfaGraph, NfaToDfa, Compiler
from lepl.regexp.unicode import UnicodeAlphabet
from lepl.stream.simple import StringHelper
from lepl.support.lib import fmt

# pylint: disable-msg=C0103, C0111, C0301, R0201, R0904
# (dude this is just a test)

UNICODE = UnicodeAlphabet.instance()


def _test_parser(regexp):
    """Compile *regexp* over the Unicode alphabet under a single 'label' group."""
    return Compiler.single(UNICODE, regexp, 'label')


def label(text):
    """Expected canonical printed form of a single labelled expression."""
    return fmt('(?P<label>{0!s})', text)


class CharactersTest(TestCase):
    """Parsed expressions should round-trip to a canonical string form."""

    def test_unicode_dot(self):
        #basicConfig(level=DEBUG)
        c = _test_parser('.')
        assert label('.') == str(c), str(c)
        c = _test_parser('.\\.')
        assert label('.\\.') == str(c), str(c)

    def test_brackets(self):
        #basicConfig(level=DEBUG)
        c = _test_parser('a')
        assert label('a') == str(c), str(c)
        c = _test_parser('[ac]')
        assert label('[ac]') == str(c), str(c)
        c = _test_parser('[a-c]')
        assert label('[a-c]') == str(c), str(c)
        c = _test_parser('[a-cp-q]')
        assert label('[a-cp-q]') == str(c), str(c)
        c = _test_parser(r'\\')
        assert label(r'\\') == str(c), str(c)
        c = _test_parser(r'\-')
        assert label(r'\-') == str(c), str(c)
        c = _test_parser(r'[\\-x]')
        assert label(r'[\\-x]') == str(c), str(c)
        # Ranges and single characters are normalised into sorted order.
        c = _test_parser('[a-bq,]')
        assert label('[,a-bq]') == str(c), str(c)
        c = _test_parser('[a-b,q]')
        assert label('[,a-bq]') == str(c), str(c)
        c = _test_parser('[,a-bq]')
        assert label('[,a-bq]') == str(c), str(c)
        c = _test_parser('[^a]')
        # Narrow and wide Unicode builds print a different upper bound.
        assert (r'(?P<label>[\x00-`b-\uffff])' == str(c) or
                r'(?P<label>[\x00-`b-\U0010ffff])' == str(c)), str(c)

    def test_merge(self):
        # Overlapping and adjacent ranges are merged into minimal ranges.
        c = _test_parser('[a-ce-g]')
        assert label('[a-ce-g]') == str(c), str(c)
        c = _test_parser('[a-cd-f]')
        assert label('[a-f]') == str(c), str(c)
        c = _test_parser('[a-cc-e]')
        assert label('[a-e]') == str(c), str(c)
        c = _test_parser('[a-cb-d]')
        assert label('[a-d]') == str(c), str(c)
        c = _test_parser('[a-ca-c]')
        assert label('[a-c]') == str(c), str(c)
        c = _test_parser('[a-a]')
        assert label('a') == str(c), str(c)
        c = _test_parser('[e-ga-c]')
        assert label('[a-ce-g]') == str(c), str(c)
        c = _test_parser('[d-fa-c]')
        assert label('[a-f]') == str(c), str(c)
        c = _test_parser('[c-ea-c]')
        assert label('[a-e]') == str(c), str(c)
        c = _test_parser('[b-da-c]')
        assert label('[a-d]') == str(c), str(c)
        c = _test_parser('[a-gc-e]')
        assert label('[a-g]') == str(c), str(c)
        c = _test_parser('[c-ea-g]')
        assert label('[a-g]') == str(c), str(c)
        c = _test_parser('[a-eg]')
        assert label('[a-eg]') == str(c), str(c)
        c = _test_parser('[ga-e]')
        assert label('[a-eg]') == str(c), str(c)

    def test_star(self):
        c = _test_parser('a*')
        assert label('a*') == str(c), str(c)
        c = _test_parser('a(?:bc)*d')
        assert label('a(?:bc)*d') == str(c), str(c)
        c = _test_parser('a(?:bc)*d[e-g]*')
        assert label('a(?:bc)*d[e-g]*') == str(c), str(c)
        c = _test_parser('a[a-cx]*')
        assert label('a[a-cx]*') == str(c), str(c)

    def test_option(self):
        c = _test_parser('a?')
        assert label('a?') == str(c), str(c)
        c = _test_parser('a(?:bc)?d')
        assert label('a(?:bc)?d') == str(c), str(c)
        c = _test_parser('a(?:bc)?d[e-g]?')
        assert label('a(?:bc)?d[e-g]?') == str(c), str(c)
        c = _test_parser('ab?c')
        assert label('ab?c') == str(c), str(c)

    def test_choice(self):
        #basicConfig(level=DEBUG)
        c = _test_parser('(?:a*|b|[c-d])')
        assert label('(?:a*|b|[c-d])') == str(c), str(c)
        c = _test_parser('a(?:a|b)*')
        assert label('a(?:a|b)*') == str(c), str(c)
        c = _test_parser('a(?:[a-c]x|axb)*')
        assert label('a(?:[a-c]x|axb)*') == str(c), str(c)

    def test_bad_escape(self):
        #basicConfig(level=DEBUG)
        c = _test_parser(r'\+')
        assert label('\\+') == str(c), str(c)
        try:
            c = _test_parser('+')
            assert False, 'Expected error'
        except RegexpError:
            pass

    def test_bad_group(self):
        #basicConfig(level=DEBUG)
        try:
            _test_parser('(a)')
            assert False, 'Expected error'
        except SyntaxError as e:
            assert 'do not currently support matched groups' in str(e), e

    def test_escape(self):
        # Hex and Unicode escapes of the same code point are equivalent.
        c = _test_parser('\\x40')
        assert label('@') == str(c), str(c)
        c = _test_parser('\\u0040')
        assert label('@') == str(c), str(c)
        c = _test_parser('\\U00000040')
        assert label('@') == str(c), str(c)


class NfaTest(TestCase):
    """Greedy NFA matching should yield all matches, longest first."""

    def assert_matches(self, pattern, text, results):
        r = _test_parser(pattern)
        m = r.nfa().match
        s = list(m(DEFAULT_STREAM_FACTORY.from_string(text)))
        assert len(s) == len(results), s
        for (a, b) in zip(s, results):
            assert a[1] == b, a[1] + ' != ' + b

    def test_simple(self):
        #basicConfig(level=DEBUG)
        self.assert_matches('ab', 'abc', ['ab'])

    def test_star(self):
        self.assert_matches('a*b', 'aaabc', ['aaab'])

    def test_plus(self):
        self.assert_matches('[a-z]+', 'abc', ['abc', 'ab', 'a'])

    def test_choice(self):
        self.assert_matches('(?:a|b)', 'ac', ['a'])

    def test_star_choice(self):
        self.assert_matches('(?:a|b)*', 'aababbac',
                            ['aababba', 'aababb', 'aabab', 'aaba', 'aab',
                             'aa', 'a', ''])

    def test_multiple_choice(self):
        #basicConfig(level=DEBUG)
        self.assert_matches('(?:a|ab)b', 'abb', ['ab', 'abb'])

    def test_range(self):
        self.assert_matches('[abc]*', 'bbcx', ['bbc', 'bb', 'b', ''])
        self.assert_matches('[A-Z][a-z]*', 'Abc', ['Abc', 'Ab', 'A'])

    def test_range_overlap(self):
        '''
        Matches with 'b' are duplicated, since it appears in both ranges.
        '''
        self.assert_matches('(?:[ab]|[bc])*', 'abc',
                            ['abc', 'ab', 'abc', 'ab', 'a', ''])

    def test_complex(self):
        #basicConfig(level=DEBUG)
        self.assert_matches('a(?:[x-z]|a(?:g|b))*(?:u|v)p', 'ayagxabvp',
                            ['ayagxabvp'])


class DfaGraphTest(TestCase):
    """The NFA-to-DFA conversion should produce the expected graphs."""

    def assert_dfa_graph(self, regexp, desc):
        r = _test_parser(regexp)
        nfa = NfaGraph(UNICODE)
        r.expression.build(nfa, nfa.new_node(), nfa.new_node())
        dfa = NfaToDfa(nfa, UNICODE).dfa
        # `desc` is either a single expected string or a tuple of acceptable
        # alternatives (narrow vs wide Unicode builds print different ranges).
        # BUG FIX: the original used `assert str(dfa) in desc` inside a bare
        # try/except — for a plain string that performs *substring* matching
        # (so a partial graph description could pass) and the bare except
        # also swallowed KeyboardInterrupt.  Normalise to a tuple and require
        # an exact match instead.
        if isinstance(desc, str):
            desc = (desc,)
        assert str(dfa) in desc, str(dfa)

    def test_dfa_no_empty(self):
        self.assert_dfa_graph('abc',
            '0: [0] a->1; 1: [3] b->2; 2: [4] c->3; 3(label): [1, 2]')

    def test_dfa_simple_repeat(self):
        self.assert_dfa_graph('ab*c',
            '0: [0] a->1; 1: [3, 4, 5] c->2,b->3; 2(label): [1, 2]; 3: [4, 5] c->2,b->3')

    def test_dfa_simple_choice(self):
        self.assert_dfa_graph('a(?:b|c)',
            '0: [0] a->1; 1: [3, 4] [b-c]->2; 2(label): [1, 2]')

    def test_dfa_repeated_choice(self):
        self.assert_dfa_graph('a(?:b|cd)*e',
            '0: [0] a->1; 1: [3, 4, 5, 6] e->2,c->3,b->4; 2(label): [1, 2]; 3: [7] d->4; 4: [4, 5, 6] e->2,c->3,b->4')

    def test_dfa_overlapping_choice(self):
        self.assert_dfa_graph('a(?:bcd|bce)',
            '0: [0] a->1; 1: [3, 4] b->2; 2: [5, 7] c->3; 3: [8, 6] [d-e]->4; 4(label): [1, 2]')

    def test_dfa_conflicting_choice(self):
        self.assert_dfa_graph('a(?:bc|b*d)',
            '0: [0] a->1; 1: [3, 4, 6, 7] b->2,d->3; 2: [5, 6, 7] [c-d]->3,b->4; 3(label): [1, 2]; 4: [6, 7] d->3,b->4')

    def test_dfa_conflicting_choice_2(self):
        self.assert_dfa_graph('a(?:bb|b*c)',
            '0: [0] a->1; 1: [3, 4, 6, 7] b->2,c->3; 2: [5, 6, 7] c->3,b->4; 3(label): [1, 2]; 4(label): [1, 2, 6, 7] c->3,b->5; 5: [6, 7] c->3,b->5')

    def test_dfa_dot_option(self):
        '''
        This one's nice - the 'a' completely disappears.
        '''
        #basicConfig(level=DEBUG)
        self.assert_dfa_graph('.*a?b',
            (r'0: [0, 3, 4, 5] [\x00-ac-\uffff]->1,b->2; 1: [3, 4, 5] [\x00-ac-\uffff]->1,b->2; 2(label): [1, 2, 3, 4, 5] [\x00-ac-\uffff]->1,b->2',
             r'0: [0, 3, 4, 5] [\x00-ac-\U0010ffff]->1,b->2; 1: [3, 4, 5] [\x00-ac-\U0010ffff]->1,b->2; 2(label): [1, 2, 3, 4, 5] [\x00-ac-\U0010ffff]->1,b->2'))


class DfaTest(TestCase):
    """DFA matching should return the single longest match."""

    def assert_dfa(self, regexp, text, results):
        r = _test_parser(regexp).dfa().match((0, StringHelper(text)))
        assert r[1] == results, r

    def test_simple(self):
        self.assert_dfa('abc', 'abcd', 'abc')

    def test_dot_option(self):
        self.assert_dfa('.*a?b', 'aaabc', 'aaab')

    def test_empty(self):
        self.assert_dfa('a*', 'bc', '')
        self.assert_dfa('a*', '', '')

    def test_conflicting_choice(self):
        self.assert_dfa('a(?:bc|b*d)', 'abde', 'abd')
        self.assert_dfa('a(?:bc|b*d)', 'abce', 'abc')

    def test_space_star(self):
        self.assert_dfa(' *', ' a', ' ')
PypiClean
/EPN-0.1.1.zip/EPN-0.1.1/epn/epn.py
import sys import random import math import sets import pydot import copy class Node(object): def __init__(self, name): self.name = name def __str__(self): return self.name def __cmp__(self,other): return self.__hash__() != other.__hash__() def __repr__(self): return self.name class Place(Node): def __init__(self, name, tokens=0): self.name = name self.tokens = tokens super(Place, self).__init__(name) def __repr__(self): return "*P* "+self.name def __hash__(self): return hash("place"+self.name) class Transition(Node): def __init__(self, name, parameter=0): self.parameter = parameter self.name = name super(Transition, self).__init__(name) def __repr__(self): return "*T* "+self.name def __hash__(self): return hash("transition"+self.name) class ResizablePlace(Node): def __init__(self, name, tokens=0): self.name = name self.tokens = tokens super(Place, self).__init__(name) def __repr__(self): return "*RP* "+str(self.name) def __hash__(self): return hash("Resizableplace"+self.name) class ResizableTransition(Node): def __init__(self, name, parameter=0): self.parameter = parameter self.name = name super(HiddenTransition, self).__init__(name) def __repr__(self): return "*RT* "+str(self.name) def __hash__(self): return hash("Resizabletransition"+self.name) class Arc(): def __init__(self,a,b): self.weight = 1 # default self.start = a self.end = b def increment(self): self.weight += 1 def decrement(self): self.weight -= 1 def change_weight(self,v): self.weight += v def __repr__(self): return "*ARC* ("+str(self.start)+","+str(self.end)+")" def __hash__(self): return hash("arco"+str(self.start)+str(self.end)) def __cmp__(self,other): return self.__hash__() != other.__hash__() class PetriNet(object): def __init__(self): print "New PN created" self.P = sets.Set() self.T = sets.Set() self.F = sets.Set() def get_places(self): return self.P def get_arc(self, a): for e in self.F: if e == a: return e return None def get_arcs(self): return self.F def get_transitions(self): return 
self.T def get_weight(self,a): if a in self.F: return a.weight else: print "Arc not found" return 0 def set_weight(self,a,w): if w==0: print "Weight is zero, removing arc", a self.F.remove(a) # the removal of an arc may leave an bad-formed transition, i.e., # a transition whose pre- or post-set_ is empty. # TRANSIZIONE -> POSTO if self.isTransition(a.start): if len(self.get_Preset_(a.start)) == 0 or len(self.get_Postset_(a.start)) == 0: self.safeRemoveResizableTransition(a.start) if len(self.get_ArcsConnectedToNode(a.end)) == 0: self.safeRemoveResizablePlace(a.end) # POSTO -> TRANSIZIONE else: # elimino la transizione (dovrei controllare se ha altri posti attaccati... TODO) if len(self.get_Preset_(a.end)) == 0 or len(self.get_Postset_(a.end)) == 0: self.safeRemoveResizableTransition(a.end) # elimino il posto di partenza if len(self.get_ArcsConnectedToNode(a.start)) == 0: self.safeRemoveResizablePlace(a.start) # self.removeSingletons() """ if isinstance(a.start, Transition) or isinstance(a.start, ResizableTransition): if self.get_Preset_(a.start) == []: self.safeRemoveResizableTransition(a.start) elif isinstance(a.end, Transition) or isinstance(a.end, ResizableTransition): if self.get_Postset_(a.end) == []: self.safeRemoveResizableTransition(a.end) """ """ if isinstance(a.start, Place) or isinstance(a.start, ResizablePlace): self.safeRemoveResizableTransition(a.end) if isinstance(a.end, Place) or isinstance(a.end, ResizablePlace): self.safeRemoveResizableTransition(a.end) """ """ print "K"*100 print a print self.get_ArcsConnectedToNode(a.start) print self.get_ArcsConnectedToNode(a.end) # check consistency try: if self.get_ArcsConnectedToNode(a.start) == []: self.safeRemoveResizableTransition(a.start) if self.get_ArcsConnectedToNode(a.end) == []: self.safeRemoveResizableTransition(a.end) except: pass """ return if a in self.F: print "Old weight:", a.weight a.weight = w print "New weight:", a.weight else: print "ERROR: Arc not found" def 
get_ArcsConnectedToNode(self,p): list_places = [] for a in self.F: if a.start == p or a.end == p: list_places.append(a) return list_places def add_place(self,p): if isinstance(p, Place): self.P.add(p) print "Place",p,"added" return len(self.P) def addTransition(self,t): if isinstance(t, Transition): self.T.add(t) print "Transition",t,"added" return len(self.T) def addArc(self,From=None,To=None,arc=None,weight=1): if arc!=None: From = arc.start To = arc.end if (self.is_place(From)) and (self.is_transition(To)): # print "Adding input place '", From,"' to transition '",To,"'" a = Arc(From,To) a.weight = weight self.F.add( a ) return if (self.is_transition(From)) and (self.is_place(To)): # print "Adding output place '",To,"' to transition '",From,"'" a = Arc(From,To) a.weight = weight self.F.add( a ) return def removeArc(self, a): print "Arc",a,"removed" self.F.remove(a) def addCompleteTransition(self,p1, t, p2): self.addArc( arc=Arc(p1,t) ) self.addArc( arc=Arc(t,p2) ) print "Complete transition",p1,"->",t,"->",p2,"added" return self.F class ResizablePetriNet(PetriNet): def __init__(self, nome): super(ResizablePetriNet, self).__init__() print "New RPN created" self.RP = sets.Set() self.RT = sets.Set() self.ID = nome self.FITNESS_VALUE = 0 def get_ID(self): return self.ID def subDumpToFile(self, path, G): # graph = pydot.Dot(self.ID, graph_type='digraph') #@ graph = pydot.Subgraph(self.ID, rank='same') graph = pydot.Subgraph("cluster"+self.ID) for p in self.get_Places(): graph.add_node(pydot.Node(self.ID+str(p), label=str(p), shape="circle", fixed_size="True", width="1", penwidth="2" )) for p in self.set_resizable_Places(): graph.add_node(pydot.Node(self.ID+str(p), label=str(p), shape="circle", fixed_size="True", width="1", color="darkgreen", penwidth="2" )) for t in self.get_Transitions(): graph.add_node(pydot.Node(self.ID+str(t), label=str(t), shape="rectangle", style="filled", fillcolor="grey9", fontcolor="white", width="1")) for t in 
self.set_resizable_Transitions(): graph.add_node(pydot.Node(self.ID+str(t), label=str(t), shape="rectangle", style="filled", fillcolor="lightgrey", fontcolor="black", width="1")) for a in self.set_all_Arcs(): edge = pydot.Edge( self.ID+str(a.start), self.ID+str(a.end), label=a.weight ) graph.add_edge(edge) pass G.add_subgraph(graph) def __repr__(self): return self.ID def get_all_places(self): return self.P.union(self.RP) def get_static_places(self): return self.P def get_resizable_places(self): return self.RP def get_all_transitions(self): return self.T.union(self.RT) def get_static_transitions(self): return self.T def get_hidden_transitions(self): return self.RT def get_all_arcs(self): return self.F def add_place(self,p): if isinstance(p, Place): # print p, "is a Place" super(ResizablePetriNet, self).add_place(p) return if isinstance(p, ResizablePlace): # print p, "is a ResizablePlace" self.addResizablePlace(p) def addResizablePlaces(self,pl): pl = filter( lambda x: isinstance(x, ResizablePlace), pl ) for p in pl: self.addResizablePlace(p) def safeRemoveResizablePlace(self,p): print "Removing place", p if isinstance(p, ResizablePlace): print "Okay, it's Resizable" for a in self.get_ArcsConnectedToNode(p): self.F.remove(a) print "Removing pending arc", a self.RP.remove(p) def safeRemoveResizablePlaces(self, listp): for p in listp: self.safeRemoveResizablePlace(p) def safeRemoveResizableTransition(self,t): nodi = [] for a in self.get_ArcsConnectedToNode(t): inv = None if a.start != t: inv = a.start else: inv = a.end nodi.append(inv) self.F.remove(a) self.VT.remove(t) # in case of loops, everything blows up: we use try instead. 
for n in nodi: if self.get_ArcsConnectedToNode(n) == []: if isinstance(n, ResizablePlace): try: self.RP.remove(n) except: pass def addResizablePlace(self,rp): if isinstance(rp, ResizablePlace): self.RP.add(rp) # print "Resizable place '",rp,"' added to", self.ID return self.RP def addTransition(self,t): if isinstance(t, Transition): super(ResizablePetriNet, self).addTransition(t) return if isinstance(t, ResizableTransition): self.addResizableTransition(t) def add_resizable_transition(self,vt): if isinstance(vt, ResizableTransition): self.VT.add(vt) # print "Resizable transition '",vt,"' added" return self.VT def is_place(self,x): return (isinstance(x, Place) or isinstance(x, ResizablePlace)) def is_transition(self,x): return (isinstance(x, Transition) or isinstance(x, ResizableTransition)) def add_arcs(self, arclist): for a in arclist: self.addArc(arc=a) def get_preset(self, a): if self.is_place(a): return { arco.start for arco in self.F if arco.end==a } if self.is_transition(a): return { arco.start for arco in self.F if arco.end==a } def get_postset(self, a): if self.is_place(a): return { arco.end for arco in self.F if arco.start==a } if self.is_transition(a): return { arco.end for arco in self.F if arco.start==a } def add_whole_transition(self,a,t,b): print "Adding new whole transition",a,"->",t,"->",b if a not in self.get_all_places(): self.add_place(a) if b not in self.get_all_places(): self.add_place(b) if t not in self.get_all_transitions(): self.addTransition(t) self.addArc( arc=Arc(a,t) ) self.addArc( arc=Arc(t,b) ) print "New whole transition '",a,"' -> '",t,"' -> '",b,"' created" def add_whole_transitionList(self,a,t,b): combs = [(x,t,y) for x in a for y in b] for p1,trans,p2 in combs: self.add_whole_transition(p1,trans,p2) class EvolutionaryPetriNet(object): def __init__(self, title=None): print "New empty EPN created" self.RPNS = [] self.counter_Resizableplaces = 0 self.counter_Resizabletransitions = 0 self.USE_UNIFORM_PROBABILITY = True self.O_PRE = -1 
self.O_POST = -1 self.Title = title self.POP_SIZE = 0 # ""constants"" self.USE_ROULETTE = 1 self.USE_RANKING = 2 self.USE_TOURNAMENT = 3 self.SEL_TYPE = self.USE_TOURNAMENT self.TOUR_SIZE = 2 def set_pre_order(self,n): self.O_PRE = n print " * Maximum pre-order of transitions:", self.O_PRE def set_post_order(self,n): self.O_POST = n print " * Maximum pre-order of transitions:", self.O_POST def fetch_unique_transition_ID(self): self.counter_Resizabletransitions += 1 return self.counter_Resizabletransitions def fetch_unique_place_ID(self): self.counter_Resizableplaces += 1 return self.counter_Resizableplaces def request_resizable_transition(self): nvt = ResizableTransition("vt"+str(self.fetchUniqueTransitionId())) print "New Resizable Transition requested:", nvt return nvt def request_resizable_place(self): nrp = ResizablePlace("rp"+str(self.fetchUniquePlaceId())) print "New Resizable Place requested:", nrp return nrp def addRPN(self,v): self.RPNS.append(v) print "RPN '",v,"' added to EPN" def get_RPNS(self): return self.RPNS def removeSingletons(self, i, list_places): """ If a mutation or crossover removes all arcs from/to a Resizable place contained in list_places, remove that place. """ for p in list_places: # print "Checking arcs to",p # print self.get_RPNS()[i].get_ArcsConnectedToNode(p) if len(self.get_RPNS()[i].get_ArcsConnectedToNode(p))==0: self.get_RPNS()[i].set_resizable_Places.remove(p) def randomlyModifyWeights(self, i, t): print "Mutation #8", "_"*100 random_diff = random.choice([-1,+1]) # == 0 ok? 
TODO p_arc = random.choice( self.get_RPNS()[i].get_ArcsConnectedToNode(t) ) # p_arc.weight += random_diff self.get_RPNS()[i].set_Weight(p_arc, p_arc.weight + random_diff) def cleanUpThings(self, t, idx=None): if idx!=None: self.verifyOrder(idx,t) pre = self.get_RPNS()[idx].get_Preset_(t) post = self.get_RPNS()[idx].get_Postset_(t) self.removeSingletons(idx,pre) self.removeSingletons(idx,post) def actuallyMutate(self, v): index = 0 if isinstance(v, ResizablePetriNet): index = self.get_RPNS().index(v) else: index = v p1, t, p2, existing_triple = self.generateTriple(index) # mutation introduces a new transition (p1,t),(t,p2) # this could invalidate pre- and post-order conditions # moreover, we must identify mutation #8 TODO if existing_triple: self.randomlyModifyWeights(index,t) else: self.get_RPNS()[index].add_whole_transition(p1,t,p2) self.cleanUpThings(t,idx=index) """ self.verifyOrder(index,t) pre = self.get_RPNS()[index].get_Preset_(t) post = self.get_RPNS()[index].get_Postset_(t) self.removeSingletons(index,pre) self.removeSingletons(index,post) """ def mutate(self, v): """ Mutate an arbitrary RPN "v". This method mimickes a C++ overload. Returns the preset_ and postset_ modified by mutation. """ if isinstance(v, int): if len(self.get_RPNS())-1<v: print "ERROR: RPN",v,"does not exist!" exit(-2) if isinstance(v, ResizablePetriNet): if v not in self.get_RPNS(): print "ERRO: RPN",v,"does not exist!" 
exit(-2) self.actuallyMutate(v) def verifyPreorder(self,i,t): if self.O_PRE == -1: return while(True): total = 0 preset_ = self.get_RPNS()[i].get_Preset_(t) for p in preset_: total += self.get_RPNS()[i].get_Weight( Arc(p,t) ) if total <= self.O_PRE: return else: print "Fixing pre-order of transition",t," (",total,")" picked_arc = Arc(random.choice(list(preset_)),t) pa = self.get_RPNS()[i].get_Arc(picked_arc) w = pa.weight self.get_RPNS()[i].set_Weight( pa , w-1 ) """print picked_arc w = self.get_RPNS()[i].get_Weight( picked_arc ) self.get_RPNS()[i].set_Weight( picked_arc , w-1 ) """ if self.get_RPNS()[i].get_ArcsConnectedToNode(picked_arc.start) == []: print "Singleton detected: removing", picked_arc.start # TODO: check whether it is Resizable or not try: self.get_RPNS()[i].RP.remove(picked_arc.start) except: pass def verifyPreorderOld(self,i,t): if self.O_PRE == -1: return while(True): total = 0 preset_ = self.get_RPNS()[i].get_Preset_(t) for p in preset_: total += self.get_RPNS()[i].get_Weight( Arc(p,t) ) if total<=self.O_PRE: return else: print "Fixing pre-order (",total,")" total_arcs = len( preset_ ) picked_arc = random.randint(0, total_arcs) for n,a in enumerate(preset_): if n==picked_arc: break w = self.get_RPNS()[i].get_Weight( Arc(a,t) ) self.get_RPNS()[i].set_Weight( Arc(a,t) , w-1 ) if self.get_RPNS()[i].get_ArcsConnectedToNode(a) == []: print "Singleton detected: removing", a # TODO: check whether it is Resizable or not self.get_RPNS()[i].RP.remove(a) def verifyPostorder(self,i,t): if self.O_POST == -1: return while(True): total = 0 postset_ = self.get_RPNS()[i].get_Postset_(t) for p in postset_: total += self.get_RPNS()[i].get_Weight( Arc(t,p) ) if total<=self.O_POST: return else: print "Fixing post-order (",total,")" total_arcs = len( postset_ ) picked_arc = random.randint(0, total_arcs) for n,a in enumerate(postset_): if n==picked_arc: break w = self.get_RPNS()[i].get_Weight( Arc(t,a) ) self.get_RPNS()[i].set_Weight( Arc(t,a) , w-1 ) if 
self.get_RPNS()[i].get_ArcsConnectedToNode(a) == []: print "Singleton detected: removing" # TODO: check whether it is Resizable or not self.get_RPNS()[i].RP.remove(a) def verifyOrder(self,i,t): self.verifyPreorder(i,t) self.verifyPostorder(i,t) def prob_distribution_newplace(self, i): tp = len(self.get_RPNS()[i].set_all_places()) if self.USE_UNIFORM_PROBABILITY: ptb = 1./(tp+1) else: pass # TODO return ptb def generateTriple(self, i): """ This methods generates a 3-ple for the mutation, according to the probability distributions defined by programmer (default = uniform). It exploits existing places and Resizable places from i-th RPN and P^\infty. It exploits existing transitions and Resizable transitions from i-th RPN and T^\infty. """ new_p1 = False new_p2 = False total_places = len(self.get_RPNS()[i].set_all_places()) total_prob = total_places +1 choice = random.uniform(0,total_prob) choice_index = int(choice) if choice_index>=total_places: p1 = self.requestResizablePlace() new_p1 = True else: p1 = list(self.get_RPNS()[i].set_all_places())[choice_index] print "P1 = ", p1 # le transizioni che vengono toccate sono SOLO quelle Resizablei! 
total_transitions = len(self.get_RPNS()[i].set_resizable_Transitions()) total_prob = total_transitions +1 choice = random.uniform(0,total_prob) choice_index = int(choice) if choice_index>=total_transitions: t = self.requestResizableTransition() self.get_RPNS()[i].addResizableTransition(p1) else: t = list(self.get_RPNS()[i].set_resizable_Transitions())[choice_index] print "T = ", t total_places = len(self.get_RPNS()[i].set_all_places()) total_prob = total_places +1 choice = random.uniform(0,total_prob) choice_index = int(choice) if choice_index>=total_places: p2 = self.requestResizablePlace() new_p2 = True # self.get_RPNS()[i].addResizablePlace(p1) else: p2 = list(self.get_RPNS()[i].set_all_places())[choice_index] print "P2 = ", p2 if new_p1: self.get_RPNS()[i].addResizablePlace(p1) if new_p2: self.get_RPNS()[i].addResizablePlace(p2) existing_transition = Arc(p1,t) in self.get_RPNS()[i].set_all_Arcs() and Arc(t,p2) in self.get_RPNS()[i].set_all_Arcs() # first_half = self.get_RPNS()[i].set_all_Arcs().intersection( Arc(p1,t) ) # second_half = first_half.intersection( Arc(t,p2) ) return p1,t,p2, existing_transition def dump_to_file(self, path): graph = pydot.Dot(self.Title, graph_type='digraph') for v in self.RPNS: v.subDumpToFile(path, graph) graph.write_png(path) def mutate_all_RPNS(self): print "*"*100 for v in self.get_RPNS(): print "Mutation individual", v E.mutate(v) def actual_crossover(self, t1, t2, v1, v2, debug=False): print "Exchange", t1, "and", t2 # step 1: identify p_e and p_b for each transition preset_1 = v1.get_Preset_(t1) postset_1 = v1.get_Postset_(t1) pb1 = random.sample(preset_1, 1)[0] pe1 = random.sample(postset_1, 1)[0] print "Pb1:", pb1, "Pe1:", pe1 preset_2 = v2.get_Preset_(t2) postset_2 = v2.get_Postset_(t2) pb2 = random.sample(preset_2, 1)[0] pe2 = random.sample(postset_2, 1)[0] print "Pb2:", pb2, "Pe2:", pe2 # step 1: copy P+ (renaming) v2.addResizableTransition( t1 ) v1.addResizableTransition( t2 ) # E.dumpToFile("prova0_trans.png") """ 
COPY SUBSTRUCTURES """ for p in preset_1: e = v1.get_Arc(Arc(p,t1)) if isinstance(p, ResizablePlace): if p != pb1: pnew = copy.deepcopy(p) if debug: pnew.name = "rp"+str(self.fetchUniquePlaceId()) + "(" + pnew.name + ")" else: pnew.name = "rp"+str(self.fetchUniquePlaceId()) v2.addResizablePlace( pnew ) v2.addArc( From=pnew, To=t1, weight=e.weight ) else: v2.addArc( From=pb2, To=t1, weight=e.weight ) else: v2.addArc( From=p, To=t1, weight=e.weight ) # E.dumpToFile("prova0_trans2.png") for p in preset_2: e = v2.get_Arc(Arc(p,t2)) if isinstance(p, ResizablePlace): if p != pb2: pnew = copy.deepcopy(p) if debug: pnew.name = "rp"+str(self.fetchUniquePlaceId()) + "(" + pnew.name + ")" else: pnew.name = "rp"+str(self.fetchUniquePlaceId()) v1.addResizablePlace( pnew ) v1.addArc( pnew, t2, weight=e.weight ) else: v1.addArc( pb1, t2, weight=e.weight ) else: v1.addArc( p, t2, weight=e.weight ) # E.dumpToFile("prova0_trans3.png") for p in postset_1: e = v1.get_Arc(Arc(t1,p)) if isinstance(p, ResizablePlace): if p != pe1: pnew = copy.deepcopy(p) if debug: pnew.name = "rp"+str(self.fetchUniquePlaceId()) + "(" + pnew.name + ")" else: pnew.name = "rp"+str(self.fetchUniquePlaceId()) v2.addResizablePlace( pnew ) v2.addArc( t1, pnew, weight=e.weight ) else: v2.addArc( t1, pe2, weight=e.weight ) else: v2.addArc( t1, p, weight=e.weight ) # E.dumpToFile("prova0_trans3.png") for p in postset_2: e = v2.get_Arc(Arc(t2,p)) if isinstance(p, ResizablePlace): if p != pe2: pnew = copy.deepcopy(p) if debug: pnew.name = "rp"+str(self.fetchUniquePlaceId()) + "(" + pnew.name + ")" else: pnew.name = "rp"+str(self.fetchUniquePlaceId()) v1.addResizablePlace( pnew ) v1.addArc( t2, pnew, weight=e.weight ) else: v1.addArc( t2, pe1, weight=e.weight ) else: v1.addArc( t2, p, weight=e.weight ) # E.dumpToFile("prova0_trans4.png") """ FINE: RIPULIAMO LE STRUTTURE """ v1.safeRemoveResizableTransition(t1) v2.safeRemoveResizableTransition(t2) def crossover(self, v1, v2, debug=False): print "X"*100 # step 1: 
choose two Resizable transitions pick_1 = random.sample(v1.set_resizable_Transitions(),1)[0] pick_2 = random.sample(v2.set_resizable_Transitions(),1)[0] # step 2: exchange substructures self.actualCrossover(pick_1, pick_2, v1, v2, debug=debug) def selection(self): if self.SEL_TYPE == self.USE_TOURNAMENT: self.tournament() elif self.SEL_TYPE == self.USE_RANKING: self.ranking() else: self.roulette() def tournament(self): selected_population = [] # for each individual for i in range(self.POP_SIZE): sampled = random.sample(range(0,self.POP_SIZE), self.TOUR_SIZE) print sampled best = 0 best_fitness = self.RPNS[0] for s in sampled: if self.RPNS[s].FITNESS_VALUE < best_fitness: best_fitness = self.RPNS[s].FITNESS_VALUE best = s selected_population.append( self.RPNS[best] ) def ranking(self): pass def roulette(self): pass def set_mutations_per_creation(self, n): print " * Will use",n,"mutations to create each HPN" self.mutationsPerCreation = n def set_population_size(self, s): if self.MODEL == None: print "ERROR: please specify a model before initializing the population" exit(-4) self.POP_SIZE = s print " * Population size set_ to", self.POP_SIZE self.createPopulation() def createPopulation(self): if self.POP_SIZE == 0: print "ERROR: please specify the population's size" exit(-5) for i in range(self.POP_SIZE): # p = ResizablePetriNet("V"+str(i)) p = copy.deepcopy(self.MODEL) p.ID = "V"+str(i) self.addRPN(p) print " *", self.POP_SIZE, "RPNs added to EPN" def use_tournament(self, size=2): self.SEL_TYPE = self.USE_TOURNAMENT self.TOUR_SIZE = size print " * Selection mechanism: tournament with tournament size", size def use_ranking(self): self.SEL_TYPE = self.USE_RANKING def use_roulette(self): self.SEL_TYPE = self.USE_ROULETTE def set_static_model(self,sm): self.MODEL = sm if __name__ == '__main__': random.seed(5) VM = ResizablePetriNet("model") VM.add_whole_transition( Place("input"), Transition("transizione"), Place("output") ) E = EvolutionaryPetriNet() 
E.set_StaticModel(VM) E.set__population_size(10) E.uset_ournament(size=2) E.set_PreOrder(2) """ p0 = ResizablePetriNet("V0") p1 = ResizablePetriNet("V1") p2 = ResizablePetriNet("V2") """ """ vt0 = E.requestResizableTransition() vp0 = E.requestResizablePlace() vp1 = E.requestResizablePlace() vp2 = E.requestResizablePlace() vp3 = E.requestResizablePlace() vp4 = E.requestResizablePlace() vp5 = E.requestResizablePlace() p1.add_whole_transitionList([vp0], vt0, [vp2, vp3]) p1.add_whole_transitionList([vp2], Transition("vera"), [vp1,Place("output")]) p2.add_whole_transitionList([vp4, vp5], ResizableTransition("transizione"), [Place("output")]) """ """ vt0 = E.requestResizableTransition() vt1 = E.requestResizableTransition() vp0 = E.requestResizablePlace() vp1 = E.requestResizablePlace() p1.add_whole_transitionList( [vp0], vt0, [Place("output")] ) p1.add_place(Place("input")) p2.add_whole_transitionList( [Place("input")], vt1, [vp1] ) p2.add_place(Place("output")) E.addRPN(p1) E.addRPN(p2) """ E.dumpToFile("EPN.png") """ for i in range(1,2): print "Iteration",i,"^"*100 # E.set_RandomFitness() E.selection() E.crossover(p1,p2,debug=False) E.dumpToFile("prova"+str(i)+"a.png") E.mutateAllRPNS() E.dumpToFile("prova"+str(i)+"b.png") """
PypiClean
/GoogleAppEngineMapReduce-1.9.22.0.tar.gz/GoogleAppEngineMapReduce-1.9.22.0/mapreduce/api/map_job/output_writer.py
"""Output writer interface for map job.""" from mapreduce import errors from mapreduce import json_util from mapreduce import shard_life_cycle # pylint: disable=protected-access # pylint: disable=invalid-name # Counter name for number of bytes written. COUNTER_IO_WRITE_BYTES = "io-write-bytes" # Counter name for time spent writing data in msec COUNTER_IO_WRITE_MSEC = "io-write-msec" class OutputWriter(shard_life_cycle._ShardLifeCycle, json_util.JsonMixin): """Abstract base class for output writers. OutputWriter's lifecycle: 0) validate() is called to validate JobConfig. 1) create() is called, which should create a new instance of output writer for the given shard 2) beging_shard/end_shard/begin_slice/end_slice are called at the time implied by the names. 3) from_json()/to_json() are used to persist writer's state across multiple slices. 4) write() method is called with data yielded by JobConfig.mapper. """ def __init__(self): self._slice_ctx = None @classmethod def validate(cls, job_config): """Validates relevant parameters. This method can validate fields which it deems relevant. Args: job_config: an instance of map_job.JobConfig. Raises: errors.BadWriterParamsError: required parameters are missing or invalid. """ if job_config.output_writer_cls != cls: raise errors.BadWriterParamsError( "Expect output writer class %r, got %r." % (cls, job_config.output_writer_cls)) @classmethod def from_json(cls, state): """Creates an instance of the OutputWriter for the given json state. No RPC should take place in this method. Use start_slice/end_slice instead. Args: state: The output writer state as returned by to_json. Returns: An instance of the OutputWriter that can resume writing. """ raise NotImplementedError("from_json() not implemented in %s" % cls) def to_json(self): """Returns writer state. No RPC should take place in this method. Use start_slice/end_slice instead. Returns: A json-serializable state for the OutputWriter instance. 
""" raise NotImplementedError("to_json() not implemented in %s" % type(self)) @classmethod def create(cls, shard_ctx): """Create new writer for a shard. Args: shard_ctx: map_job_context.ShardContext for this shard. """ raise NotImplementedError("create() not implemented in %s" % cls) def write(self, data): """Write data. Args: data: actual data yielded from handler. User is responsible to match the type expected by this writer to the type yielded by mapper. """ raise NotImplementedError("write() not implemented in %s" % self.__class__) @classmethod def commit_output(cls, shard_ctx, iterator): """Saves output references when a shard finishes. Inside end_shard(), an output writer can optionally use this method to persist some references to the outputs from this shard (e.g a list of filenames) Args: shard_ctx: map_job_context.ShardContext for this shard. iterator: an iterator that yields json serializable references to the outputs from this shard. Contents from the iterator can be accessible later via map_job.Job.get_outputs. """ # We accept an iterator just in case output references get too big. outs = tuple(iterator) shard_ctx._state.writer_state["outs"] = outs def begin_slice(self, slice_ctx): """Keeps an internal reference to slice_ctx. Args: slice_ctx: SliceContext singleton instance for this slice. """ self._slice_ctx = slice_ctx def end_slice(self, slice_ctx): """Drops the internal reference to slice_ctx. Args: slice_ctx: SliceContext singleton instance for this slice. """ self._slice_ctx = None # TODO(user): Update recovery related method to not use *_spec. def _supports_slice_recovery(self, mapper_spec): """Whether this output writer supports slice recovery. Args: mapper_spec: instance of model.MapperSpec. Returns: True if it does. False otherwise. """ return False # pylint: disable=unused-argument def _recover(self, mr_spec, shard_number, shard_attempt): """Create a new output writer instance from the old one. 
This method is called when _supports_slice_recovery returns True, and when there is a chance the old output writer instance is out of sync with its storage medium due to a retry of a slice. _recover should create a new instance based on the old one. When end_shard is called on the new instance, it could combine valid outputs from all instances to generate the final output. How the new instance maintains references to previous outputs is up to implementation. Any exception during recovery is subject to normal slice/shard retry. So recovery logic must be idempotent. Args: mr_spec: an instance of model.MapreduceSpec describing current job. shard_number: int shard number. shard_attempt: int shard attempt. Returns: a new instance of output writer. """ raise NotImplementedError()
PypiClean
/MCRAMP-0.0.3-py3-none-any.whl/mcramp/geom/box.py
from .gprim import GPrim #pylint: disable=E0401

import numpy as np
import pyopencl as cl
import pyopencl.array as clarr

import os


class GBox(GPrim):
    """Geometry kernel for 'box' geometry.

    Intersects with the exterior of the box, i.e. the first intersection
    must occur at positive time for scattering to take place.

    Parameters
    ----------
    width : float
        The width of the box
    height : float
        The height of the box
    depth : float
        The depth of the box

    Notes
    -----
    Intersection 1 : first point of intersection with the box geometry -
    'entering' the box.
    Intersection 2 : second point of intersection with the box geometry -
    'exiting' the box.
    """

    def __init__(self, width=0, height=0, depth=0, idx=0, ctx=None):
        # Dimensions are stored as 32-bit floats so they can be passed
        # straight through as OpenCL kernel arguments.
        self.width = np.float32(width)
        self.height = np.float32(height)
        self.depth = np.float32(depth)
        self.idx = idx

        # Compile the intersection kernel that lives next to this module,
        # adding the package-level "include" directory to the header search
        # path of the OpenCL compiler.
        here = os.path.dirname(os.path.abspath(__file__))
        kernel_path = os.path.join(here, 'box.cl')
        include_root = os.path.dirname(here)
        with open(kernel_path, mode='r') as kernel_file:
            kernel_source = kernel_file.read()
        self.prg = cl.Program(ctx, kernel_source).build(
            options=r'-I "{}/include"'.format(include_root))

    def intersect_prg(self, queue, N, neutron_buf, intersection_buf, iidx_buf):
        """Enqueue the box intersection kernel for N neutrons on `queue`."""
        self.prg.intersect_box(queue, (N,), None,
                               neutron_buf,
                               intersection_buf,
                               iidx_buf,
                               np.uint32(self.idx),
                               self.width,
                               self.height,
                               self.depth)

    def lines(self):
        """Return the box wireframe as three coordinate lists ``[x, y, z]``.

        The wireframe is expressed as a single polyline whose winding
        visits every edge of the box at least once, suitable for direct
        plotting.
        """
        half_w = self.width / 2.0
        half_h = self.height / 2.0
        half_d = self.depth / 2.0

        # Eight corners: front face first (indices 0-3), then back face (4-7).
        corners = [[-half_w,  half_h, -half_d],
                   [ half_w,  half_h, -half_d],
                   [ half_w, -half_h, -half_d],
                   [-half_w, -half_h, -half_d],
                   [-half_w,  half_h,  half_d],
                   [ half_w,  half_h,  half_d],
                   [ half_w, -half_h,  half_d],
                   [-half_w, -half_h,  half_d]]

        # One continuous path covering all twelve edges of the box.
        path = [0, 1, 2, 3, 0, 4, 5, 1, 2, 6, 5, 6, 7, 4, 0, 3, 7]

        xs = [corners[i][0] for i in path]
        ys = [corners[i][1] for i in path]
        zs = [corners[i][2] for i in path]
        return [xs, ys, zs]
PypiClean
/EZFF-1.0.0.tar.gz/EZFF-1.0.0/examples/sw-gulp-serial/run.py
import ezff
from ezff.interfaces import vasp, gulp
import numpy as np

# DEFINE GROUND TRUTHS
# Gamma-M phonon dispersions and atomic structures computed with VASP for
# three strain states (relaxed / expanded / compressed) of monolayer MoSe2.
gt_relax_disp_GM = vasp.read_phonon_dispersion('ground_truths/relaxed/GM.dat')
gt_relax_structure = vasp.read_atomic_structure('ground_truths/relaxed/POSCAR')
gt_c11 = 160.0  # GPa for C11 of MoSe2

gt_expanded_disp_GM = vasp.read_phonon_dispersion('ground_truths/expanded/GM.dat')
gt_expanded_structure = vasp.read_atomic_structure('ground_truths/expanded/POSCAR')

gt_compressed_disp_GM = vasp.read_phonon_dispersion('ground_truths/compressed/GM.dat')
gt_compressed_structure = vasp.read_atomic_structure('ground_truths/compressed/POSCAR')


def _make_phonon_job(path, structure, forcefield, relax_cell):
    """Configure (but do not run) a GULP phonon-dispersion job along Gamma-M.

    All three strain-state jobs share these settings; only the working
    directory, the input structure and whether the cell is allowed to relax
    differ between them.
    """
    job = gulp.job(path=path)
    job.structure = structure
    job.forcefield = forcefield
    job.options['pbc'] = True
    job.options['relax_atoms'] = True
    job.options['relax_cell'] = relax_cell
    job.options['phonon_dispersion'] = True
    job.options['phonon_dispersion_from'] = '0 0 0'
    job.options['phonon_dispersion_to'] = '0.5 0 0'
    return job


def my_error_function(variable_values, template):
    """Compute the five fitting errors for one candidate forcefield.

    Runs three GULP jobs (relaxed, compressed, expanded MoSe2) with the
    candidate Stillinger-Weber forcefield and compares each against the
    VASP ground truths loaded at module level.

    Returns
    -------
    list
        [lattice_error, modulus_error, phon_error_relaxed,
         phon_error_compressed, phon_error_expanded]
    """
    myrank = ezff.get_pool_rank()

    # The forcefield text depends only on the template and the candidate
    # parameters, so generate it once and share it across all three jobs
    # (the original generated the identical forcefield three times).
    forcefield = ezff.generate_forcefield(template, variable_values, FFtype='SW')

    # RELAXED STRUCTURE: cell relaxation on; also yields elastic moduli
    # and the relaxed geometry for the lattice-constant error.
    relaxed_job = _make_phonon_job(str(myrank) + '/relaxed',
                                   gt_relax_structure, forcefield,
                                   relax_cell=True)
    relaxed_job.run()
    md_relax_disp_GM = relaxed_job.read_phonon_dispersion()
    md_relaxed_moduli = relaxed_job.read_elastic_moduli()
    md_relaxed_structure = relaxed_job.read_structure()
    relaxed_job.cleanup()

    # COMPRESSED STRUCTURE: cell held fixed at the compressed lattice.
    compressed_job = _make_phonon_job(str(myrank) + '/compressed',
                                      gt_compressed_structure, forcefield,
                                      relax_cell=False)
    compressed_job.run()
    md_compressed_disp_GM = compressed_job.read_phonon_dispersion()
    compressed_job.cleanup()

    # EXPANDED STRUCTURE: cell held fixed at the expanded lattice.
    expanded_job = _make_phonon_job(str(myrank) + '/expanded',
                                    gt_expanded_structure, forcefield,
                                    relax_cell=False)
    expanded_job.run()
    md_expanded_disp_GM = expanded_job.read_phonon_dispersion()
    expanded_job.cleanup()

    # Compute 5 errors from the 3 GULP jobs.
    error_abc, error_ang = ezff.error_lattice_constant(MD=md_relaxed_structure,
                                                       GT=gt_relax_structure)
    # Norm of errors in the 'a' and 'b' lattice constants only.
    lattice_error = np.linalg.norm(error_abc[0:2])

    # Extracting C11 for a bulk-like layered structure from the monolayer GULP
    # calculation; the 2.0/13.97 factor presumably rescales by the bulk
    # interlayer spacing -- TODO confirm against the EZFF examples.
    md_c11 = md_relaxed_moduli[0][0, 0] * md_relaxed_structure.box[2, 2] * (2.0 / 13.97)
    modulus_error = np.linalg.norm(md_c11 - gt_c11)

    phon_error_relaxed = ezff.error_phonon_dispersion(
        MD=md_relax_disp_GM, GT=gt_relax_disp_GM, weights='uniform')
    phon_error_expanded = ezff.error_phonon_dispersion(
        MD=md_expanded_disp_GM, GT=gt_expanded_disp_GM, weights='uniform')
    phon_error_compressed = ezff.error_phonon_dispersion(
        MD=md_compressed_disp_GM, GT=gt_compressed_disp_GM, weights='uniform')

    return [lattice_error, modulus_error, phon_error_relaxed,
            phon_error_compressed, phon_error_expanded]


if __name__ == '__main__':
    obj = ezff.FFParam(error_function=my_error_function, num_errors=5)
    obj.read_variable_bounds('variable_bounds')
    obj.read_forcefield_template('template')
    # Coarse single-objective random search first, then multi-objective
    # Bayesian optimization starting from that population.
    obj.set_algorithm('randomsearch_so', population_size=16)
    obj.parameterize(num_epochs=5)
    obj.set_algorithm('mobo', population_size=16)
    obj.parameterize(num_epochs=5)
PypiClean
/CrystalMatch-1.2.2.tar.gz/CrystalMatch-1.2.2/README.md
[![Build Status](https://travis-ci.com/DiamondLightSource/CrystalMatch.svg?branch=master)](https://travis-ci.com/DiamondLightSource/CrystalMatch) [![Coverage Status](https://coveralls.io/repos/github/DiamondLightSource/CrystalMatch/badge.svg?branch=master)](https://coveralls.io/github/DiamondLightSource/CrystalMatch?branch=master) [![PyPI version](https://badge.fury.io/py/CrystalMatch.svg)](https://badge.fury.io/py/CrystalMatch) [![Downloads](https://pepy.tech/badge/crystalmatch)](https://pepy.tech/project/crystalmatch)

VMXi Crystal Image Matching Readme
==================================

Installation
---------
CrystalMatch requires Python 2.7 and OpenCV version 2.4.10, which can be downloaded from:

* [OpenCV](https://opencv.org/releases.html)

Once OpenCV is installed, CrystalMatch can be installed by running:
```
pip install CrystalMatch
```
See [pip installation](https://pip.pypa.io/en/stable/installing/) if you don't have pip.

Background
----------
Details about the purpose of the VMXi Crystal Image Matching Program:

* [The VMXi Beamline](https://github.com/DiamondLightSource/CrystalMatch/blob/master/docs/vmxi.md)
* [The Problem of Feature Tracking](https://github.com/DiamondLightSource/CrystalMatch/blob/master/docs/tracking.md)

User Guide
----------
This section is a guide for users of the application:

* [The User Guide](https://github.com/DiamondLightSource/CrystalMatch/blob/master/docs/user_guide.md)

Developer Guide
---------------
This section serves as an introduction for developers who are interested in maintaining or extending the Diamond Light Source (DLS) VMXi beamline crystal image matching application. The documents should be read in order:

* [Code Use Instructions](https://github.com/DiamondLightSource/CrystalMatch/blob/master/docs/setup.md)
* [The Solution](https://github.com/DiamondLightSource/CrystalMatch/blob/master/docs/solution.md)
PypiClean
/Ibid-0.1.1.tar.gz/Ibid-0.1.1/ibid/plugins/social.py
from urllib2 import HTTPError from time import time from datetime import datetime import re import logging import feedparser from ibid.compat import ElementTree from ibid.config import DictOption from ibid.plugins import Processor, match, handler from ibid.utils import ago, decode_htmlentities, generic_webservice, \ json_webservice, parse_timestamp log = logging.getLogger('plugins.social') features = {} features['lastfm'] = { 'description': u'Lists the tracks last listened to by the specified user.', 'categories': ('lookup', 'web',), } class LastFm(Processor): usage = u'last.fm for <username>' feature = ('lastfm',) @match(r'^last\.?fm\s+for\s+(\S+?)\s*$') def listsongs(self, event, username): songs = feedparser.parse('http://ws.audioscrobbler.com/1.0/user/%s/recenttracks.rss?%s' % (username, time())) if songs['bozo']: event.addresponse(u'No such user') else: event.addresponse(u', '.join(u'%s (%s ago)' % ( e.title, ago(event.time - parse_timestamp(e.updated)) ) for e in songs['entries'])) features['microblog'] = { 'description': u'Looks up messages on microblogging services like twitter ' u'and identica.', 'categories': ('lookup', 'web',), } class Twitter(Processor): usage = u"""latest (tweet|identica) from <name> (tweet|identica) <number>""" feature = ('microblog',) default = { 'twitter': {'endpoint': 'http://twitter.com/', 'api': 'twitter', 'name': 'tweet', 'user': 'twit'}, 'tweet': {'endpoint': 'http://twitter.com/', 'api': 'twitter', 'name': 'tweet', 'user': 'twit'}, 'identica': {'endpoint': 'http://identi.ca/api/', 'api': 'laconica', 'name': 'dent', 'user': 'denter'}, 'identi.ca': {'endpoint': 'http://identi.ca/api/', 'api': 'laconica', 'name': 'dent', 'user': 'denter'}, 'dent': {'endpoint': 'http://identi.ca/api/', 'api': 'laconica', 'name': 'dent', 'user': 'denter'}, } services = DictOption('services', 'Micro blogging services', default) class NoTweetsException(Exception): pass def setup(self): self.update.im_func.pattern = re.compile(r'^(%s)\s+(\d+)$' % 
'|'.join(self.services.keys()), re.I) self.latest.im_func.pattern = re.compile(r'^(?:latest|last)\s+(%s)\s+(?:update\s+)?(?:(?:by|from|for)\s+)?@?(\S+)$' % '|'.join(self.services.keys()), re.I) def remote_update(self, service, id): status = json_webservice('%sstatuses/show/%s.json' % (service['endpoint'], id)) return {'screen_name': status['user']['screen_name'], 'text': decode_htmlentities(status['text'])} def remote_latest(self, service, user): if service['api'] == 'twitter': # Twitter ommits retweets in the JSON and XML results: statuses = generic_webservice('%sstatuses/user_timeline/%s.atom' % (service['endpoint'], user.encode('utf-8')), {'count': 1}) tree = ElementTree.fromstring(statuses) latest = tree.find('{http://www.w3.org/2005/Atom}entry') if latest is None: raise self.NoTweetsException(user) return { 'text': latest.findtext('{http://www.w3.org/2005/Atom}content') .split(': ', 1)[1], 'ago': ago(datetime.utcnow() - parse_timestamp( latest.findtext('{http://www.w3.org/2005/Atom}published'))), 'url': [x for x in latest.getiterator('{http://www.w3.org/2005/Atom}link') if x.get('type') == 'text/html' ][0].get('href'), } elif service['api'] == 'laconica': statuses = json_webservice('%sstatuses/user_timeline/%s.json' % (service['endpoint'], user.encode('utf-8')), {'count': 1}) if not statuses: raise self.NoTweetsException(user) latest = statuses[0] url = '%s/notice/%i' % (service['endpoint'].split('/api/', 1)[0], latest['id']) return { 'text': decode_htmlentities(latest['text']), 'ago': ago(datetime.utcnow() - parse_timestamp(latest['created_at'])), 'url': url, } @handler def update(self, event, service_name, id): service = self.services[service_name.lower()] try: event.addresponse(u'%(screen_name)s: "%(text)s"', self.remote_update(service, int(id))) except HTTPError, e: if e.code in (401, 403): event.addresponse(u'That %s is private', service['name']) elif e.code == 404: event.addresponse(u'No such %s', service['name']) else: log.debug(u'%s raised %s', 
service['name'], unicode(e)) event.addresponse(u'I can only see the Fail Whale') @handler def latest(self, event, service_name, user): service = self.services[service_name.lower()] try: event.addresponse(u'"%(text)s" %(ago)s ago, %(url)s', self.remote_latest(service, user)) except HTTPError, e: if e.code in (401, 403): event.addresponse(u"Sorry, %s's feed is private", user) elif e.code == 404: event.addresponse(u'No such %s', service['user']) else: log.debug(u'%s raised %s', service['name'], unicode(e)) event.addresponse(u'I can only see the Fail Whale') except self.NoTweetsException, e: event.addresponse( u'It appears that %(user)s has never %(tweet)sed', { 'user': user, 'tweet': service['name'], }) @match(r'^https?://(?:www\.)?twitter\.com/(?:#!/)?[^/ ]+/statuse?s?/(\d+)$') def twitter(self, event, id): self.update(event, u'twitter', id) @match(r'^https?://(?:www\.)?identi.ca/notice/(\d+)$') def identica(self, event, id): self.update(event, u'identica', id) # vi: set et sta sw=4 ts=4:
PypiClean
/CodeIntel-2.0.0b19-cp34-cp34m-macosx_10_12_x86_64.whl/codeintel/codeintel2/lib_srcs/node.js/5.9/os.js
/**
 * CodeIntel API stub for the Node.js 5.9 "os" core module.
 * Function bodies are intentionally empty: only the names, signatures and
 * doc comments below are consumed by the code-intelligence tooling.
 */
var os = {};

/**
 * Returns the system uptime in seconds.
 * @returns the system uptime in seconds
 */
os.uptime = function() {}

/**
 * Returns the total amount of system memory in bytes.
 * @returns the total amount of system memory in bytes
 */
os.totalmem = function() {}

/**
 * Returns the hostname of the operating system.
 * @returns the hostname of the operating system
 */
os.hostname = function() {}

/**
 * Returns an array of objects containing information about each CPU/core
 * installed: model, speed (in MHz), and times (an object containing the
 * number of milliseconds the CPU/core spent in: user, nice, sys, idle, and
 * irq).
 * @returns {Array} an array of objects containing information about each CPU/core installed: model, speed (in MHz), and times (an object containing the number of milliseconds the CPU/core spent in: user, nice, sys, idle, and irq)
 */
os.cpus = function() {}

/**
 * Returns an array containing the 1, 5, and 15 minute load averages.
 * @returns an array containing the 1, 5, and 15 minute load averages
 */
os.loadavg = function() {}

/**
 * Returns the operating system release.
 * @returns the operating system release
 */
os.release = function() {}

/**
 * Returns the operating system name. For example 'Linux' on Linux,
 * 'Darwin' on OS X and 'Windows_NT' on Windows.
 * @returns the operating system name
 */
os.type = function() {}

/**
 * Returns the amount of free system memory in bytes.
 * @returns the amount of free system memory in bytes
 */
os.freemem = function() {}

/**
 * A constant defining the appropriate End-of-line marker for the operating
 * system.
 * NOTE(review): at runtime this is a string ('\n' or '\r\n'), not a number;
 * the 0 below is only a stub placeholder, as elsewhere in these stubs.
 */
os.EOL = 0;

/**
 * Returns the operating system CPU architecture. Possible values are
 * 'x64', 'arm' and 'ia32'. Returns the value of
 * [process.arch][].
 * @returns the operating system CPU architecture
 */
os.arch = function() {}

/**
 * Returns the endianness of the CPU. Possible values are 'BE' for
 * big endian or 'LE' for little endian.
 * @returns the endianness of the CPU
 */
os.endianness = function() {}

/**
 * Returns the home directory of the current user.
 * @returns the home directory of the current user
 */
os.homedir = function() {}

/**
 * Get a list of network interfaces:
 */
os.networkInterfaces = function() {}

/**
 * Returns the operating system platform. Possible values are
 * 'darwin', 'freebsd', 'linux', 'sunos' or
 * 'win32'. Returns the value of [process.platform][].
 * @returns the operating system platform
 */
os.platform = function() {}

/**
 * Returns the operating system's default directory for temporary
 * files.
 * @returns the operating system's default directory for temporary files
 */
os.tmpdir = function() {}

// Stub export convention used throughout these codeintel definition files.
exports = os;
PypiClean
/My_Learn_Messenger_Server-0.1.1.tar.gz/My_Learn_Messenger_Server-0.1.1/сhatserver/server/server_core.py
import binascii import hmac import json import os import select import socket import threading from chatserver.сhatserver.Log.Log_decorators import login_required from chatserver.сhatserver.Log.server_log_config import server_log from chatserver.сhatserver.Mainlib.JIM import get_message, send_message from chatserver.сhatserver.Mainlib.descriptors import Port, IPAddress from chatserver.сhatserver.Mainlib.variables import DESTINATION, SENDER, ACTION, PRESENCE, TIME, USER, MESSAGE_TEXT, \ MESSAGE, RESPONSE_200, RESPONSE_400, ERROR, EXIT, ACCOUNT_NAME, GET_CONTACTS, RESPONSE_202, LIST_INFO, ADD_CONTACT, \ REMOVE_CONTACT, ONLINE_USERS_REQUEST, USERS_REQUEST, PUBLIC_KEY_REQUEST, RESPONSE_511, DATA, RESPONSE, PUBLIC_KEY, \ RESPONSE_205 class MessageProcessor(threading.Thread): """ The main server class. Accepts connections, dictionaries - packages from clients, processes incoming messages. Works as a separate thread. """ port = Port() addr = IPAddress() def __init__(self, listen_address, listen_port, database): # Параментры подключения self.addr = listen_address self.port = listen_port # База данных сервера self.database = database # Сокет, через который будет осуществляться работа self.sock = None # Список подключённых клиентов. self.clients = [] # Сокеты self.listen_sockets = None self.error_sockets = None # Флаг продолжения работы self.running = True # Словарь содержащий сопоставленные имена и соответствующие им сокеты. self.names = dict() # Конструктор предка super().__init__() def run(self): """ Thread main loop function :return: """ # Инициализация Сокета self.init_socket() # Основной цикл программы сервера while self.running: # Ждём подключения, если таймаут вышел, ловим исключение. 
try: client, client_address = self.sock.accept() except OSError: pass else: server_log.info(f'Установлено соедение с ПК {client_address}') client.settimeout(5) self.clients.append(client) recv_data_lst = [] send_data_lst = [] err_lst = [] # Проверяем на наличие ждущих клиентов try: if self.clients: recv_data_lst, self.listen_sockets, self.error_sockets = select.select( self.clients, self.clients, [], 0) except OSError as err: server_log.error(f'Ошибка работы с сокетами: {err.errno}') # принимаем сообщения и если ошибка, исключаем клиента. if recv_data_lst: for client_with_message in recv_data_lst: try: self.process_client_message( get_message(client_with_message), client_with_message) except (OSError, json.JSONDecodeError, TypeError) as err: server_log.debug(f'Getting data from client exception.', exc_info=err) self.remove_client(client_with_message) def remove_client(self, client): """ Client handler method with which the connection was interrupted. Searches for a client and removes him from the lists and base :param client: :return: """ server_log.info(f'Клиент {client.getpeername()} отключился от сервера.') for name in self.names: if self.names[name] == client: self.database.user_logout(name) del self.names[name] break self.clients.remove(client) client.close() def init_socket(self): """Server socket initialize""" server_log.info( f'Запущен сервер, порт для подключений: {self.port}, адрес с которого принимаются подключения: {self.addr}.' f' Если адрес не указан, принимаются соединения с любых адресов.') # Готовим сокет transport = socket.socket(socket.AF_INET, socket.SOCK_STREAM) transport.bind((self.addr, self.port)) transport.settimeout(0.5) # Начинаем слушать сокет. 
self.sock = transport self.sock.listen(10240) def process_message(self, message): """ Method of sending message to client :param message: :return: """ if message[DESTINATION] in self.names and self.names[message[DESTINATION] ] in self.listen_sockets: try: send_message(self.names[message[DESTINATION]], message) server_log.info( f'Отправлено сообщение пользователю {message[DESTINATION]} от пользователя {message[SENDER]}.') except OSError: self.remove_client(message[DESTINATION]) elif message[DESTINATION] in self.names and self.names[message[DESTINATION]] not in self.listen_sockets: server_log.error( f'Связь с клиентом {message[DESTINATION]} была потеряна. Соединение закрыто, доставка невозможна.') self.remove_client(self.names[message[DESTINATION]]) else: server_log.error( f'Пользователь {message[DESTINATION]} не зарегистрирован на сервере, отправка сообщения невозможна.') @login_required def process_client_message(self, message, client): """ Function for processing incoming messages. :param message: :param client: :return: """ server_log.debug(f'Разбор сообщения от клиента : {message}') # Если это сообщение о присутствии, принимаем и отвечаем if ACTION in message and message[ACTION] == PRESENCE and TIME in message and USER in message: # Если сообщение о присутствии то вызываем функцию авторизации. self.autorize_user(message, client) # Если это сообщение, то отправляем его получателю. elif ACTION in message and message[ACTION] == MESSAGE and DESTINATION in message and TIME in message \ and SENDER in message and MESSAGE_TEXT in message and self.names[message[SENDER]] == client: if message[DESTINATION] in self.names: self.database.process_message( message[SENDER], message[DESTINATION]) self.process_message(message) try: send_message(client, RESPONSE_200) except OSError: self.remove_client(client) else: response = RESPONSE_400 response[ERROR] = 'Пользователь не зарегистрирован на сервере.' 
try: send_message(client, response) except OSError: pass return # Если клиент выходит elif ACTION in message and message[ACTION] == EXIT and ACCOUNT_NAME in message \ and self.names[message[ACCOUNT_NAME]] == client: self.remove_client(client) # Если это запрос контакт-листа elif ACTION in message and message[ACTION] == GET_CONTACTS and USER in message and \ self.names[message[USER]] == client: response = RESPONSE_202 response[LIST_INFO] = self.database.get_contacts(message[USER]) try: send_message(client, response) except OSError: self.remove_client(client) # Если это добавление контакта elif ACTION in message and message[ACTION] == ADD_CONTACT and ACCOUNT_NAME in message and USER in message \ and self.names[message[USER]] == client: self.database.add_contact(message[USER], message[ACCOUNT_NAME]) try: send_message(client, RESPONSE_200) except OSError: self.remove_client(client) # Если это удаление контакта elif ACTION in message and message[ACTION] == REMOVE_CONTACT and ACCOUNT_NAME in message and USER in message \ and self.names[message[USER]] == client: self.database.remove_contact(message[USER], message[ACCOUNT_NAME]) try: send_message(client, RESPONSE_200) except OSError: self.remove_client(client) # Если это запрос пользователей онлайн elif ACTION in message and message[ACTION] == ONLINE_USERS_REQUEST in message and \ self.names[message[ACCOUNT_NAME]] == client: self.database.active_users_list() response = RESPONSE_202 response[LIST_INFO] = [name[0] for name in self.database.active_users_list().count()] try: send_message(client, RESPONSE_202) except OSError: self.remove_client(client) # Если это запрос известных пользователей elif ACTION in message and message[ACTION] == USERS_REQUEST and ACCOUNT_NAME in message \ and self.names[message[ACCOUNT_NAME]] == client: response = RESPONSE_202 response[LIST_INFO] = [user[0] for user in self.database.users_list()] try: send_message(client, response) except OSError: self.remove_client(client) # Если это запрос 
публичного ключа пользователя elif ACTION in message and message[ACTION] == PUBLIC_KEY_REQUEST and ACCOUNT_NAME in message: response = RESPONSE_511 response[DATA] = self.database.get_pubkey(message[ACCOUNT_NAME]) # может быть, что ключа ещё нет (пользователь никогда не логинился, # тогда шлём 400) if response[DATA]: try: send_message(client, response) except OSError: self.remove_client(client) else: response = RESPONSE_400 response[ERROR] = 'Нет публичного ключа для данного пользователя' try: send_message(client, response) except OSError: self.remove_client(client) # Иначе отдаём Bad request else: response = RESPONSE_400 response[ERROR] = 'Запрос некорректен.' try: send_message(client, response) except OSError: self.remove_client(client) def autorize_user(self, message, sock): """ Function that implements user authorization :param message: :param sock: :return: """ # Если имя пользователя уже занято то возвращаем 400 server_log.debug(f'Start auth process for {message[USER]}') if message[USER][ACCOUNT_NAME] in self.names.keys(): response = RESPONSE_400 response[ERROR] = 'Имя пользователя уже занято.' try: server_log.debug(f'Username busy, sending {response}') send_message(sock, response) except OSError: server_log.debug('OS Error') pass self.clients.remove(sock) sock.close() # Проверяем что пользователь зарегистрирован на сервере. elif not self.database.check_user(message[USER][ACCOUNT_NAME]): response = RESPONSE_400 response[ERROR] = 'Пользователь не зарегистрирован.' 
try: server_log.debug(f'Unknown username, sending {response}') send_message(sock, response) except OSError: pass self.clients.remove(sock) sock.close() else: server_log.debug('Correct username, starting passwd check.') # Иначе отвечаем 511 и проводим процедуру авторизации # Словарь - заготовка message_auth = RESPONSE_511 # Набор байтов в hex представлении random_str = binascii.hexlify(os.urandom(64)) # В словарь байты нельзя, декодируем (json.dumps -> TypeError) message_auth[DATA] = random_str.decode('ascii') # Создаём хэш пароля и связки с рандомной строкой, сохраняем # серверную версию ключа hash = hmac.new(self.database.get_hash(message[USER][ACCOUNT_NAME]), random_str, 'MD5') digest = hash.digest() server_log.debug(f'Auth message = {message_auth}') try: # Обмен с клиентом send_message(sock, message_auth) ans = get_message(sock) except OSError as err: server_log.debug('Error in auth, data:', exc_info=err) sock.close() return client_digest = binascii.a2b_base64(ans[DATA]) # Если ответ клиента корректный, то сохраняем его в список # пользователей. if RESPONSE in ans and ans[RESPONSE] == 511 and hmac.compare_digest( digest, client_digest): self.names[message[USER][ACCOUNT_NAME]] = sock client_ip, client_port = sock.getpeername() try: send_message(sock, RESPONSE_200) except OSError: self.remove_client(message[USER][ACCOUNT_NAME]) # добавляем пользователя в список активных и если у него изменился открытый ключ # сохраняем новый self.database.user_login( message[USER][ACCOUNT_NAME], client_ip, client_port, message[USER][PUBLIC_KEY]) else: response = RESPONSE_400 response[ERROR] = 'Неверный пароль.' try: send_message(sock, response) except OSError: pass self.clients.remove(sock) sock.close() def service_update_lists(self): """ Function that implements sending for clients a service message 205 :return: """ for client in self.names: try: send_message(self.names[client], RESPONSE_205) except OSError: self.remove_client(self.names[client])
PypiClean
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/electrum_chi/electrum/network.py
import time
import queue
import os
import random
import re
from collections import defaultdict
import threading
import socket
import json
import sys
import ipaddress
import asyncio
from typing import NamedTuple, Optional, Sequence, List, Dict, Tuple
import traceback

import dns
import dns.resolver
import aiorpcx
from aiorpcx import TaskGroup
from aiohttp import ClientResponse

from . import util
from .util import (log_exceptions, ignore_exceptions,
                   bfh, SilentTaskGroup, make_aiohttp_session,
                   send_exception_to_crash_reporter,
                   is_hash256_str, is_non_negative_integer)
from .bitcoin import COIN
from . import constants
from . import blockchain
from . import bitcoin
from .blockchain import Blockchain, DISK_HEADER_SIZE
from .interface import (Interface, serialize_server, deserialize_server,
                        RequestTimedOut, NetworkTimeout,
                        BUCKET_NAME_OF_ONION_SERVERS, NetworkException)
from .version import PROTOCOL_VERSION
from .simple_config import SimpleConfig
from .i18n import _
from .logging import get_logger, Logger


_logger = get_logger(__name__)


NODES_RETRY_INTERVAL = 60
SERVER_RETRY_INTERVAL = 10
NUM_TARGET_CONNECTED_SERVERS = 10
NUM_RECENT_SERVERS = 20


def parse_servers(result: Sequence[Tuple[str, str, List[str]]]) -> Dict[str, dict]:
    """ parse servers list into dict format"""
    servers = {}
    for item in result:
        host = item[1]
        out = {}
        version = None
        pruning_level = '-'
        if len(item) > 2:
            for v in item[2]:
                if re.match(r"[st]\d*", v):
                    protocol, port = v[0], v[1:]
                    if port == '':
                        port = constants.net.DEFAULT_PORTS[protocol]
                    out[protocol] = port
                elif re.match("v(.?)+", v):
                    version = v[1:]
                elif re.match(r"p\d*", v):
                    pruning_level = v[1:]
                if pruning_level == '':
                    pruning_level = '0'
        if out:
            out['pruning'] = pruning_level
            out['version'] = version
            servers[host] = out
    return servers


def filter_version(servers):
    """Keep only servers whose advertised protocol version is recent enough."""
    def is_recent(version):
        try:
            return util.versiontuple(version) >= util.versiontuple(PROTOCOL_VERSION)
        except Exception as e:
            return False
    return {k: v for k, v in servers.items() if is_recent(v.get('version'))}


def filter_noonion(servers):
    """Drop Tor hidden-service (.onion) hosts from the server map."""
    return {k: v for k, v in servers.items() if not k.endswith('.onion')}


def filter_protocol(hostmap, protocol='s'):
    '''Filters the hostmap for those implementing protocol.
    The result is a list in serialized form.'''
    eligible = []
    for host, portmap in hostmap.items():
        port = portmap.get(protocol)
        if port:
            eligible.append(serialize_server(host, port, protocol))
    return eligible


def pick_random_server(hostmap=None, protocol='s', exclude_set=None):
    """Pick a random eligible server string, or None if none is available."""
    if hostmap is None:
        hostmap = constants.net.DEFAULT_SERVERS
    if exclude_set is None:
        exclude_set = set()
    eligible = list(set(filter_protocol(hostmap, protocol)) - exclude_set)
    return random.choice(eligible) if eligible else None


class NetworkParameters(NamedTuple):
    host: str
    port: str
    protocol: str
    proxy: Optional[dict]
    auto_connect: bool
    oneserver: bool = False


proxy_modes = ['socks4', 'socks5']


def serialize_proxy(p):
    """Serialize a proxy dict to 'mode:host:port:user:password', or None."""
    if not isinstance(p, dict):
        return None
    return ':'.join([p.get('mode'), p.get('host'), p.get('port'),
                     p.get('user', ''), p.get('password', '')])


def deserialize_proxy(s: str) -> Optional[dict]:
    """Parse a colon-separated proxy string back into a dict (or None)."""
    if not isinstance(s, str):
        return None
    if s.lower() == 'none':
        return None
    proxy = {"mode": "socks5", "host": "localhost"}
    # FIXME raw IPv6 address fails here
    args = s.split(':')
    n = 0
    if proxy_modes.count(args[n]) == 1:
        proxy["mode"] = args[n]
        n += 1
    if len(args) > n:
        proxy["host"] = args[n]
        n += 1
    if len(args) > n:
        proxy["port"] = args[n]
        n += 1
    else:
        proxy["port"] = "8080" if proxy["mode"] == "http" else "1080"
    if len(args) > n:
        proxy["user"] = args[n]
        n += 1
    if len(args) > n:
        proxy["password"] = args[n]
    return proxy


class BestEffortRequestFailed(NetworkException):
    pass


class TxBroadcastError(NetworkException):
    def get_message_for_gui(self):
        raise NotImplementedError()


class TxBroadcastHashMismatch(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an unexpected transaction ID when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating Electrum-CHI."),
                    str(self))


class TxBroadcastServerReturnedError(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}\n\n{}" \
            .format(_("The server returned an error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating Electrum-CHI."),
                    str(self))


class TxBroadcastUnknownError(TxBroadcastError):
    def get_message_for_gui(self):
        return "{}\n{}" \
            .format(_("Unknown error when broadcasting the transaction."),
                    _("Consider trying to connect to a different server, or updating Electrum-CHI."))


class UntrustedServerReturnedError(NetworkException):
    def __init__(self, *, original_exception):
        self.original_exception = original_exception

    def __str__(self):
        return _("The server returned an error.")

    def __repr__(self):
        return f"<UntrustedServerReturnedError original_exception: {repr(self.original_exception)}>"


INSTANCE = None


class Network(Logger):
    """The Network class manages a set of connections to remote electrum
    servers, each connected socket is handled by an Interface() object.
    """
    LOGGING_SHORTCUT = 'n'

    def __init__(self, config: SimpleConfig=None):
        global INSTANCE
        INSTANCE = self
        Logger.__init__(self)
        self.asyncio_loop = asyncio.get_event_loop()
        assert self.asyncio_loop.is_running(), "event loop not running"
        self._loop_thread = None  # type: threading.Thread  # set by caller; only used for sanity checks
        if config is None:
            config = {}  # Do not use mutables as default values!
self.config = SimpleConfig(config) if isinstance(config, dict) else config # type: SimpleConfig blockchain.read_blockchains(self.config) self.logger.info(f"blockchains {list(map(lambda b: b.forkpoint, blockchain.blockchains.values()))}") self._blockchain_preferred_block = self.config.get('blockchain_preferred_block', None) # type: Optional[Dict] self._blockchain = blockchain.get_best_chain() # Server for addresses and transactions self.default_server = self.config.get('server', None) # Sanitize default server if self.default_server: try: deserialize_server(self.default_server) except: self.logger.warning('failed to parse server-string; falling back to random.') self.default_server = None if not self.default_server: self.default_server = pick_random_server() self.main_taskgroup = None # type: TaskGroup # locks self.restart_lock = asyncio.Lock() self.bhi_lock = asyncio.Lock() self.callback_lock = threading.Lock() self.recent_servers_lock = threading.RLock() # <- re-entrant self.interfaces_lock = threading.Lock() # for mutating/iterating self.interfaces self.server_peers = {} # returned by interface (servers that the main interface knows about) self.recent_servers = self._read_recent_servers() # note: needs self.recent_servers_lock self.banner = '' self.donation_address = '' self.relay_fee = None # type: Optional[int] # callbacks set by the GUI self.callbacks = defaultdict(list) # note: needs self.callback_lock dir_path = os.path.join(self.config.path, 'certs') util.make_dir(dir_path) # retry times self.server_retry_time = time.time() self.nodes_retry_time = time.time() # the main server we are currently communicating with self.interface = None # type: Interface # set of servers we have an ongoing connection with self.interfaces = {} # type: Dict[str, Interface] self.auto_connect = self.config.get('auto_connect', True) self.connecting = set() self.server_queue = None self.proxy = None # Dump network messages (all interfaces). Set at runtime from the console. 
self.debug = False self._set_status('disconnected') def run_from_another_thread(self, coro): assert self._loop_thread != threading.current_thread(), 'must not be called from network thread' fut = asyncio.run_coroutine_threadsafe(coro, self.asyncio_loop) return fut.result() @staticmethod def get_instance() -> Optional["Network"]: return INSTANCE def with_recent_servers_lock(func): def func_wrapper(self, *args, **kwargs): with self.recent_servers_lock: return func(self, *args, **kwargs) return func_wrapper def register_callback(self, callback, events): with self.callback_lock: for event in events: self.callbacks[event].append(callback) def unregister_callback(self, callback): with self.callback_lock: for callbacks in self.callbacks.values(): if callback in callbacks: callbacks.remove(callback) def trigger_callback(self, event, *args): with self.callback_lock: callbacks = self.callbacks[event][:] for callback in callbacks: # FIXME: if callback throws, we will lose the traceback if asyncio.iscoroutinefunction(callback): asyncio.run_coroutine_threadsafe(callback(event, *args), self.asyncio_loop) else: self.asyncio_loop.call_soon_threadsafe(callback, event, *args) def _read_recent_servers(self): if not self.config.path: return [] path = os.path.join(self.config.path, "recent_servers") try: with open(path, "r", encoding='utf-8') as f: data = f.read() return json.loads(data) except: return [] @with_recent_servers_lock def _save_recent_servers(self): if not self.config.path: return path = os.path.join(self.config.path, "recent_servers") s = json.dumps(self.recent_servers, indent=4, sort_keys=True) try: with open(path, "w", encoding='utf-8') as f: f.write(s) except: pass def get_server_height(self): interface = self.interface return interface.tip if interface else 0 async def _server_is_lagging(self): sh = self.get_server_height() if not sh: self.logger.info('no height for main interface') return True lh = self.get_local_height() result = (lh - sh) > 1 if result: 
self.logger.info(f'{self.default_server} is lagging ({sh} vs {lh})') return result def _set_status(self, status): self.connection_status = status self.notify('status') def is_connected(self): interface = self.interface return interface is not None and interface.ready.done() def is_connecting(self): return self.connection_status == 'connecting' async def _request_server_info(self, interface): await interface.ready session = interface.session async def get_banner(): self.banner = await session.send_request('server.banner') self.notify('banner') async def get_donation_address(): addr = await session.send_request('server.donation_address') if not bitcoin.is_address(addr): if addr: # ignore empty string self.logger.info(f"invalid donation address from server: {repr(addr)}") addr = '' self.donation_address = addr async def get_server_peers(): server_peers = await session.send_request('server.peers.subscribe') random.shuffle(server_peers) max_accepted_peers = len(constants.net.DEFAULT_SERVERS) + NUM_RECENT_SERVERS server_peers = server_peers[:max_accepted_peers] self.server_peers = parse_servers(server_peers) self.notify('servers') async def get_relay_fee(): relayfee = await session.send_request('blockchain.relayfee') if relayfee is None: self.relay_fee = None else: relayfee = int(relayfee * COIN) self.relay_fee = max(0, relayfee) async with TaskGroup() as group: await group.spawn(get_banner) await group.spawn(get_donation_address) await group.spawn(get_server_peers) await group.spawn(get_relay_fee) await group.spawn(self._request_fee_estimates(interface)) async def _request_fee_estimates(self, interface): session = interface.session from .simple_config import FEE_ETA_TARGETS self.config.requested_fee_estimates() async with TaskGroup() as group: histogram_task = await group.spawn(session.send_request('mempool.get_fee_histogram')) fee_tasks = [] for i in FEE_ETA_TARGETS: fee_tasks.append((i, await group.spawn(session.send_request('blockchain.estimatefee', [i])))) 
self.config.mempool_fees = histogram = histogram_task.result() self.logger.info(f'fee_histogram {histogram}') self.notify('fee_histogram') fee_estimates_eta = {} for nblock_target, task in fee_tasks: fee = int(task.result() * COIN) fee_estimates_eta[nblock_target] = fee if fee < 0: continue self.config.update_fee_estimates(nblock_target, fee) self.logger.info(f'fee_estimates {fee_estimates_eta}') self.notify('fee') def get_status_value(self, key): if key == 'status': value = self.connection_status elif key == 'banner': value = self.banner elif key == 'fee': value = self.config.fee_estimates elif key == 'fee_histogram': value = self.config.mempool_fees elif key == 'servers': value = self.get_servers() else: raise Exception('unexpected trigger key {}'.format(key)) return value def notify(self, key): if key in ['status', 'updated']: self.trigger_callback(key) else: self.trigger_callback(key, self.get_status_value(key)) def get_parameters(self) -> NetworkParameters: host, port, protocol = deserialize_server(self.default_server) return NetworkParameters(host=host, port=port, protocol=protocol, proxy=self.proxy, auto_connect=self.auto_connect, oneserver=self.oneserver) def get_donation_address(self): if self.is_connected(): return self.donation_address def get_interfaces(self) -> List[str]: """The list of servers for the connected interfaces.""" with self.interfaces_lock: return list(self.interfaces) @with_recent_servers_lock def get_servers(self): # note: order of sources when adding servers here is crucial! 
# don't let "server_peers" overwrite anything, # otherwise main server can eclipse the client out = dict() # add servers received from main interface server_peers = self.server_peers if server_peers: out.update(filter_version(server_peers.copy())) # hardcoded servers out.update(constants.net.DEFAULT_SERVERS) # add recent servers for s in self.recent_servers: try: host, port, protocol = deserialize_server(s) except: continue if host in out: out[host].update({protocol: port}) else: out[host] = {protocol: port} # potentially filter out some if self.config.get('noonion'): out = filter_noonion(out) return out def _start_interface(self, server: str): if server not in self.interfaces and server not in self.connecting: if server == self.default_server: self.logger.info(f"connecting to {server} as new interface") self._set_status('connecting') self.connecting.add(server) self.server_queue.put(server) def _start_random_interface(self): with self.interfaces_lock: exclude_set = self.disconnected_servers | set(self.interfaces) | self.connecting server = pick_random_server(self.get_servers(), self.protocol, exclude_set) if server: self._start_interface(server) return server def _set_proxy(self, proxy: Optional[dict]): self.proxy = proxy # Store these somewhere so we can un-monkey-patch if not hasattr(socket, "_getaddrinfo"): socket._getaddrinfo = socket.getaddrinfo if proxy: self.logger.info(f'setting proxy {proxy}') # prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))] else: if sys.platform == 'win32': # On Windows, socket.getaddrinfo takes a mutex, and might hold it for up to 10 seconds # when dns-resolving. To speed it up drastically, we resolve dns ourselves, outside that lock. 
# see #4421 socket.getaddrinfo = self._fast_getaddrinfo else: socket.getaddrinfo = socket._getaddrinfo self.trigger_callback('proxy_set', self.proxy) @staticmethod def _fast_getaddrinfo(host, *args, **kwargs): def needs_dns_resolving(host): try: ipaddress.ip_address(host) return False # already valid IP except ValueError: pass # not an IP if str(host) in ('localhost', 'localhost.',): return False return True def resolve_with_dnspython(host): addrs = [] # try IPv6 try: answers = dns.resolver.query(host, dns.rdatatype.AAAA) addrs += [str(answer) for answer in answers] except dns.exception.DNSException as e: pass except BaseException as e: _logger.info(f'dnspython failed to resolve dns (AAAA) with error: {e}') # try IPv4 try: answers = dns.resolver.query(host, dns.rdatatype.A) addrs += [str(answer) for answer in answers] except dns.exception.DNSException as e: # dns failed for some reason, e.g. dns.resolver.NXDOMAIN this is normal. # Simply report back failure; except if we already have some results. if not addrs: raise socket.gaierror(11001, 'getaddrinfo failed') from e except BaseException as e: # Possibly internal error in dnspython :( see #4483 _logger.info(f'dnspython failed to resolve dns (A) with error: {e}') if addrs: return addrs # Fall back to original socket.getaddrinfo to resolve dns. 
return [host] addrs = [host] if needs_dns_resolving(host): addrs = resolve_with_dnspython(host) list_of_list_of_socketinfos = [socket._getaddrinfo(addr, *args, **kwargs) for addr in addrs] list_of_socketinfos = [item for lst in list_of_list_of_socketinfos for item in lst] return list_of_socketinfos @log_exceptions async def set_parameters(self, net_params: NetworkParameters): proxy = net_params.proxy proxy_str = serialize_proxy(proxy) host, port, protocol = net_params.host, net_params.port, net_params.protocol server_str = serialize_server(host, port, protocol) # sanitize parameters try: deserialize_server(serialize_server(host, port, protocol)) if proxy: proxy_modes.index(proxy['mode']) + 1 int(proxy['port']) except: return self.config.set_key('auto_connect', net_params.auto_connect, False) self.config.set_key('oneserver', net_params.oneserver, False) self.config.set_key('proxy', proxy_str, False) self.config.set_key('server', server_str, True) # abort if changes were not allowed by config if self.config.get('server') != server_str \ or self.config.get('proxy') != proxy_str \ or self.config.get('oneserver') != net_params.oneserver: return async with self.restart_lock: self.auto_connect = net_params.auto_connect if self.proxy != proxy or self.protocol != protocol or self.oneserver != net_params.oneserver: # Restart the network defaulting to the given server await self._stop() self.default_server = server_str await self._start() elif self.default_server != server_str: await self.switch_to_interface(server_str) else: await self.switch_lagging_interface() def _set_oneserver(self, oneserver: bool): self.num_server = NUM_TARGET_CONNECTED_SERVERS if not oneserver else 0 self.oneserver = bool(oneserver) async def _switch_to_random_interface(self): '''Switch to a random connected server other than the current one''' servers = self.get_interfaces() # Those in connected state if self.default_server in servers: servers.remove(self.default_server) if servers: await 
self.switch_to_interface(random.choice(servers)) async def switch_lagging_interface(self): '''If auto_connect and lagging, switch interface''' if self.auto_connect and await self._server_is_lagging(): # switch to one that has the correct header (not height) best_header = self.blockchain().read_header(self.get_local_height()) with self.interfaces_lock: interfaces = list(self.interfaces.values()) filtered = list(filter(lambda iface: iface.tip_header == best_header, interfaces)) if filtered: chosen_iface = random.choice(filtered) await self.switch_to_interface(chosen_iface.server) async def switch_unwanted_fork_interface(self): """If auto_connect and main interface is not on preferred fork, try to switch to preferred fork. """ if not self.auto_connect or not self.interface: return with self.interfaces_lock: interfaces = list(self.interfaces.values()) # try to switch to preferred fork if self._blockchain_preferred_block: pref_height = self._blockchain_preferred_block['height'] pref_hash = self._blockchain_preferred_block['hash'] if self.interface.blockchain.check_hash(pref_height, pref_hash): return # already on preferred fork filtered = list(filter(lambda iface: iface.blockchain.check_hash(pref_height, pref_hash), interfaces)) if filtered: self.logger.info("switching to preferred fork") chosen_iface = random.choice(filtered) await self.switch_to_interface(chosen_iface.server) return else: self.logger.info("tried to switch to preferred fork but no interfaces are on it") # try to switch to best chain if self.blockchain().parent is None: return # already on best chain filtered = list(filter(lambda iface: iface.blockchain.parent is None, interfaces)) if filtered: self.logger.info("switching to best chain") chosen_iface = random.choice(filtered) await self.switch_to_interface(chosen_iface.server) else: # FIXME switch to best available? 
self.logger.info("tried to switch to best chain but no interfaces are on it") async def switch_to_interface(self, server: str): """Switch to server as our main interface. If no connection exists, queue interface to be started. The actual switch will happen when the interface becomes ready. """ self.default_server = server old_interface = self.interface old_server = old_interface.server if old_interface else None # Stop any current interface in order to terminate subscriptions, # and to cancel tasks in interface.group. # However, for headers sub, give preference to this interface # over unknown ones, i.e. start it again right away. if old_server and old_server != server: await self._close_interface(old_interface) if len(self.interfaces) <= self.num_server: self._start_interface(old_server) if server not in self.interfaces: self.interface = None self._start_interface(server) return i = self.interfaces[server] if old_interface != i: self.logger.info(f"switching to {server}") blockchain_updated = i.blockchain != self.blockchain() self.interface = i await i.group.spawn(self._request_server_info(i)) self.trigger_callback('default_server_changed') self._set_status('connected') self.trigger_callback('network_updated') if blockchain_updated: self.trigger_callback('blockchain_updated') async def _close_interface(self, interface): if interface: with self.interfaces_lock: if self.interfaces.get(interface.server) == interface: self.interfaces.pop(interface.server) if interface.server == self.default_server: self.interface = None await interface.close() @with_recent_servers_lock def _add_recent_server(self, server): # list is ordered if server in self.recent_servers: self.recent_servers.remove(server) self.recent_servers.insert(0, server) self.recent_servers = self.recent_servers[:NUM_RECENT_SERVERS] self._save_recent_servers() async def connection_down(self, interface: Interface): '''A connection to server either went down, or was never made. 
We distinguish by whether it is in self.interfaces.''' if not interface: return server = interface.server self.disconnected_servers.add(server) if server == self.default_server: self._set_status('disconnected') await self._close_interface(interface) self.trigger_callback('network_updated') def get_network_timeout_seconds(self, request_type=NetworkTimeout.Generic) -> int: if self.oneserver and not self.auto_connect: return request_type.MOST_RELAXED if self.proxy: return request_type.RELAXED return request_type.NORMAL @ignore_exceptions # do not kill main_taskgroup @log_exceptions async def _run_new_interface(self, server): interface = Interface(self, server, self.proxy) # note: using longer timeouts here as DNS can sometimes be slow! timeout = self.get_network_timeout_seconds(NetworkTimeout.Generic) try: await asyncio.wait_for(interface.ready, timeout) except BaseException as e: self.logger.info(f"couldn't launch iface {server} -- {repr(e)}") await interface.close() return else: with self.interfaces_lock: assert server not in self.interfaces self.interfaces[server] = interface finally: try: self.connecting.remove(server) except KeyError: pass if server == self.default_server: await self.switch_to_interface(server) self._add_recent_server(server) self.trigger_callback('network_updated') def check_interface_against_healthy_spread_of_connected_servers(self, iface_to_check) -> bool: # main interface is exempt. 
this makes switching servers easier if iface_to_check.is_main_server(): return True if not iface_to_check.bucket_based_on_ipaddress(): return True # bucket connected interfaces with self.interfaces_lock: interfaces = list(self.interfaces.values()) if iface_to_check in interfaces: interfaces.remove(iface_to_check) buckets = defaultdict(list) for iface in interfaces: buckets[iface.bucket_based_on_ipaddress()].append(iface) # check proposed server against buckets onion_servers = buckets[BUCKET_NAME_OF_ONION_SERVERS] if iface_to_check.is_tor(): # keep number of onion servers below half of all connected servers if len(onion_servers) > NUM_TARGET_CONNECTED_SERVERS // 2: return False else: bucket = iface_to_check.bucket_based_on_ipaddress() if len(buckets[bucket]) > 0: return False return True async def _init_headers_file(self): b = blockchain.get_best_chain() filename = b.path() length = DISK_HEADER_SIZE * len(constants.net.CHECKPOINTS) * 2016 if not os.path.exists(filename) or os.path.getsize(filename) < length: with open(filename, 'wb') as f: if length > 0: f.seek(length-1) f.write(b'\x00') util.ensure_sparse_file(filename) with b.lock: b.update_size() def best_effort_reliable(func): async def make_reliable_wrapper(self, *args, **kwargs): for i in range(10): iface = self.interface # retry until there is a main interface if not iface: await asyncio.sleep(0.1) continue # try again # wait for it to be usable iface_ready = iface.ready iface_disconnected = iface.got_disconnected await asyncio.wait([iface_ready, iface_disconnected], return_when=asyncio.FIRST_COMPLETED) if not iface_ready.done() or iface_ready.cancelled(): await asyncio.sleep(0.1) continue # try again # try actual request success_fut = asyncio.ensure_future(func(self, *args, **kwargs)) await asyncio.wait([success_fut, iface_disconnected], return_when=asyncio.FIRST_COMPLETED) if success_fut.done() and not success_fut.cancelled(): if success_fut.exception(): try: raise success_fut.exception() except 
RequestTimedOut: await iface.close() await iface_disconnected continue # try again return success_fut.result() # otherwise; try again raise BestEffortRequestFailed('no interface to do request on... gave up.') return make_reliable_wrapper def catch_server_exceptions(func): async def wrapper(self, *args, **kwargs): try: return await func(self, *args, **kwargs) except aiorpcx.jsonrpc.CodeMessageError as e: raise UntrustedServerReturnedError(original_exception=e) from e return wrapper @best_effort_reliable @catch_server_exceptions async def get_merkle_for_transaction(self, tx_hash: str, tx_height: int) -> dict: if not is_hash256_str(tx_hash): raise Exception(f"{repr(tx_hash)} is not a txid") if not is_non_negative_integer(tx_height): raise Exception(f"{repr(tx_height)} is not a block height") return await self.interface.session.send_request('blockchain.transaction.get_merkle', [tx_hash, tx_height]) @best_effort_reliable async def broadcast_transaction(self, tx, *, timeout=None) -> None: if timeout is None: timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent) try: out = await self.interface.session.send_request('blockchain.transaction.broadcast', [str(tx)], timeout=timeout) # note: both 'out' and exception messages are untrusted input from the server except (RequestTimedOut, asyncio.CancelledError, asyncio.TimeoutError): raise # pass-through except aiorpcx.jsonrpc.CodeMessageError as e: self.logger.info(f"broadcast_transaction error [DO NOT TRUST THIS MESSAGE]: {repr(e)}") raise TxBroadcastServerReturnedError(self.sanitize_tx_broadcast_response(e.message)) from e except BaseException as e: # intentional BaseException for sanity! 
self.logger.info(f"broadcast_transaction error2 [DO NOT TRUST THIS MESSAGE]: {repr(e)}") send_exception_to_crash_reporter(e) raise TxBroadcastUnknownError() from e if out != tx.txid(): self.logger.info(f"unexpected txid for broadcast_transaction [DO NOT TRUST THIS MESSAGE]: {out} != {tx.txid()}") raise TxBroadcastHashMismatch(_("Server returned unexpected transaction ID.")) @staticmethod def sanitize_tx_broadcast_response(server_msg) -> str: # Unfortunately, bitcoind and hence the Electrum protocol doesn't return a useful error code. # So, we use substring matching to grok the error message. # server_msg is untrusted input so it should not be shown to the user. see #4968 server_msg = str(server_msg) server_msg = server_msg.replace("\n", r"\n") # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/policy/policy.cpp # grep "reason =" policy_error_messages = { r"version": _("Transaction uses non-standard version."), r"tx-size": _("The transaction was rejected because it is too large (in bytes)."), r"scriptsig-size": None, r"scriptsig-not-pushonly": None, r"scriptpubkey": None, r"bare-multisig": None, r"dust": _("Transaction could not be broadcast due to dust outputs."), r"multi-op-return": _("The transaction was rejected because it contains multiple OP_RETURN outputs."), } for substring in policy_error_messages: if substring in server_msg: msg = policy_error_messages[substring] return msg if msg else substring # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/script/script_error.cpp script_error_messages = { r"Script evaluated without error but finished with a false/empty top stack element", r"Script failed an OP_VERIFY operation", r"Script failed an OP_EQUALVERIFY operation", r"Script failed an OP_CHECKMULTISIGVERIFY operation", r"Script failed an OP_CHECKSIGVERIFY operation", r"Script failed an OP_NUMEQUALVERIFY operation", r"Script is too big", r"Push value size limit exceeded", r"Operation limit 
exceeded", r"Stack size limit exceeded", r"Signature count negative or greater than pubkey count", r"Pubkey count negative or limit exceeded", r"Opcode missing or not understood", r"Attempted to use a disabled opcode", r"Operation not valid with the current stack size", r"Operation not valid with the current altstack size", r"OP_RETURN was encountered", r"Invalid OP_IF construction", r"Negative locktime", r"Locktime requirement not satisfied", r"Signature hash type missing or not understood", r"Non-canonical DER signature", r"Data push larger than necessary", r"Only non-push operators allowed in signatures", r"Non-canonical signature: S value is unnecessarily high", r"Dummy CHECKMULTISIG argument must be zero", r"OP_IF/NOTIF argument must be minimal", r"Signature must be zero for failed CHECK(MULTI)SIG operation", r"NOPx reserved for soft-fork upgrades", r"Witness version reserved for soft-fork upgrades", r"Public key is neither compressed or uncompressed", r"Extra items left on stack after execution", r"Witness program has incorrect length", r"Witness program was passed an empty witness", r"Witness program hash mismatch", r"Witness requires empty scriptSig", r"Witness requires only-redeemscript scriptSig", r"Witness provided for non-witness script", r"Using non-compressed keys in segwit", r"Using OP_CODESEPARATOR in non-witness script", r"Signature is found in scriptCode", } for substring in script_error_messages: if substring in server_msg: return substring # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/validation.cpp # grep "REJECT_" # should come after script_error.cpp (due to e.g. 
non-mandatory-script-verify-flag) validation_error_messages = { r"coinbase", r"tx-size-small", r"non-final", r"txn-already-in-mempool", r"txn-mempool-conflict", r"txn-already-known", r"non-BIP68-final", r"bad-txns-nonstandard-inputs", r"bad-witness-nonstandard", r"bad-txns-too-many-sigops", r"mempool min fee not met", r"min relay fee not met", r"absurdly-high-fee", r"too-long-mempool-chain", r"bad-txns-spends-conflicting-tx", r"insufficient fee", r"too many potential replacements", r"replacement-adds-unconfirmed", r"mempool full", r"non-mandatory-script-verify-flag", r"mandatory-script-verify-flag-failed", } for substring in validation_error_messages: if substring in server_msg: return substring # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/rpc/rawtransaction.cpp # grep "RPC_TRANSACTION" # grep "RPC_DESERIALIZATION_ERROR" # https://github.com/bitcoin/bitcoin/blob/d7d7d315060620446bd363ca50f95f79d3260db7/src/util/error.cpp rawtransaction_error_messages = { r"Missing inputs", r"transaction already in block chain", r"Transaction already in block chain", r"TX decode failed", r"Peer-to-peer functionality missing or disabled", r"Transaction rejected by AcceptToMemoryPool", r"AcceptToMemoryPool failed", } for substring in rawtransaction_error_messages: if substring in server_msg: return substring # https://github.com/bitcoin/bitcoin/blob/cd42553b1178a48a16017eff0b70669c84c3895c/src/consensus/tx_verify.cpp # grep "REJECT_" tx_verify_error_messages = { r"bad-txns-vin-empty", r"bad-txns-vout-empty", r"bad-txns-oversize", r"bad-txns-vout-negative", r"bad-txns-vout-toolarge", r"bad-txns-txouttotal-toolarge", r"bad-txns-inputs-duplicate", r"bad-cb-length", r"bad-txns-prevout-null", r"bad-txns-inputs-missingorspent", r"bad-txns-premature-spend-of-coinbase", r"bad-txns-inputvalues-outofrange", r"bad-txns-in-belowout", r"bad-txns-fee-outofrange", } for substring in tx_verify_error_messages: if substring in server_msg: return substring # 
otherwise: return _("Unknown error") @best_effort_reliable @catch_server_exceptions async def request_chunk(self, height: int, tip=None, *, can_return_early=False): if not is_non_negative_integer(height): raise Exception(f"{repr(height)} is not a block height") return await self.interface.request_chunk(height, tip=tip, can_return_early=can_return_early) @best_effort_reliable @catch_server_exceptions async def get_transaction(self, tx_hash: str, *, timeout=None) -> str: if not is_hash256_str(tx_hash): raise Exception(f"{repr(tx_hash)} is not a txid") return await self.interface.session.send_request('blockchain.transaction.get', [tx_hash], timeout=timeout) @best_effort_reliable @catch_server_exceptions async def get_history_for_scripthash(self, sh: str) -> List[dict]: if not is_hash256_str(sh): raise Exception(f"{repr(sh)} is not a scripthash") return await self.interface.session.send_request('blockchain.scripthash.get_history', [sh]) @best_effort_reliable @catch_server_exceptions async def listunspent_for_scripthash(self, sh: str) -> List[dict]: if not is_hash256_str(sh): raise Exception(f"{repr(sh)} is not a scripthash") return await self.interface.session.send_request('blockchain.scripthash.listunspent', [sh]) @best_effort_reliable @catch_server_exceptions async def get_balance_for_scripthash(self, sh: str) -> dict: if not is_hash256_str(sh): raise Exception(f"{repr(sh)} is not a scripthash") return await self.interface.session.send_request('blockchain.scripthash.get_balance', [sh]) def blockchain(self) -> Blockchain: interface = self.interface if interface and interface.blockchain is not None: self._blockchain = interface.blockchain return self._blockchain def get_blockchains(self): out = {} # blockchain_id -> list(interfaces) with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items()) with self.interfaces_lock: interfaces_values = list(self.interfaces.values()) for chain_id, bc in blockchain_items: r = list(filter(lambda i: 
i.blockchain==bc, interfaces_values)) if r: out[chain_id] = r return out def _set_preferred_chain(self, chain: Blockchain): height = chain.get_max_forkpoint() header_hash = chain.get_hash(height) self._blockchain_preferred_block = { 'height': height, 'hash': header_hash, } self.config.set_key('blockchain_preferred_block', self._blockchain_preferred_block) async def follow_chain_given_id(self, chain_id: str) -> None: bc = blockchain.blockchains.get(chain_id) if not bc: raise Exception('blockchain {} not found'.format(chain_id)) self._set_preferred_chain(bc) # select server on this chain with self.interfaces_lock: interfaces = list(self.interfaces.values()) interfaces_on_selected_chain = list(filter(lambda iface: iface.blockchain == bc, interfaces)) if len(interfaces_on_selected_chain) == 0: return chosen_iface = random.choice(interfaces_on_selected_chain) # switch to server (and save to config) net_params = self.get_parameters() host, port, protocol = deserialize_server(chosen_iface.server) net_params = net_params._replace(host=host, port=port, protocol=protocol) await self.set_parameters(net_params) async def follow_chain_given_server(self, server_str: str) -> None: # note that server_str should correspond to a connected interface iface = self.interfaces.get(server_str) if iface is None: return self._set_preferred_chain(iface.blockchain) # switch to server (and save to config) net_params = self.get_parameters() host, port, protocol = deserialize_server(server_str) net_params = net_params._replace(host=host, port=port, protocol=protocol) await self.set_parameters(net_params) def get_local_height(self): return self.blockchain().height() def export_checkpoints(self, path): """Run manually to generate blockchain checkpoints. Kept for console use only. 
""" cp = self.blockchain().get_checkpoints() with open(path, 'w', encoding='utf-8') as f: f.write(json.dumps(cp, indent=4)) async def _start(self): assert not self.main_taskgroup self.main_taskgroup = main_taskgroup = SilentTaskGroup() assert not self.interface and not self.interfaces assert not self.connecting and not self.server_queue self.logger.info('starting network') self.disconnected_servers = set([]) self.protocol = deserialize_server(self.default_server)[2] self.server_queue = queue.Queue() self._set_proxy(deserialize_proxy(self.config.get('proxy'))) self._set_oneserver(self.config.get('oneserver', False)) self._start_interface(self.default_server) async def main(): try: await self._init_headers_file() # note: if a task finishes with CancelledError, that # will NOT raise, and the group will keep the other tasks running async with main_taskgroup as group: await group.spawn(self._maintain_sessions()) [await group.spawn(job) for job in self._jobs] except Exception as e: self.logger.exception('') raise e asyncio.run_coroutine_threadsafe(main(), self.asyncio_loop) self.trigger_callback('network_updated') def start(self, jobs: List=None): self._jobs = jobs or [] asyncio.run_coroutine_threadsafe(self._start(), self.asyncio_loop) @log_exceptions async def _stop(self, full_shutdown=False): self.logger.info("stopping network") try: await asyncio.wait_for(self.main_taskgroup.cancel_remaining(), timeout=2) except (asyncio.TimeoutError, asyncio.CancelledError) as e: self.logger.info(f"exc during main_taskgroup cancellation: {repr(e)}") self.main_taskgroup = None # type: TaskGroup self.interface = None # type: Interface self.interfaces = {} # type: Dict[str, Interface] self.connecting.clear() self.server_queue = None if not full_shutdown: self.trigger_callback('network_updated') def stop(self): assert self._loop_thread != threading.current_thread(), 'must not be called from network thread' fut = asyncio.run_coroutine_threadsafe(self._stop(full_shutdown=True), 
self.asyncio_loop) try: fut.result(timeout=2) except (asyncio.TimeoutError, asyncio.CancelledError): pass async def _ensure_there_is_a_main_interface(self): if self.is_connected(): return now = time.time() # if auto_connect is set, try a different server if self.auto_connect and not self.is_connecting(): await self._switch_to_random_interface() # if auto_connect is not set, or still no main interface, retry current if not self.is_connected() and not self.is_connecting(): if self.default_server in self.disconnected_servers: if now - self.server_retry_time > SERVER_RETRY_INTERVAL: self.disconnected_servers.remove(self.default_server) self.server_retry_time = now else: await self.switch_to_interface(self.default_server) async def _maintain_sessions(self): async def launch_already_queued_up_new_interfaces(): while self.server_queue.qsize() > 0: server = self.server_queue.get() await self.main_taskgroup.spawn(self._run_new_interface(server)) async def maybe_queue_new_interfaces_to_be_launched_later(): now = time.time() for i in range(self.num_server - len(self.interfaces) - len(self.connecting)): # FIXME this should try to honour "healthy spread of connected servers" self._start_random_interface() if now - self.nodes_retry_time > NODES_RETRY_INTERVAL: self.logger.info('network: retrying connections') self.disconnected_servers = set([]) self.nodes_retry_time = now async def maintain_healthy_spread_of_connected_servers(): with self.interfaces_lock: interfaces = list(self.interfaces.values()) random.shuffle(interfaces) for iface in interfaces: if not self.check_interface_against_healthy_spread_of_connected_servers(iface): self.logger.info(f"disconnecting from {iface.server}. 
too many connected " f"servers already in bucket {iface.bucket_based_on_ipaddress()}") await self._close_interface(iface) async def maintain_main_interface(): await self._ensure_there_is_a_main_interface() if self.is_connected(): if self.config.is_fee_estimates_update_required(): await self.interface.group.spawn(self._request_fee_estimates, self.interface) while True: try: await launch_already_queued_up_new_interfaces() await maybe_queue_new_interfaces_to_be_launched_later() await maintain_healthy_spread_of_connected_servers() await maintain_main_interface() except asyncio.CancelledError: # suppress spurious cancellations group = self.main_taskgroup if not group or group._closed: raise await asyncio.sleep(0.1) @classmethod async def _send_http_on_proxy(cls, method: str, url: str, params: str = None, body: bytes = None, json: dict = None, headers=None, on_finish=None, timeout=None): async def default_on_finish(resp: ClientResponse): resp.raise_for_status() return await resp.text() if headers is None: headers = {} if on_finish is None: on_finish = default_on_finish network = cls.get_instance() proxy = network.proxy if network else None async with make_aiohttp_session(proxy, timeout=timeout) as session: if method == 'get': async with session.get(url, params=params, headers=headers) as resp: return await on_finish(resp) elif method == 'post': assert body is not None or json is not None, 'body or json must be supplied if method is post' if body is not None: async with session.post(url, data=body, headers=headers) as resp: return await on_finish(resp) elif json is not None: async with session.post(url, json=json, headers=headers) as resp: return await on_finish(resp) else: assert False @classmethod def send_http_on_proxy(cls, method, url, **kwargs): network = cls.get_instance() if network: assert network._loop_thread is not threading.currentThread() loop = network.asyncio_loop else: loop = asyncio.get_event_loop() coro = 
asyncio.run_coroutine_threadsafe(cls._send_http_on_proxy(method, url, **kwargs), loop) # note: _send_http_on_proxy has its own timeout, so no timeout here: return coro.result() # methods used in scripts async def get_peers(self): while not self.is_connected(): await asyncio.sleep(1) session = self.interface.session return parse_servers(await session.send_request('server.peers.subscribe')) async def send_multiple_requests(self, servers: List[str], method: str, params: Sequence): responses = dict() async def get_response(server): interface = Interface(self, server, self.proxy) timeout = self.get_network_timeout_seconds(NetworkTimeout.Urgent) try: await asyncio.wait_for(interface.ready, timeout) except BaseException as e: await interface.close() return try: res = await interface.session.send_request(method, params, timeout=10) except Exception as e: res = e responses[interface.server] = res async with TaskGroup() as group: for server in servers: await group.spawn(get_response(server)) return responses
PypiClean
/Flask-MongoKit-0.6.tar.gz/Flask-MongoKit-0.6/flask_mongokit.py
from __future__ import absolute_import

import bson
# `Collection` is re-exported for users of this module; keep it imported.
from mongokit import Connection, Database, Collection, Document
from werkzeug.routing import BaseConverter
from flask import abort, _request_ctx_stack

try:  # pragma: no cover
    # Flask >= 0.9: prefer the application context stack.
    from flask import _app_ctx_stack
    ctx_stack = _app_ctx_stack
except ImportError:  # pragma: no cover
    ctx_stack = _request_ctx_stack


class AuthenticationIncorrect(Exception):
    """Raised when MongoDB authentication with the configured
    ``MONGODB_USERNAME``/``MONGODB_PASSWORD`` fails."""
    pass


class BSONObjectIdConverter(BaseConverter):
    """A simple converter for the RESTfull URL routing system of Flask.

    .. code-block:: python

        @app.route('/<ObjectId:task_id>')
        def show_task(task_id):
            task = db.Task.get_from_id(task_id)
            return render_template('task.html', task=task)

    It checks the validate of the id and converts it into a
    :class:`bson.objectid.ObjectId` object. The converter will be
    automatically registered by the initialization of
    :class:`~flask.ext.mongokit.MongoKit` with keyword :attr:`ObjectId`.
    """

    def to_python(self, value):
        """Convert a URL fragment to an ObjectId; 400 on malformed ids."""
        try:
            return bson.ObjectId(value)
        except bson.errors.InvalidId:
            # ``abort()`` raises an HTTPException itself; the original
            # ``raise abort(400)`` never reached its ``raise``.
            abort(400)

    def to_url(self, value):
        """Render an ObjectId back into its URL string form."""
        return str(value)


class Document(Document):
    # Deliberately shadows ``mongokit.Document`` so registered documents
    # gain the Flask-style 404 helpers below.

    def get_or_404(self, id):
        """This method get one document over the _id field. If there no
        document with this id then it will raised a 404 error.

        :param id: The id from the document. The most time there will be
                   an :class:`bson.objectid.ObjectId`.
        """
        doc = self.get_from_id(id)
        if doc is None:
            abort(404)
        else:
            return doc

    def find_one_or_404(self, *args, **kwargs):
        """This method get one document over normal query parameter like
        :meth:`~flask.ext.mongokit.Document.find_one` but if there no
        document then it will raise a 404 error.
        """
        doc = self.find_one(*args, **kwargs)
        if doc is None:
            abort(404)
        else:
            return doc


class MongoKit(object):
    """This class is used to integrate `MongoKit`_ into a Flask application.

    :param app: The Flask application will be bound to this MongoKit
                instance. If an app is not provided at initialization
                time than it must be provided later by calling
                :meth:`init_app` manually.

    .. _MongoKit: http://namlook.github.com/mongokit/
    """

    def __init__(self, app=None):
        #: :class:`list` of :class:`mongokit.Document`
        #: which will be automated registed at connection
        self.registered_documents = []

        if app is not None:
            self.app = app
            self.init_app(self.app)
        else:
            self.app = None

    def init_app(self, app):
        """This method connect your ``app`` with this extension. Flask-
        MongoKit will now take care about to open and close the connection
        to your MongoDB.

        Also it registers the
        :class:`flask.ext.mongokit.BSONObjectIdConverter`
        as a converter with the key word **ObjectId**.

        :param app: The Flask application will be bound to this MongoKit
                    instance.
        """
        app.config.setdefault('MONGODB_HOST', '127.0.0.1')
        app.config.setdefault('MONGODB_PORT', 27017)
        app.config.setdefault('MONGODB_DATABASE', 'flask')
        app.config.setdefault('MONGODB_SLAVE_OKAY', False)
        app.config.setdefault('MONGODB_USERNAME', None)
        app.config.setdefault('MONGODB_PASSWORD', None)

        # Pick the newest teardown hook the installed Flask version offers.
        # 0.9 and later
        # no coverage check because there is everytime only one
        if hasattr(app, 'teardown_appcontext'):  # pragma: no cover
            app.teardown_appcontext(self._teardown_request)
        # 0.7 to 0.8
        elif hasattr(app, 'teardown_request'):  # pragma: no cover
            app.teardown_request(self._teardown_request)
        # Older Flask versions
        else:  # pragma: no cover
            app.after_request(self._teardown_request)

        # register extension with app only to say "I'm here"
        app.extensions = getattr(app, 'extensions', {})
        app.extensions['mongokit'] = self

        app.url_map.converters['ObjectId'] = BSONObjectIdConverter

        self.app = app

    def register(self, documents):
        """Register one or more :class:`mongokit.Document` instances to the
        connection.

        Can be also used as a decorator on documents:

        .. code-block:: python

            db = MongoKit(app)

            @db.register
            class Task(Document):
                structure = {
                   'title': unicode,
                   'text': unicode,
                   'creation': datetime,
                }

        :param documents: A :class:`list` of :class:`mongokit.Document`.
        """
        #enable decorator usage as in mongokit.Connection
        decorator = None
        if not isinstance(documents, (list, tuple, set, frozenset)):
            # we assume that the user used this as a decorator
            # using @register syntax or using db.register(SomeDoc)
            # we stock the class object in order to return it later
            decorator = documents
            documents = [documents]

        for document in documents:
            if document not in self.registered_documents:
                self.registered_documents.append(document)

        if decorator is None:
            return self.registered_documents
        else:
            return decorator

    def connect(self):
        """Connect to the MongoDB server and register the documents from
        :attr:`registered_documents`. If you set ``MONGODB_USERNAME`` and
        ``MONGODB_PASSWORD`` then you will be authenticated at the
        ``MONGODB_DATABASE``.
        """
        if self.app is None:
            raise RuntimeError('The flask-mongokit extension was not init to '
                               'the current application. Please make sure '
                               'to call init_app() first.')
        ctx = ctx_stack.top
        # Connection and Database objects are cached on the context so a
        # request only ever opens one of each.
        mongokit_connection = getattr(ctx, 'mongokit_connection', None)
        if mongokit_connection is None:
            ctx.mongokit_connection = Connection(
                host=ctx.app.config.get('MONGODB_HOST'),
                port=ctx.app.config.get('MONGODB_PORT'),
                slave_okay=ctx.app.config.get('MONGODB_SLAVE_OKAY')
            )
        ctx.mongokit_connection.register(self.registered_documents)
        mongokit_database = getattr(ctx, 'mongokit_database', None)
        if mongokit_database is None:
            ctx.mongokit_database = Database(
                ctx.mongokit_connection,
                ctx.app.config.get('MONGODB_DATABASE')
            )
        if ctx.app.config.get('MONGODB_USERNAME') is not None:
            auth_success = ctx.mongokit_database.authenticate(
                ctx.app.config.get('MONGODB_USERNAME'),
                ctx.app.config.get('MONGODB_PASSWORD')
            )
            if not auth_success:
                raise AuthenticationIncorrect

    @property
    def connected(self):
        """Connection status to your MongoDB."""
        ctx = ctx_stack.top
        return getattr(ctx, 'mongokit_connection', None) is not None

    def disconnect(self):
        """Close the connection to your MongoDB."""
        if self.connected:
            ctx = ctx_stack.top
            ctx.mongokit_connection.disconnect()
            del ctx.mongokit_connection
            del ctx.mongokit_database

    def _teardown_request(self, response):
        # Registered either as a teardown callback (argument is an
        # exception or None) or, on very old Flask, as an after_request
        # callback (argument is the response, which must be returned).
        self.disconnect()
        return response

    def _database(self):
        """Return the current context's Database, connecting on first use."""
        if not self.connected:
            self.connect()
        return getattr(ctx_stack.top, "mongokit_database")

    def __getattr__(self, name):
        # The original signature carried an unused ``**kwargs``; Python's
        # attribute protocol only ever passes ``name``, so it was dropped.
        return getattr(self._database(), name)

    def __getitem__(self, name):
        return self._database()[name]
PypiClean
/Dabo-0.9.16.tar.gz/Dabo-0.9.16/dabo/lib/reporting_stefano/serialization/xmlserializer.py
from xml.parsers import expat
from serialization import *


def capname(name):
    """Return *name* with its first character upper-cased (tag -> class name)."""
    return name[0].upper() + name[1:]


def loname(name):
    """Return *name* with its first character lower-cased (class name -> tag)."""
    return name[0].lower() + name[1:]


class DeserializingParser(object):
    """Expat-based SAX parser that rebuilds a tree of serializable objects
    rooted at an instance of *rootCls* from an XML document.

    The expected root tag is the lower-cased class name of *rootCls*.
    """

    def __init__(self, rootCls):
        # Stack of tag-name tuples that are legal at the current depth.
        self.expectedNamesStack = [(loname(rootCls.__name__),)]
        self.rootCls = rootCls
        # Stack of partially-built objects, root at the bottom.
        self.objStack = []
        # Name of the attribute currently being read as character data,
        # or None when not inside an attribute tag.
        self.currentAttrName = None
        self.cdata = ''

    def StartElement(self, name, attributes):
        """SAX start-tag handler: open either an attribute or a child object."""
        expectedNames = self.expectedNamesStack[-1]
        assert name in expectedNames, 'Unexpected tag %r (expecting: %s)' \
            % (name, ', '.join(expectedNames))
        if len(self.objStack) == 0:
            # root element
            self.rootObj = self.rootCls()
            self.objStack.append(self.rootObj)
            self.expectedNamesStack.append(self.rootObj.getExpectedNames())
        else:
            parentObj = self.objStack[-1]
            attrName, attrType = parentObj.getChildObjType(name)
            if isinstance(attrType, SerializableAttribute):
                # tag represents an attribute of the object currently on
                # top of the stack; buffer its character data
                self.currentAttrName = attrName
                self.cdata = ''
            else:
                # tag represents a new child that should be pushed on the stack
                newObj = attrType.attach(parentObj, name, attrName)
                if len(attributes) > 0:
                    newObj._xmlAttributes = dict(attributes)
                self.objStack.append(newObj)
                self.expectedNamesStack.append(newObj.getExpectedNames())

    def EndElement(self, name):
        """SAX end-tag handler: commit the attribute or pop the child object."""
        if self.currentAttrName is not None:
            # write the buffered attribute text to the object
            self.objStack[-1].srcValues[self.currentAttrName] = self.cdata
            self.currentAttrName = None
        else:
            # pop the finished child object from the stack
            self.objStack.pop()
            self.expectedNamesStack.pop()

    def CharacterData(self, data):
        """SAX text handler: accumulate character data (may arrive in pieces)."""
        self.cdata = self.cdata + data

    def Parse(self, xml):
        """Parse *xml* (a string) and return the reconstructed root object."""
        # Create a SAX parser and wire up the event handlers.
        parser = expat.ParserCreate()
        parser.StartElementHandler = self.StartElement
        parser.EndElementHandler = self.EndElement
        parser.CharacterDataHandler = self.CharacterData
        # Parse the XML document (isfinal=1: this is the whole input).
        parser.Parse(xml, 1)
        return self.rootObj

    def ParseFromFile(self, filename):
        """Parse the XML file at *filename* and return the root object."""
        # Use a context manager so the file handle is closed promptly
        # (the original ``open(filename,'r').read()`` leaked it until GC).
        with open(filename, 'r') as f:
            return self.Parse(f.read())


def deserialize(xml, rootCls):
    """Deserialize *xml* into an instance of *rootCls*.

    *xml* may be either raw XML text or the path of a file containing it;
    a single-line argument that names an existing file is treated as a path.
    """
    import os
    parser = DeserializingParser(rootCls)
    if "\n" not in xml and os.path.exists(xml):
        # argument was a file path
        return parser.ParseFromFile(xml)
    else:
        # argument must have been raw xml
        return parser.Parse(xml)
PypiClean
/Flask-KQMaps-0.4.2.tar.gz/Flask-KQMaps-0.4.2/flask_kqmaps/static/kqwebclient/leaflet/3rd_libs/Leaflet.ExtraMarkers/js/leaflet.extra-markers.min.js
!function(e,r){"object"==typeof exports&&"undefined"!=typeof module?r(exports):"function"==typeof define&&define.amd?define(["exports"],r):r((e.leaflet=e.leaflet||{},e.leaflet["extra-markers"]={}))}(this,function(e){"use strict";var r=L.ExtraMarkers={};r.version=L.ExtraMarkers.version="1.0.8",r.Icon=L.ExtraMarkers.Icon=L.Icon.extend({options:{iconSize:[35,45],iconAnchor:[17,42],popupAnchor:[1,-32],shadowAnchor:[10,12],shadowSize:[36,16],className:"",prefix:"",extraClasses:"",shape:"circle",icon:"",innerHTML:"",markerColor:"red",svgBorderColor:"#fff",svgOpacity:1,iconColor:"#fff",number:"",svg:!1},initialize:function(e){e=L.Util.setOptions(this,e)},createIcon:function(){var e=document.createElement("div"),r=this.options;return r.icon&&(e.innerHTML=this._createInner()),r.innerHTML&&(e.innerHTML=r.innerHTML),r.bgPos&&(e.style.backgroundPosition=-r.bgPos.x+"px "+-r.bgPos.y+"px"),r.svg?this._setIconStyles(e,"svg"):this._setIconStyles(e,r.shape+"-"+r.markerColor),e},_createInner:function(){var e="",r="",t=this.options;if(t.iconColor&&(e="style='color: "+t.iconColor+"' "),t.number&&(r="number='"+t.number+"' "),t.svg){var s='<svg xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 69.529271 95.44922" style="fill:'+t.markerColor+";stroke:"+t.svgBorderColor+";fill-opacity:"+t.svgOpacity+';" height="100%" width="100%" version="1.1" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/"><g transform="translate(-139.52 -173.21)"><path d="m174.28 173.21c-19.199 0.00035-34.764 15.355-34.764 34.297 0.007 6.7035 1.5591 12.813 5.7461 18.854l0.0234 0.0371 28.979 42.262 28.754-42.107c3.1982-5.8558 5.9163-11.544 6.0275-19.045-0.0001-18.942-15.565-34.298-34.766-34.297z"/></g></svg>';return"square"===t.shape&&(s='<svg xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 69.457038 96.523441" 
style="fill:'+t.markerColor+";stroke:"+t.svgBorderColor+";fill-opacity:"+t.svgOpacity+';" height="100%" width="100%" version="1.1" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/"><g transform="translate(-545.27 -658.39)"><path d="m545.27 658.39v65.301h22.248l12.48 31.223 12.676-31.223h22.053v-65.301h-69.457z"/></g></svg>'),"star"===t.shape&&(s='<svg style="top:0; fill:'+t.markerColor+";stroke:"+t.svgBorderColor+";fill-opacity:"+t.svgOpacity+';" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" height="100%" width="100%" version="1.1" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" viewBox="0 0 77.690999 101.4702"><g transform="translate(-101.15 -162.97)"><g transform="matrix(1 0 0 1.0165 -65.712 -150.28)"><path d="m205.97 308.16-11.561 11.561h-16.346v16.346l-11.197 11.197 11.197 11.197v15.83h15.744l11.615 33.693 11.467-33.568 0.125-0.125h16.346v-16.346l11.197-11.197-11.197-11.197v-15.83h-15.83l-11.561-11.561z"/></g></g></svg>'),"penta"===t.shape&&(s='<svg style="fill:'+t.markerColor+";stroke:"+t.svgBorderColor+";fill-opacity:"+t.svgOpacity+';" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 71.550368 96.362438" height="100%" width="100%" version="1.1" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/"><g transform="translate(-367.08 -289.9)"><path d="m367.08 322.5 17.236-32.604h36.151l18.164 32.25-35.665 64.112z"/></g></svg>'),s+"<i "+r+e+"class='"+t.extraClasses+" "+t.prefix+" "+t.icon+"'></i>"}return"<i "+r+e+"class='"+t.extraClasses+" "+t.prefix+" "+t.icon+"'></i>"},_setIconStyles:function(e,r){var t,s,o=this.options,n=L.point(o["shadow"===r?"shadowSize":"iconSize"]);"shadow"===r?(t=L.point(o.shadowAnchor||o.iconAnchor),s="shadow"):(t=L.point(o.iconAnchor),s="icon"),!t&&n&&(t=n.divideBy(2,!0)),e.className="leaflet-marker-"+s+" extra-marker extra-marker-"+r+" 
"+o.className,t&&(e.style.marginLeft=-t.x+"px",e.style.marginTop=-t.y+"px"),n&&(e.style.width=n.x+"px",e.style.height=n.y+"px")},createShadow:function(){var e=document.createElement("div");return this._setIconStyles(e,"shadow"),e}}),r.icon=L.ExtraMarkers.icon=function(e){return new L.ExtraMarkers.Icon(e)},e.ExtraMarkers=r,Object.defineProperty(e,"__esModule",{value:!0})});
PypiClean
/Helx-0.12.1.tar.gz/Helx-0.12.1/helx/environment/gym.py
from __future__ import annotations

import re
from collections import defaultdict
from typing import Dict, List

import gym
import gym.core
import gym.utils.seeding
import jax
import jax.numpy as jnp
import numpy as np
from chex import Array
from gym.envs.registration import parse_env_id, registry
from gym_minigrid.minigrid import MiniGridEnv
from gym_minigrid.wrappers import ImgObsWrapper
from gym.envs.registration import EnvSpec

from ..logging import get_logger
from ..mdp import Action, StepType, Timestep
from ..spaces import Continuous, Space
from .base import Environment

# module-level logger (name shadows the stdlib ``logging`` module on purpose
# within this file)
logging = get_logger()


class FromGymEnv(Environment[gym.Env]):
    """Static class to convert between gym and helx environments."""

    def __init__(self, env: gym.core.Env):
        # MiniGrid observations carry a string-valued `mission` field, which
        # helx cannot represent; strip it by wrapping with ImgObsWrapper.
        if isinstance(env.unwrapped, MiniGridEnv):
            msg = (
                "String arrays are not supported by helx yet."
                " The `mission` field of the observations returned by"
                " MiniGrid environments contain string arrays."
                " We get rid of the `mission` field by wrapping `env`"
                " around an `ImgObsWrapper`."
            )
            logging.warning(msg)
            env = ImgObsWrapper(env)
        super().__init__(env)

    def action_space(self) -> Space:
        # Converted lazily from the gym space and cached on first access.
        if self._action_space is not None:
            return self._action_space

        self._action_space = Space.from_gym(self._env.action_space)
        return self._action_space

    def observation_space(self) -> Space:
        # Converted lazily from the gym space and cached on first access.
        if self._observation_space is not None:
            return self._observation_space

        self._observation_space = Space.from_gym(self._env.observation_space)
        return self._observation_space

    def reward_space(self) -> Space:
        # Built lazily from the env's (min, max) reward_range and cached.
        if self._reward_space is not None:
            return self._reward_space

        minimum = self._env.reward_range[0]
        maximum = self._env.reward_range[1]
        self._reward_space = Continuous((1,), (minimum,), (maximum,))
        return self._reward_space

    def state(self) -> Array:
        # Return the last observation seen by reset()/step().
        if self._current_observation is None:
            raise ValueError(
                "Environment not initialized. Run `reset` first, to set a starting state."
            )
        return self._current_observation

    def reset(self, seed: int | None = None) -> Timestep:
        try:
            obs, _ = self._env.reset(seed=seed)
        except TypeError:
            # TODO(epignatelli): remove try/except when gym3 is updated.
            # see: https://github.com/openai/gym3/issues/8
            # NOTE(review): on this fallback path the seed is silently
            # dropped — the older reset() signature does not accept one.
            obs, _ = self._env.reset()
        self._current_observation = jnp.asarray(obs)
        return Timestep(obs, None, StepType.TRANSITION)

    def step(self, action: Action) -> Timestep:
        # gym expects a host array, not a jax value
        action_ = np.asarray(action)
        next_step = self._env.step(action_)
        self._current_observation = jnp.asarray(next_step[0])
        return Timestep.from_gym(next_step)

    def seed(self, seed: int) -> None:
        # Seed gym's RNG and derive the jax PRNG key from the same seed.
        self._env.np_random, seed = gym.utils.seeding.np_random(seed)
        self._seed = seed
        self._key = jax.random.PRNGKey(seed)

    def render(self, mode: str = "human"):
        self._env.render_mode = mode
        return self._env.render()

    def close(self) -> None:
        return self._env.close()

    def name(self) -> str:
        # Class name of the innermost (unwrapped) gym env.
        return self._env.unwrapped.__class__.__name__


def list_envs(namespace: str) -> List[str]:
    """Return the ids of registered gym envs whose entry point mentions
    *namespace* (case-insensitive substring match)."""
    env_specs: Dict[str, EnvSpec] = {
        k: v for k, v in registry.items() if namespace.lower() in v.entry_point.lower()
    }
    return list(env_specs.keys())
PypiClean
/MirrorHerokuX-6.0.3-py3-none-any.whl/bot/modules/watch.py
from telegram.ext import CommandHandler
from telegram import Bot, Update
from bot import Interval, DOWNLOAD_DIR, DOWNLOAD_STATUS_UPDATE_INTERVAL, dispatcher, LOGGER
from bot.helper.ext_utils.bot_utils import setInterval
from bot.helper.telegram_helper.message_utils import update_all_messages, sendMessage, sendStatusMessage
from .mirror import MirrorListener
from bot.helper.mirror_utils.download_utils.youtube_dl_download_helper import YoutubeDLHelper
from bot.helper.telegram_helper.bot_commands import BotCommands
from bot.helper.telegram_helper.filters import CustomFilters
import threading


def _watch(bot: Bot, update, isTar=False):
    """Handle a /watch (or /tarwatch) command: parse the link, quality and
    optional custom name from the message, then start a youtube-dl download
    in a background thread.

    Expected message shape: ``/watch <link> [quality] |[CustomName]``.
    """
    mssg = update.message.text
    message_args = mssg.split(' ')
    name_args = mssg.split('|')
    try:
        link = message_args[1]
    except IndexError:
        # No link given: reply with usage help and bail out.
        msg = f"/{BotCommands.WatchCommand} [yt_dl supported link] [quality] |[CustomName] to mirror with youtube_dl.\n\n"
        msg += "<b>Note :- Quality and custom name are optional</b>\n\nExample of quality :- audio, 144, 240, 360, 480, 720, 1080, 2160."
        msg += "\n\nIf you want to use custom filename, plz enter it after |"
        msg += f"\n\nExample :-\n<code>/{BotCommands.WatchCommand} https://youtu.be/ocX2FN1nguA 720 |My video bro</code>\n\n"
        msg += "This file will be downloaded in 720p quality and it's name will be <b>My video bro</b>"
        sendMessage(msg, bot, update)
        return
    # Quality parsing. Any IndexError — a genuinely missing argument or the
    # deliberately raised one below — falls through to the default format.
    try:
        if "|" in mssg:
            # NOTE: rebinds `mssg` from str to list; quality is the third
            # space-separated token before the '|'.
            mssg = mssg.split("|")
            qual = mssg[0].split(" ")[2]
            # Empty token (e.g. "/watch link |Name"): treat as "no quality
            # given" by reusing the IndexError path below.
            if qual == "": raise IndexError
        else:
            qual = message_args[2]
        # "audio" is passed through verbatim; any other value is treated as a
        # max video height and wrapped into a youtube-dl format selector.
        if qual != "audio":
            qual = f'bestvideo[height<={qual}]+bestaudio/best[height<={qual}]'
    except IndexError:
        # No (usable) quality argument: best available video+audio.
        qual = "bestvideo+bestaudio/best"
    try:
        name = name_args[1]
    except IndexError:
        name = ""
    # Tag the requester (username of the replied-to user, if any) so status
    # messages can mention them.
    reply_to = update.message.reply_to_message
    if reply_to is not None:
        tag = reply_to.from_user.username
    else:
        tag = None
    pswd = ""
    listener = MirrorListener(bot, update, pswd, isTar, tag)
    ydl = YoutubeDLHelper(listener)
    # Download runs in a daemon-less worker thread; the handler returns at once.
    threading.Thread(target=ydl.add_download,args=(link, f'{DOWNLOAD_DIR}{listener.uid}', qual, name)).start()
    sendStatusMessage(update, bot)
    # Start the global status-refresh timer once, on the first download.
    if len(Interval) == 0:
        Interval.append(setInterval(DOWNLOAD_STATUS_UPDATE_INTERVAL, update_all_messages))


def watchTar(update, context):
    """/tarwatch entry point: download and archive as tar."""
    _watch(context.bot, update, True)


def watch(update, context):
    """/watch entry point: plain download."""
    _watch(context.bot, update)


# Register both commands; restricted to authorized chats/users.
mirror_handler = CommandHandler(BotCommands.WatchCommand, watch,
                                filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
tar_mirror_handler = CommandHandler(BotCommands.TarWatchCommand, watchTar,
                                    filters=CustomFilters.authorized_chat | CustomFilters.authorized_user, run_async=True)
dispatcher.add_handler(mirror_handler)
dispatcher.add_handler(tar_mirror_handler)
PypiClean
/Nuitka-1.8.tar.gz/Nuitka-1.8/nuitka/build/inline_copy/lib/scons-4.3.0/SCons/Script/SConscript.py
import SCons
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Environment
import SCons.Errors
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Platform
import SCons.SConf
import SCons.Tool
from SCons.Util import is_List, is_String, is_Dict, flatten
from SCons.Node import SConscriptNodes
from . import Main

import os
import os.path
import re
import sys
import traceback
import time


class SConscriptReturn(Exception):
    # Raised by Return(stop=True) to unwind out of an exec'd SConscript.
    pass

launch_dir = os.path.abspath(os.curdir)

GlobalDict = None

# global exports set by Export():
global_exports = {}

# chdir flag
sconscript_chdir = 1


def get_calling_namespaces():
    """Return the locals and globals for the function that called
    into this module in the current call stack."""
    try: 1//0
    except ZeroDivisionError:
        # Don't start iterating with the current stack-frame to
        # prevent creating reference cycles (f_back is safe).
        frame = sys.exc_info()[2].tb_frame.f_back

    # Find the first frame that *isn't* from this file.  This means
    # that we expect all of the SCons frames that implement an Export()
    # or SConscript() call to be in this file, so that we can identify
    # the first non-Script.SConscript frame as the user's local calling
    # environment, and the locals and globals dictionaries from that
    # frame as the calling namespaces.  See the comment below preceding
    # the DefaultEnvironmentCall block for even more explanation.
    while frame.f_globals.get("__name__") == __name__:
        frame = frame.f_back

    return frame.f_locals, frame.f_globals


def compute_exports(exports):
    """Compute a dictionary of exports given one of the parameters
    to the Export() function or the exports argument to SConscript()."""

    loc, glob = get_calling_namespaces()

    retval = {}
    try:
        for export in exports:
            if is_Dict(export):
                retval.update(export)
            else:
                # Locals of the caller shadow its globals.
                try:
                    retval[export] = loc[export]
                except KeyError:
                    retval[export] = glob[export]
    except KeyError as x:
        raise SCons.Errors.UserError("Export of non-existent variable '%s'"%x)

    return retval


class Frame:
    """A frame on the SConstruct/SConscript call stack"""
    def __init__(self, fs, exports, sconscript):
        self.globals = BuildDefaultGlobals()
        self.retval = None
        self.prev_dir = fs.getcwd()
        self.exports = compute_exports(exports)  # exports from the calling SConscript
        # make sure the sconscript attr is a Node.
        if isinstance(sconscript, SCons.Node.Node):
            self.sconscript = sconscript
        elif sconscript == '-':
            # '-' means the script is read from stdin, so there is no Node.
            self.sconscript = None
        else:
            self.sconscript = fs.File(str(sconscript))

# the SConstruct/SConscript call stack:
call_stack = []

# For documentation on the methods in this file, see the scons man-page


def Return(*vars, **kw):
    """Set the return value(s) of the currently executing SConscript
    and (by default) stop executing it."""
    retval = []
    try:
        fvars = flatten(vars)
        for var in fvars:
            for v in var.split():
                retval.append(call_stack[-1].globals[v])
    except KeyError as x:
        raise SCons.Errors.UserError("Return of non-existent variable '%s'"%x)

    if len(retval) == 1:
        call_stack[-1].retval = retval[0]
    else:
        call_stack[-1].retval = tuple(retval)

    stop = kw.get('stop', True)

    if stop:
        raise SConscriptReturn


stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)


def handle_missing_SConscript(f, must_exist=None):
    """Take appropriate action on missing file in SConscript() call.

    Print a warning or raise an exception on missing file, unless
    missing is explicitly allowed by the *must_exist* value.
    On first warning, print a deprecation message.

    Args:
        f (Node): node for the missing configuration file
        must_exist (bool): if true, fail.  If false, but not ``None``,
          allow the file to be missing.  The default is ``None``,
          which means issue the warning.  The default is deprecated.

    Raises:
        UserError: if *must_exist* is true or if global
          :data:`SCons.Script._no_missing_sconscript` is true.
    """
    if must_exist or (SCons.Script._no_missing_sconscript and must_exist is not False):
        msg = "Fatal: missing SConscript '%s'" % f.get_internal_path()
        raise SCons.Errors.UserError(msg)

    if must_exist is None:
        # Warn about the deprecated default only once per run.
        if SCons.Script._warn_missing_sconscript_deprecated:
            msg = (
                "Calling missing SConscript without error is deprecated.\n"
                "Transition by adding must_exist=False to SConscript calls.\n"
                "Missing SConscript '%s'" % f.get_internal_path()
            )
            SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)
            SCons.Script._warn_missing_sconscript_deprecated = False
        else:
            msg = "Ignoring missing SConscript '%s'" % f.get_internal_path()
            SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning, msg)


def _SConscript(fs, *files, **kw):
    """Read and execute one or more SConscript files.

    Each file is exec'd inside a fresh Frame whose globals hold the
    default SCons namespace plus the caller's exports.  Returns the
    Return() value of the single script, or a tuple of values when
    several files were given.
    """
    top = fs.Top
    sd = fs.SConstruct_dir.rdir()
    exports = kw.get('exports', [])

    # evaluate each SConscript file
    results = []
    for fn in files:
        call_stack.append(Frame(fs, exports, fn))
        old_sys_path = sys.path
        try:
            SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1
            if fn == "-":
                # Read the script from standard input.
                exec(sys.stdin.read(), call_stack[-1].globals)
            else:
                if isinstance(fn, SCons.Node.Node):
                    f = fn
                else:
                    f = fs.File(str(fn))
                _file_ = None
                SConscriptNodes.add(f)

                # Change directory to the top of the source
                # tree to make sure the os's cwd and the cwd of
                # fs match so we can open the SConscript.
                fs.chdir(top, change_os_dir=1)
                if f.rexists():
                    actual = f.rfile()
                    _file_ = open(actual.get_abspath(), "rb")
                elif f.srcnode().rexists():
                    actual = f.srcnode().rfile()
                    _file_ = open(actual.get_abspath(), "rb")
                elif f.has_src_builder():
                    # The SConscript file apparently exists in a source
                    # code management system.  Build it, but then clear
                    # the builder so that it doesn't get built *again*
                    # during the actual build phase.
                    f.build()
                    f.built()
                    f.builder_set(None)
                    if f.exists():
                        _file_ = open(f.get_abspath(), "rb")
                if _file_:
                    # Chdir to the SConscript directory.  Use a path
                    # name relative to the SConstruct file so that if
                    # we're using the -f option, we're essentially
                    # creating a parallel SConscript directory structure
                    # in our local directory tree.
                    #
                    # XXX This is broken for multiple-repository cases
                    # where the SConstruct and SConscript files might be
                    # in different Repositories.  For now, cross that
                    # bridge when someone comes to it.
                    try:
                        src_dir = kw['src_dir']
                    except KeyError:
                        ldir = fs.Dir(f.dir.get_path(sd))
                    else:
                        ldir = fs.Dir(src_dir)
                        if not ldir.is_under(f.dir):
                            # They specified a source directory, but
                            # it's above the SConscript directory.
                            # Do the sensible thing and just use the
                            # SConcript directory.
                            ldir = fs.Dir(f.dir.get_path(sd))
                    try:
                        fs.chdir(ldir, change_os_dir=sconscript_chdir)
                    except OSError:
                        # There was no local directory, so we should be
                        # able to chdir to the Repository directory.
                        # Note that we do this directly, not through
                        # fs.chdir(), because we still need to
                        # interpret the stuff within the SConscript file
                        # relative to where we are logically.
                        fs.chdir(ldir, change_os_dir=0)
                        os.chdir(actual.dir.get_abspath())

                    # Append the SConscript directory to the beginning
                    # of sys.path so Python modules in the SConscript
                    # directory can be easily imported.
                    sys.path = [ f.dir.get_abspath() ] + sys.path

                    # This is the magic line that actually reads up
                    # and executes the stuff in the SConscript file.
                    # The locals for this frame contain the special
                    # bottom-of-the-stack marker so that any
                    # exceptions that occur when processing this
                    # SConscript can base the printed frames at this
                    # level and not show SCons internals as well.
                    call_stack[-1].globals.update({stack_bottom:1})
                    old_file = call_stack[-1].globals.get('__file__')
                    try:
                        del call_stack[-1].globals['__file__']
                    except KeyError:
                        pass
                    try:
                        try:
                            if Main.print_time:
                                start_time = time.perf_counter()
                            scriptdata = _file_.read()
                            scriptname = _file_.name
                            _file_.close()
                            exec(compile(scriptdata, scriptname, 'exec'), call_stack[-1].globals)
                        except SConscriptReturn:
                            pass
                    finally:
                        if Main.print_time:
                            elapsed = time.perf_counter() - start_time
                            print('SConscript:%s  took %0.3f ms' % (f.get_abspath(), elapsed * 1000.0))
                        if old_file is not None:
                            # FIX: restore under the string key '__file__'.
                            # The original used the bare module variable
                            # __file__ (i.e. the path of *this* file) as the
                            # dict key, so the executed script's __file__ was
                            # never actually put back after the del above.
                            call_stack[-1].globals.update({'__file__': old_file})
                else:
                    handle_missing_SConscript(f, kw.get('must_exist', None))

        finally:
            SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
            sys.path = old_sys_path
            frame = call_stack.pop()
            try:
                fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir)
            except OSError:
                # There was no local directory, so chdir to the
                # Repository directory.  Like above, we do this
                # directly.
                fs.chdir(frame.prev_dir, change_os_dir=0)
                rdir = frame.prev_dir.rdir()
                rdir._create()  # Make sure there's a directory there.
                try:
                    os.chdir(rdir.get_abspath())
                except OSError as e:
                    # We still couldn't chdir there, so raise the error,
                    # but only if actions are being executed.
                    #
                    # If the -n option was used, the directory would *not*
                    # have been created and we should just carry on and
                    # let things muddle through.  This isn't guaranteed
                    # to work if the SConscript files are reading things
                    # from disk (for example), but it should work well
                    # enough for most configurations.
                    if SCons.Action.execute_actions:
                        raise e

            results.append(frame.retval)

    # if we only have one script, don't return a tuple
    if len(results) == 1:
        return results[0]
    else:
        return tuple(results)


def SConscript_exception(file=sys.stderr):
    """Print an exception stack trace just for the SConscript file(s).
    This will show users who have Python errors where the problem is,
    without cluttering the output with all of the internal calls leading
    up to where we exec the SConscript."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    tb = exc_tb
    # Skip frames below the stack_bottom marker (SCons internals).
    while tb and stack_bottom not in tb.tb_frame.f_locals:
        tb = tb.tb_next
    if not tb:
        # We did not find our exec statement, so this was actually a bug
        # in SCons itself.  Show the whole stack.
        tb = exc_tb
    stack = traceback.extract_tb(tb)
    try:
        type = exc_type.__name__
    except AttributeError:
        type = str(exc_type)
        if type[:11] == "exceptions.":
            type = type[11:]
    file.write('%s: %s:\n' % (type, exc_value))
    for fname, line, func, text in stack:
        file.write('  File "%s", line %d:\n' % (fname, line))
        file.write('    %s\n' % text)


def annotate(node):
    """Annotate a node with the stack frame describing the
    SConscript file and line number that created it."""
    tb = sys.exc_info()[2]
    while tb and stack_bottom not in tb.tb_frame.f_locals:
        tb = tb.tb_next
    if not tb:
        # We did not find any exec of an SConscript file: what?!
        raise SCons.Errors.InternalError("could not find SConscript stack frame")
    node.creator = traceback.extract_stack(tb)[0]

# The following line would cause each Node to be annotated using the
# above function.  Unfortunately, this is a *huge* performance hit, so
# leave this disabled until we find a more efficient mechanism.
#SCons.Node.Annotate = annotate


class SConsEnvironment(SCons.Environment.Base):
    """An Environment subclass that contains all of the methods that
    are particular to the wrapper SCons interface and which aren't
    (or shouldn't be) part of the build engine itself.

    Note that not all of the methods of this class have corresponding
    global functions, there are some private methods.
    """

    #
    # Private methods of an SConsEnvironment.
    #
    def _exceeds_version(self, major, minor, v_major, v_minor):
        """Return 1 if 'major' and 'minor' are greater than the version
        in 'v_major' and 'v_minor', and 0 otherwise."""
        return (major > v_major or (major == v_major and minor > v_minor))

    def _get_major_minor_revision(self, version_string):
        """Split a version string into major, minor and (optionally)
        revision parts.

        This is complicated by the fact that a version string can be
        something like 3.2b1."""
        version = version_string.split(' ')[0].split('.')
        v_major = int(version[0])
        # re.match strips trailing alpha/beta suffixes ("2b1" -> 2).
        v_minor = int(re.match(r'\d+', version[1]).group())
        if len(version) >= 3:
            v_revision = int(re.match(r'\d+', version[2]).group())
        else:
            v_revision = 0
        return v_major, v_minor, v_revision

    def _get_SConscript_filenames(self, ls, kw):
        """
        Convert the parameters passed to SConscript() calls into a list
        of files and export variables.  If the parameters are invalid,
        throws SCons.Errors.UserError.  Returns a tuple (l, e) where l
        is a list of SConscript filenames and e is a list of exports.
        """
        exports = []

        if len(ls) == 0:
            # No positional args: require the 'dirs' keyword.
            try:
                dirs = kw["dirs"]
            except KeyError:
                raise SCons.Errors.UserError("Invalid SConscript usage - no parameters")

            if not is_List(dirs):
                dirs = [ dirs ]
            dirs = list(map(str, dirs))

            name = kw.get('name', 'SConscript')

            files = [os.path.join(n, name) for n in dirs]

        elif len(ls) == 1:
            files = ls[0]

        elif len(ls) == 2:
            files   = ls[0]
            exports = self.Split(ls[1])

        else:
            raise SCons.Errors.UserError("Invalid SConscript() usage - too many arguments")

        if not is_List(files):
            files = [ files ]

        if kw.get('exports'):
            exports.extend(self.Split(kw['exports']))

        variant_dir = kw.get('variant_dir')
        if variant_dir:
            if len(files) != 1:
                raise SCons.Errors.UserError("Invalid SConscript() usage - can only specify one SConscript with a variant_dir")
            duplicate = kw.get('duplicate', 1)
            src_dir = kw.get('src_dir')
            if not src_dir:
                src_dir, fname = os.path.split(str(files[0]))
                files = [os.path.join(str(variant_dir), fname)]
            else:
                if not isinstance(src_dir, SCons.Node.Node):
                    src_dir = self.fs.Dir(src_dir)
                fn = files[0]
                if not isinstance(fn, SCons.Node.Node):
                    fn = self.fs.File(fn)
                if fn.is_under(src_dir):
                    # Get path relative to the source directory.
                    fname = fn.get_path(src_dir)
                    files = [os.path.join(str(variant_dir), fname)]
                else:
                    files = [fn.get_abspath()]
                kw['src_dir'] = variant_dir
            self.fs.VariantDir(variant_dir, src_dir, duplicate)

        return (files, exports)

    #
    # Public methods of an SConsEnvironment.  These get
    # entry points in the global namespace so they can be called
    # as global functions.
    #

    def Configure(self, *args, **kw):
        if not SCons.Script.sconscript_reading:
            raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
        kw['_depth'] = kw.get('_depth', 0) + 1
        return SCons.Environment.Base.Configure(self, *args, **kw)

    def Default(self, *targets):
        SCons.Script._Set_Default_Targets(self, targets)

    def EnsureSConsVersion(self, major, minor, revision=0):
        """Exit abnormally if the SCons version is not late enough."""
        # split string to avoid replacement during build process
        if SCons.__version__ == '__' + 'VERSION__':
            SCons.Warnings.warn(SCons.Warnings.DevelopmentVersionWarning,
                                "EnsureSConsVersion is ignored for development version")
            return
        scons_ver = self._get_major_minor_revision(SCons.__version__)
        if scons_ver < (major, minor, revision):
            if revision:
                scons_ver_string = '%d.%d.%d' % (major, minor, revision)
            else:
                scons_ver_string = '%d.%d' % (major, minor)
            print("SCons %s or greater required, but you have SCons %s" % \
                  (scons_ver_string, SCons.__version__))
            sys.exit(2)

    def EnsurePythonVersion(self, major, minor):
        """Exit abnormally if the Python version is not late enough."""
        if sys.version_info < (major, minor):
            v = sys.version.split()[0]
            print("Python %d.%d or greater required, but you have Python %s" %(major,minor,v))
            sys.exit(2)

    def Exit(self, value=0):
        sys.exit(value)

    def Export(self, *vars, **kw):
        for var in vars:
            global_exports.update(compute_exports(self.Split(var)))
        global_exports.update(kw)

    def GetLaunchDir(self):
        global launch_dir
        return launch_dir

    def GetOption(self, name):
        name = self.subst(name)
        return SCons.Script.Main.GetOption(name)

    def Help(self, text, append=False):
        text = self.subst(text, raw=1)
        SCons.Script.HelpFunction(text, append=append)

    def Import(self, *vars):
        """Import variables (previously Export()ed, or passed as the
        'exports' argument) into the calling SConscript's globals."""
        try:
            frame = call_stack[-1]
            globals = frame.globals
            exports = frame.exports
            for var in vars:
                var = self.Split(var)
                for v in var:
                    if v == '*':
                        globals.update(global_exports)
                        globals.update(exports)
                    else:
                        # Per-call exports shadow the global export table.
                        if v in exports:
                            globals[v] = exports[v]
                        else:
                            globals[v] = global_exports[v]
        except KeyError as x:
            raise SCons.Errors.UserError("Import of non-existent variable '%s'"%x)

    def SConscript(self, *ls, **kw):
        """Execute SCons configuration files.

        Parameters:
            *ls (str or list): configuration file(s) to execute.

        Keyword arguments:
            dirs (list): execute SConscript in each listed directory.
            name (str): execute script 'name' (used only with 'dirs').
            exports (list or dict): locally export variables the
              called script(s) can import.
            variant_dir (str): mirror sources needed for the build in
              a variant directory to allow building in it.
            duplicate (bool): physically duplicate sources instead of just
              adjusting paths of derived files (used only with
              'variant_dir') (default is True).
            must_exist (bool): fail if a requested script is missing
              (default is False, default is deprecated).

        Returns:
            list of variables returned by the called script

        Raises:
            UserError: a script is not found and such exceptions are enabled.
        """
        def subst_element(x, subst=self.subst):
            if SCons.Util.is_List(x):
                x = list(map(subst, x))
            else:
                x = subst(x)
            return x
        ls = list(map(subst_element, ls))
        subst_kw = {}
        for key, val in kw.items():
            if is_String(val):
                val = self.subst(val)
            elif SCons.Util.is_List(val):
                val = [self.subst(v) if is_String(v) else v for v in val]
            subst_kw[key] = val

        files, exports = self._get_SConscript_filenames(ls, subst_kw)
        subst_kw['exports'] = exports
        return _SConscript(self.fs, *files, **subst_kw)

    def SConscriptChdir(self, flag):
        global sconscript_chdir
        sconscript_chdir = flag

    def SetOption(self, name, value):
        name = self.subst(name)
        SCons.Script.Main.SetOption(name, value)

#
#
#
SCons.Environment.Environment = SConsEnvironment


def Configure(*args, **kw):
    if not SCons.Script.sconscript_reading:
        raise SCons.Errors.UserError("Calling Configure from Builders is not supported.")
    kw['_depth'] = 1
    return SCons.SConf.SConf(*args, **kw)

# It's very important that the DefaultEnvironmentCall() class stay in this
# file, with the
# get_calling_namespaces() function, the compute_exports()
# function, the Frame class and the SConsEnvironment.Export() method.
# These things make up the calling stack leading up to the actual global
# Export() or SConscript() call that the user issued.  We want to allow
# users to export local variables that they define, like so:
#
#       def func():
#           x = 1
#           Export('x')
#
# To support this, the get_calling_namespaces() function assumes that
# the *first* stack frame that's not from this file is the local frame
# for the Export() or SConscript() call.

# Lazily-created singleton proxy around the default Environment.
_DefaultEnvironmentProxy = None

def get_DefaultEnvironmentProxy():
    """Return (creating on first use) the no-substitution proxy that
    wraps the default construction environment."""
    global _DefaultEnvironmentProxy
    if not _DefaultEnvironmentProxy:
        default_env = SCons.Defaults.DefaultEnvironment()
        _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env)
    return _DefaultEnvironmentProxy

class DefaultEnvironmentCall:
    """A class that implements "global function" calls of
    Environment methods by fetching the specified method from the
    DefaultEnvironment's class.  Note that this uses an intermediate
    proxy class instead of calling the DefaultEnvironment method
    directly so that the proxy can override the subst() method and
    thereby prevent expansion of construction variables (since from
    the user's point of view this was called as a global function,
    with no associated construction environment)."""
    def __init__(self, method_name, subst=0):
        # method_name: Environment method to forward to.
        # subst: when true, call through the real DefaultEnvironment so
        # construction variables ARE expanded; otherwise use the proxy.
        self.method_name = method_name
        if subst:
            self.factory = SCons.Defaults.DefaultEnvironment
        else:
            self.factory = get_DefaultEnvironmentProxy

    def __call__(self, *args, **kw):
        # Resolve the target environment lazily at call time.
        env = self.factory()
        method = getattr(env, self.method_name)
        return method(*args, **kw)


def BuildDefaultGlobals():
    """
    Create a dictionary containing all the default globals for
    SConstruct and SConscript files.
    """

    global GlobalDict
    if GlobalDict is None:
        # Build the table once, then hand out copies so scripts can't
        # pollute each other's namespaces.
        GlobalDict = {}

        import SCons.Script
        d = SCons.Script.__dict__
        def not_a_module(m, d=d, mtype=type(SCons.Script)):
            # Exclude submodules; only functions/classes/constants are exported.
            return not isinstance(d[m], mtype)
        for m in filter(not_a_module, dir(SCons.Script)):
            GlobalDict[m] = d[m]

    return GlobalDict.copy()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
PypiClean
/CCC-2.0.1.tar.gz/CCC-2.0.1/ccc/digital_assets/models.py
from django.conf import settings
from django.db import models
from django.urls import reverse


class DigitalVideo(models.Model):
    """An uploaded video asset owned by a user."""
    user = models.ForeignKey(settings.ACCOUNT_USER_PROXY_MODEL, on_delete=models.CASCADE)
    video = models.FileField(null=True, blank=True, upload_to="digital_assets/video")
    uploaded_at = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    # Display name; NOTE(review): null=True without a default means __str__
    # can return None for legacy rows — confirm data is always populated.
    video_name = models.CharField(max_length=100, null=True)

    def __str__(self):
        return self.video_name

    def get_absolute_url(self):
        # URL of the edit view for this asset.
        return reverse("srm:digital-assets:video_update_url", args=([self.id]))

    def get_delete_url(self):
        # URL of the delete view for this asset.
        return reverse("srm:digital-assets:video_delete_url", args=([self.id]))


class DigitalAudio(models.Model):
    """An uploaded audio asset owned by a user."""
    user = models.ForeignKey(settings.ACCOUNT_USER_PROXY_MODEL, on_delete=models.CASCADE)
    audio = models.FileField(null=True, blank=True, upload_to="digital_assets/audio")
    uploaded_at = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    audio_name = models.CharField(max_length=100, null=True)

    def __str__(self):
        return self.audio_name

    def get_absolute_url(self):
        return reverse("srm:digital-assets:audio_update_url", args=([self.id]))

    def get_delete_url(self):
        return reverse("srm:digital-assets:audio_delete_url", args=([self.id]))


class DigitalImage(models.Model):
    """An uploaded image asset owned by a user; newest first by default."""
    user = models.ForeignKey(settings.ACCOUNT_USER_PROXY_MODEL, on_delete=models.CASCADE)
    image = models.ImageField(null=True, blank=True, upload_to="digital_assets/pictures")
    uploaded_at = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    image_name = models.CharField(max_length=100, null=True)

    class Meta:
        # Most recently created images first.
        ordering = ['-id']

    def __str__(self):
        return self.image_name

    def get_absolute_url(self):
        return reverse("srm:digital-assets:image_update_url", args=([self.id]))

    def get_delete_url(self):
        return reverse("srm:digital-assets:image_delete_url", args=([self.id]))


class DigitalAttachment(models.Model):
    """An arbitrary uploaded file attachment owned by a user."""
    user = models.ForeignKey(settings.ACCOUNT_USER_PROXY_MODEL, on_delete=models.CASCADE)
    attachment = models.FileField(null=True, blank=True, upload_to="digital_assets/attachments")
    uploaded_at = models.DateTimeField(auto_now_add=True, null=True, blank=True)
    attachment_name = models.CharField(max_length=100, null=True)

    def __str__(self):
        return self.attachment_name

    def get_absolute_url(self):
        return reverse("srm:digital-assets:attachment_update_url", args=([self.id]))

    def get_delete_url(self):
        return reverse("srm:digital-assets:attachment_delete_url", args=([self.id]))
PypiClean
/FlexGet-3.9.6-py3-none-any.whl/flexget/webserver.py
import hashlib
import random
import socket
import threading
from typing import Dict, Optional, Tuple

import cherrypy
import zxcvbn
from flask import Flask, abort, redirect
from flask_login import UserMixin
from loguru import logger
from sqlalchemy import Column, Integer, Unicode
from werkzeug.security import generate_password_hash

from flexget.manager import Base
from flexget.utils.database import with_session

logger = logger.bind(name='web_server')

_home: Optional[str] = None
_app_register: Dict[str, Tuple[Flask, str]] = {}
_default_app = Flask(__name__)

# Cryptographically secure RNG (os.urandom-backed) — used for keys/secrets.
rand = random.SystemRandom()


def generate_key():
    """Generate a key for use in authentication (hex sha224 digest)."""
    return str(hashlib.sha224(str(rand.getrandbits(128)).encode('utf-8')).hexdigest())


def get_random_string(
    length=12, allowed_chars='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
):
    """
    Returns a securely generated random string.

    The default length of 12 with the a-z, A-Z, 0-9 character set returns
    a 71-bit value. log_2((26+26+10)^12) =~ 71 bits.
    Taken from the django.utils.crypto module.
    """
    return ''.join(rand.choice(allowed_chars) for __ in range(length))


@with_session
def get_secret(session=None):
    """Generate a secret key for flask applications and store it in the database."""
    web_secret = session.query(WebSecret).first()
    if not web_secret:
        # First run: create and persist the secret.
        web_secret = WebSecret(
            id='1',
            value=get_random_string(50, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'),
        )
        session.add(web_secret)
        session.commit()

    return web_secret.value


class WeakPassword(Exception):
    """Raised when a proposed password fails the zxcvbn strength check."""

    def __init__(self, value, logger=logger, **kwargs):
        super().__init__()
        # Value is expected to be a string
        if not isinstance(value, str):
            value = str(value)
        self.value = value
        self.logger = logger
        self.kwargs = kwargs

    def __str__(self):
        # FIX: was `return str(self).encode('utf-8')`, which recursed
        # infinitely (str(self) re-invokes __str__) and would have returned
        # bytes — __str__ must return str.  Leftover from a Python 2
        # `unicode(self).encode(...)` idiom.
        return self.value

    def __unicode__(self):
        # Python 2 leftover; harmless under Python 3.
        return str(self.value)


class User(Base, UserMixin):
    """User class available for flask apps to handle authentication using flask_login"""

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(Unicode(50), unique=True)
    token = Column(Unicode, default=generate_key)
    password = Column(Unicode)

    def __repr__(self):
        return '<User %r>' % self.name

    def get_id(self):
        return self.name


class WebSecret(Base):
    """Store flask secret in the database"""

    __tablename__ = 'secret'

    id = Column(Unicode, primary_key=True)
    value = Column(Unicode)


def register_app(path, application, name):
    """Register a WSGI app to be served under `path`; duplicate paths error."""
    if path in _app_register:
        raise ValueError('path %s already registered' % path)
    _app_register[path] = (application, name)


def register_home(route):
    """Registers UI home page"""
    global _home
    _home = route


@_default_app.route('/')
def start_page():
    """Redirect user to registered UI home"""
    if not _home:
        abort(404)
    return redirect(_home)


def setup_server(config):
    """Sets up and starts/restarts the web service."""
    web_server = WebServer(
        bind=config['bind'],
        port=config['port'],
        ssl_certificate=config['ssl_certificate'],
        ssl_private_key=config['ssl_private_key'],
        base_url=config['base_url'],
    )

    _default_app.secret_key = get_secret()

    user = get_user()

    if not user or not user.password:
        logger.warning(
            'No password set for web server, create one by using' ' `flexget web passwd <password>`'
        )

    # Only start serving if at least one app has been registered.
    if _app_register:
        web_server.start()
    return web_server


class WebServer(threading.Thread):
    """Background thread that hosts the registered WSGI apps via CherryPy."""

    # We use a regular list for periodic jobs, so you must hold this lock while using it
    triggers_lock = threading.Lock()

    def __init__(
        self, bind='0.0.0.0', port=5050, ssl_certificate=None, ssl_private_key=None, base_url=''
    ):
        threading.Thread.__init__(self, name='web_server')
        self.bind = str(bind)  # String to remove unicode warning from cherrypy startup
        self.port = port
        self.ssl_certificate = ssl_certificate
        self.ssl_private_key = ssl_private_key
        self.base_url = base_url

    def start(self):
        # If we have already started and stopped a thread, we need to reinitialize it to create a new one
        if not self.is_alive():
            self.__init__(
                bind=self.bind,
                port=self.port,
                ssl_certificate=self.ssl_certificate,
                ssl_private_key=self.ssl_private_key,
                base_url=self.base_url,
            )
        threading.Thread.start(self)

    def _start_server(self):
        """Configure CherryPy and serve until the engine is stopped."""
        # Mount the WSGI callable object (app) on the root directory
        cherrypy.tree.graft(_default_app, '/')
        for path, (registered_app, _name) in _app_register.items():
            cherrypy.tree.graft(registered_app, self.base_url + path)

        cherrypy.log.error_log.propagate = False
        cherrypy.log.access_log.propagate = False

        # Set the configuration of the web server
        cherrypy.config.update(
            {
                'engine.autoreload.on': False,
                'server.socket_port': self.port,
                'server.socket_host': self.bind,
                'log.screen': False,
            }
        )

        if self.ssl_certificate and self.ssl_private_key:
            cherrypy.config.update(
                {
                    'server.ssl_module': 'builtin',
                    'server.ssl_certificate': self.ssl_certificate,
                    'server.ssl_private_key': self.ssl_private_key,
                }
            )

        try:
            # For logging only: resolve a presentable host when bound to all interfaces.
            host = (
                self.bind if self.bind != "0.0.0.0" else socket.gethostbyname(socket.gethostname())
            )
        except socket.gaierror:
            host = '127.0.0.1'

        protocol = 'https' if self.ssl_certificate and self.ssl_private_key else 'http'

        server_url = f'{protocol}://{host}:{self.port}{self.base_url}'
        logger.info('Web server started at {}', server_url)
        for path, (registered_app, name) in _app_register.items():
            logger.info('{} available at {}{}', name, server_url, path)

        # Start the CherryPy WSGI web server
        cherrypy.engine.start()
        cherrypy.engine.block()

    def run(self):
        self._start_server()

    def stop(self):
        global _app_register
        logger.info('Shutting down web server')
        cherrypy.engine.exit()

        # Unregister apps
        _app_register = {}


@with_session
def get_user(username='flexget', session=None):
    """Fetch the user row, creating (uncommitted) one on first use."""
    user = session.query(User).filter(User.name == username).first()
    if not user:
        user = User()
        user.name = username
        session.add(user)
    return user


@with_session
def change_password(username='flexget', password='', session=None):
    """Set a new password after checking its strength with zxcvbn.

    Raises:
        WeakPassword: when zxcvbn scores the password below 3.
    """
    check = zxcvbn.zxcvbn(password, user_inputs=[username])
    if check['score'] < 3:
        warning = check['feedback']['warning']
        suggestions = ' '.join(check['feedback']['suggestions'])
        message = f'Password \'{password}\' is not strong enough. '
        if warning:
            message += warning + ' '
        if suggestions:
            message += f'Suggestions: {suggestions}'
        raise WeakPassword(message)
    user = get_user(username=username, session=session)
    user.password = str(generate_password_hash(password))
    session.commit()


@with_session
def generate_token(username='flexget', session=None):
    """Rotate and return a fresh API token for the user."""
    user = get_user(username=username, session=session)
    user.token = generate_key()
    session.commit()
    return user.token
PypiClean
/DVDev-0.1.3.tar.gz/DVDev-0.1.3/dvdev/public/js/development-bundle/ui/i18n/jquery-ui-i18n.js
/* Arabic Translation for jQuery UI date picker plugin. */ /* Khaled Al Horani -- koko.dw@gmail.com */ /* خالد الحوراني -- koko.dw@gmail.com */ /* NOTE: monthNames are the original months names and they are the Arabic names, not the new months name فبراير - يناير and there isn't any Arabic roots for these months */ jQuery(function($){ $.datepicker.regional['ar'] = { closeText: 'إغلاق', prevText: '&#x3c;السابق', nextText: 'التالي&#x3e;', currentText: 'اليوم', monthNames: ['كانون الثاني', 'شباط', 'آذار', 'نيسان', 'آذار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول'], monthNamesShort: ['1','2','3','4','5','6','7','8','9','10','11','12'], dayNames: ['السبت', 'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة'], dayNamesShort: ['سبت', 'أحد', 'اثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة'], dayNamesMin: ['سبت', 'أحد', 'اثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: true}; $.datepicker.setDefaults($.datepicker.regional['ar']); });/* Bulgarian initialisation for the jQuery UI date picker plugin. */ /* Written by Stoyan Kyosev (http://svest.org). */ jQuery(function($){ $.datepicker.regional['bg'] = { closeText: 'затвори', prevText: '&#x3c;назад', nextText: 'напред&#x3e;', nextBigText: '&#x3e;&#x3e;', currentText: 'днес', monthNames: ['Януари','Февруари','Март','Април','Май','Юни', 'Юли','Август','Септември','Октомври','Ноември','Декември'], monthNamesShort: ['Яну','Фев','Мар','Апр','Май','Юни', 'Юли','Авг','Сеп','Окт','Нов','Дек'], dayNames: ['Неделя','Понеделник','Вторник','Сряда','Четвъртък','Петък','Събота'], dayNamesShort: ['Нед','Пон','Вто','Сря','Чет','Пет','Съб'], dayNamesMin: ['Не','По','Вт','Ср','Че','Пе','Съ'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['bg']); }); /* Inicialitzaci� en catal� per a l'extenci� 'calendar' per jQuery. */ /* Writers: (joan.leon@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['ca'] = { closeText: 'Tancar', prevText: '&#x3c;Ant', nextText: 'Seg&#x3e;', currentText: 'Avui', monthNames: ['Gener','Febrer','Mar&ccedil;','Abril','Maig','Juny', 'Juliol','Agost','Setembre','Octubre','Novembre','Desembre'], monthNamesShort: ['Gen','Feb','Mar','Abr','Mai','Jun', 'Jul','Ago','Set','Oct','Nov','Des'], dayNames: ['Diumenge','Dilluns','Dimarts','Dimecres','Dijous','Divendres','Dissabte'], dayNamesShort: ['Dug','Dln','Dmt','Dmc','Djs','Dvn','Dsb'], dayNamesMin: ['Dg','Dl','Dt','Dc','Dj','Dv','Ds'], dateFormat: 'mm/dd/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['ca']); });/* Czech initialisation for the jQuery UI date picker plugin. */ /* Written by Tomas Muller (tomas@tomas-muller.net). */ jQuery(function($){ $.datepicker.regional['cs'] = { closeText: 'Zavřít', prevText: '&#x3c;Dříve', nextText: 'Později&#x3e;', currentText: 'Nyní', monthNames: ['leden','únor','březen','duben','květen','červen', 'červenec','srpen','září','říjen','listopad','prosinec'], monthNamesShort: ['led','úno','bře','dub','kvě','čer', 'čvc','srp','zář','říj','lis','pro'], dayNames: ['neděle', 'pondělí', 'úterý', 'středa', 'čtvrtek', 'pátek', 'sobota'], dayNamesShort: ['ne', 'po', 'út', 'st', 'čt', 'pá', 'so'], dayNamesMin: ['ne','po','út','st','čt','pá','so'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['cs']); }); /* Danish initialisation for the jQuery UI date picker plugin. */ /* Written by Jan Christensen ( deletestuff@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['da'] = { closeText: 'Luk', prevText: '&#x3c;Forrige', nextText: 'Næste&#x3e;', currentText: 'Idag', monthNames: ['Januar','Februar','Marts','April','Maj','Juni', 'Juli','August','September','Oktober','November','December'], monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun', 'Jul','Aug','Sep','Okt','Nov','Dec'], dayNames: ['Søndag','Mandag','Tirsdag','Onsdag','Torsdag','Fredag','Lørdag'], dayNamesShort: ['Søn','Man','Tir','Ons','Tor','Fre','Lør'], dayNamesMin: ['Sø','Ma','Ti','On','To','Fr','Lø'], dateFormat: 'dd-mm-yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['da']); }); /* German initialisation for the jQuery UI date picker plugin. */ /* Written by Milian Wolff (mail@milianw.de). */ jQuery(function($){ $.datepicker.regional['de'] = { closeText: 'schließen', prevText: '&#x3c;zurück', nextText: 'Vor&#x3e;', currentText: 'heute', monthNames: ['Januar','Februar','März','April','Mai','Juni', 'Juli','August','September','Oktober','November','Dezember'], monthNamesShort: ['Jan','Feb','Mär','Apr','Mai','Jun', 'Jul','Aug','Sep','Okt','Nov','Dez'], dayNames: ['Sonntag','Montag','Dienstag','Mittwoch','Donnerstag','Freitag','Samstag'], dayNamesShort: ['So','Mo','Di','Mi','Do','Fr','Sa'], dayNamesMin: ['So','Mo','Di','Mi','Do','Fr','Sa'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['de']); }); /* Greek (el) initialisation for the jQuery UI date picker plugin. 
*/ /* Written by Alex Cicovic (http://www.alexcicovic.com) */ jQuery(function($){ $.datepicker.regional['el'] = { closeText: 'Κλείσιμο', prevText: 'Προηγούμενος', nextText: 'Επόμενος', currentText: 'Τρέχων Μήνας', monthNames: ['Ιανουάριος','Φεβρουάριος','Μάρτιος','Απρίλιος','Μάιος','Ιούνιος', 'Ιούλιος','Αύγουστος','Σεπτέμβριος','Οκτώβριος','Νοέμβριος','Δεκέμβριος'], monthNamesShort: ['Ιαν','Φεβ','Μαρ','Απρ','Μαι','Ιουν', 'Ιουλ','Αυγ','Σεπ','Οκτ','Νοε','Δεκ'], dayNames: ['Κυριακή','Δευτέρα','Τρίτη','Τετάρτη','Πέμπτη','Παρασκευή','Σάββατο'], dayNamesShort: ['Κυρ','Δευ','Τρι','Τετ','Πεμ','Παρ','Σαβ'], dayNamesMin: ['Κυ','Δε','Τρ','Τε','Πε','Πα','Σα'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['el']); });/* Esperanto initialisation for the jQuery UI date picker plugin. */ /* Written by Olivier M. (olivierweb@ifrance.com). */ jQuery(function($){ $.datepicker.regional['eo'] = { closeText: 'Fermi', prevText: '&lt;Anta', nextText: 'Sekv&gt;', currentText: 'Nuna', monthNames: ['Januaro','Februaro','Marto','Aprilo','Majo','Junio', 'Julio','Aŭgusto','Septembro','Oktobro','Novembro','Decembro'], monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun', 'Jul','Aŭg','Sep','Okt','Nov','Dec'], dayNames: ['Dimanĉo','Lundo','Mardo','Merkredo','Ĵaŭdo','Vendredo','Sabato'], dayNamesShort: ['Dim','Lun','Mar','Mer','Ĵaŭ','Ven','Sab'], dayNamesMin: ['Di','Lu','Ma','Me','Ĵa','Ve','Sa'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['eo']); }); /* Inicializaci�n en espa�ol para la extensi�n 'UI date picker' para jQuery. */ /* Traducido por Vester (xvester@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['es'] = { closeText: 'Cerrar', prevText: '&#x3c;Ant', nextText: 'Sig&#x3e;', currentText: 'Hoy', monthNames: ['Enero','Febrero','Marzo','Abril','Mayo','Junio', 'Julio','Agosto','Septiembre','Octubre','Noviembre','Diciembre'], monthNamesShort: ['Ene','Feb','Mar','Abr','May','Jun', 'Jul','Ago','Sep','Oct','Nov','Dic'], dayNames: ['Domingo','Lunes','Martes','Mi&eacute;rcoles','Jueves','Viernes','S&aacute;bado'], dayNamesShort: ['Dom','Lun','Mar','Mi&eacute;','Juv','Vie','S&aacute;b'], dayNamesMin: ['Do','Lu','Ma','Mi','Ju','Vi','S&aacute;'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['es']); });/* Persian (Farsi) Translation for the jQuery UI date picker plugin. */ /* Javad Mowlanezhad -- jmowla@gmail.com */ /* Jalali calendar should supported soon! (Its implemented but I have to test it) */ jQuery(function($) { $.datepicker.regional['fa'] = { closeText: 'بستن', prevText: '&#x3c;قبلي', nextText: 'بعدي&#x3e;', currentText: 'امروز', monthNames: ['فروردين','ارديبهشت','خرداد','تير','مرداد','شهريور', 'مهر','آبان','آذر','دي','بهمن','اسفند'], monthNamesShort: ['1','2','3','4','5','6','7','8','9','10','11','12'], dayNames: ['يکشنبه','دوشنبه','سه‌شنبه','چهارشنبه','پنجشنبه','جمعه','شنبه'], dayNamesShort: ['ي','د','س','چ','پ','ج', 'ش'], dayNamesMin: ['ي','د','س','چ','پ','ج', 'ش'], dateFormat: 'yy/mm/dd', firstDay: 6, isRTL: true}; $.datepicker.setDefaults($.datepicker.regional['fa']); });/* Finnish initialisation for the jQuery UI date picker plugin. */ /* Written by Harri Kilpi� (harrikilpio@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['fi'] = { closeText: 'Sulje', prevText: '&laquo;Edellinen', nextText: 'Seuraava&raquo;', currentText: 'T&auml;n&auml;&auml;n', monthNames: ['Tammikuu','Helmikuu','Maaliskuu','Huhtikuu','Toukokuu','Kes&auml;kuu', 'Hein&auml;kuu','Elokuu','Syyskuu','Lokakuu','Marraskuu','Joulukuu'], monthNamesShort: ['Tammi','Helmi','Maalis','Huhti','Touko','Kes&auml;', 'Hein&auml;','Elo','Syys','Loka','Marras','Joulu'], dayNamesShort: ['Su','Ma','Ti','Ke','To','Pe','Su'], dayNames: ['Sunnuntai','Maanantai','Tiistai','Keskiviikko','Torstai','Perjantai','Lauantai'], dayNamesMin: ['Su','Ma','Ti','Ke','To','Pe','La'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['fi']); }); /* French initialisation for the jQuery UI date picker plugin. */ /* Written by Keith Wood (kbwood@virginbroadband.com.au) and Stéphane Nahmani (sholby@sholby.net). */ jQuery(function($){ $.datepicker.regional['fr'] = { closeText: 'Fermer', prevText: '&#x3c;Préc', nextText: 'Suiv&#x3e;', currentText: 'Courant', monthNames: ['Janvier','Février','Mars','Avril','Mai','Juin', 'Juillet','Août','Septembre','Octobre','Novembre','Décembre'], monthNamesShort: ['Jan','Fév','Mar','Avr','Mai','Jun', 'Jul','Aoû','Sep','Oct','Nov','Déc'], dayNames: ['Dimanche','Lundi','Mardi','Mercredi','Jeudi','Vendredi','Samedi'], dayNamesShort: ['Dim','Lun','Mar','Mer','Jeu','Ven','Sam'], dayNamesMin: ['Di','Lu','Ma','Me','Je','Ve','Sa'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['fr']); });/* Hebrew initialisation for the UI Datepicker extension. */ /* Written by Amir Hardon (ahardon at gmail dot com). 
*/ jQuery(function($){ $.datepicker.regional['he'] = { closeText: 'סגור', prevText: '&#x3c;הקודם', nextText: 'הבא&#x3e;', currentText: 'היום', monthNames: ['ינואר','פברואר','מרץ','אפריל','מאי','יוני', 'יולי','אוגוסט','ספטמבר','אוקטובר','נובמבר','דצמבר'], monthNamesShort: ['1','2','3','4','5','6', '7','8','9','10','11','12'], dayNames: ['ראשון','שני','שלישי','רביעי','חמישי','שישי','שבת'], dayNamesShort: ['א\'','ב\'','ג\'','ד\'','ה\'','ו\'','שבת'], dayNamesMin: ['א\'','ב\'','ג\'','ד\'','ה\'','ו\'','שבת'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: true}; $.datepicker.setDefaults($.datepicker.regional['he']); }); /* Croatian i18n for the jQuery UI date picker plugin. */ /* Written by Vjekoslav Nesek. */ jQuery(function($){ $.datepicker.regional['hr'] = { closeText: 'Zatvori', prevText: '&#x3c;', nextText: '&#x3e;', currentText: 'Danas', monthNames: ['Siječanj','Veljača','Ožujak','Travanj','Svibanj','Lipani', 'Srpanj','Kolovoz','Rujan','Listopad','Studeni','Prosinac'], monthNamesShort: ['Sij','Velj','Ožu','Tra','Svi','Lip', 'Srp','Kol','Ruj','Lis','Stu','Pro'], dayNames: ['Nedjalja','Ponedjeljak','Utorak','Srijeda','Četvrtak','Petak','Subota'], dayNamesShort: ['Ned','Pon','Uto','Sri','Čet','Pet','Sub'], dayNamesMin: ['Ne','Po','Ut','Sr','Če','Pe','Su'], dateFormat: 'dd.mm.yy.', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['hr']); });/* Hungarian initialisation for the jQuery UI date picker plugin. */ /* Written by Istvan Karaszi (jquerycalendar@spam.raszi.hu). 
*/ jQuery(function($){ $.datepicker.regional['hu'] = { closeText: 'bezárás', prevText: '&laquo;&nbsp;vissza', nextText: 'előre&nbsp;&raquo;', currentText: 'ma', monthNames: ['Január', 'Február', 'Március', 'Április', 'Május', 'Június', 'Július', 'Augusztus', 'Szeptember', 'Október', 'November', 'December'], monthNamesShort: ['Jan', 'Feb', 'Már', 'Ápr', 'Máj', 'Jún', 'Júl', 'Aug', 'Szep', 'Okt', 'Nov', 'Dec'], dayNames: ['Vasámap', 'Hétfö', 'Kedd', 'Szerda', 'Csütörtök', 'Péntek', 'Szombat'], dayNamesShort: ['Vas', 'Hét', 'Ked', 'Sze', 'Csü', 'Pén', 'Szo'], dayNamesMin: ['V', 'H', 'K', 'Sze', 'Cs', 'P', 'Szo'], dateFormat: 'yy-mm-dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['hu']); }); /* Armenian(UTF-8) initialisation for the jQuery UI date picker plugin. */ /* Written by Levon Zakaryan (levon.zakaryan@gmail.com)*/ jQuery(function($){ $.datepicker.regional['hy'] = { closeText: 'Փակել', prevText: '&#x3c;Նախ.', nextText: 'Հաջ.&#x3e;', currentText: 'Այսօր', monthNames: ['Հունվար','Փետրվար','Մարտ','Ապրիլ','Մայիս','Հունիս', 'Հուլիս','Օգոստոս','Սեպտեմբեր','Հոկտեմբեր','Նոյեմբեր','Դեկտեմբեր'], monthNamesShort: ['Հունվ','Փետր','Մարտ','Ապր','Մայիս','Հունիս', 'Հուլ','Օգս','Սեպ','Հոկ','Նոյ','Դեկ'], dayNames: ['կիրակի','եկուշաբթի','երեքշաբթի','չորեքշաբթի','հինգշաբթի','ուրբաթ','շաբաթ'], dayNamesShort: ['կիր','երկ','երք','չրք','հնգ','ուրբ','շբթ'], dayNamesMin: ['կիր','երկ','երք','չրք','հնգ','ուրբ','շբթ'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['hy']); });/* Indonesian initialisation for the jQuery UI date picker plugin. */ /* Written by Deden Fathurahman (dedenf@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['id'] = { closeText: 'Tutup', prevText: '&#x3c;mundur', nextText: 'maju&#x3e;', currentText: 'hari ini', monthNames: ['Januari','Februari','Maret','April','Mei','Juni', 'Juli','Agustus','September','Oktober','Nopember','Desember'], monthNamesShort: ['Jan','Feb','Mar','Apr','Mei','Jun', 'Jul','Agus','Sep','Okt','Nop','Des'], dayNames: ['Minggu','Senin','Selasa','Rabu','Kamis','Jumat','Sabtu'], dayNamesShort: ['Min','Sen','Sel','Rab','kam','Jum','Sab'], dayNamesMin: ['Mg','Sn','Sl','Rb','Km','jm','Sb'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['id']); });/* Icelandic initialisation for the jQuery UI date picker plugin. */ /* Written by Haukur H. Thorsson (haukur@eskill.is). */ jQuery(function($){ $.datepicker.regional['is'] = { closeText: 'Loka', prevText: '&#x3c; Fyrri', nextText: 'N&aelig;sti &#x3e;', currentText: '&Iacute; dag', monthNames: ['Jan&uacute;ar','Febr&uacute;ar','Mars','Apr&iacute;l','Ma&iacute','J&uacute;n&iacute;', 'J&uacute;l&iacute;','&Aacute;g&uacute;st','September','Okt&oacute;ber','N&oacute;vember','Desember'], monthNamesShort: ['Jan','Feb','Mar','Apr','Ma&iacute;','J&uacute;n', 'J&uacute;l','&Aacute;g&uacute;','Sep','Okt','N&oacute;v','Des'], dayNames: ['Sunnudagur','M&aacute;nudagur','&THORN;ri&eth;judagur','Mi&eth;vikudagur','Fimmtudagur','F&ouml;studagur','Laugardagur'], dayNamesShort: ['Sun','M&aacute;n','&THORN;ri','Mi&eth;','Fim','F&ouml;s','Lau'], dayNamesMin: ['Su','M&aacute;','&THORN;r','Mi','Fi','F&ouml;','La'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['is']); });/* Italian initialisation for the jQuery UI date picker plugin. */ /* Written by Apaella (apaella@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['it'] = { closeText: 'Chiudi', prevText: '&#x3c;Prec', nextText: 'Succ&#x3e;', currentText: 'Oggi', monthNames: ['Gennaio','Febbraio','Marzo','Aprile','Maggio','Giugno', 'Luglio','Agosto','Settembre','Ottobre','Novembre','Dicembre'], monthNamesShort: ['Gen','Feb','Mar','Apr','Mag','Giu', 'Lug','Ago','Set','Ott','Nov','Dic'], dayNames: ['Domenica','Luned&#236','Marted&#236','Mercoled&#236','Gioved&#236','Venerd&#236','Sabato'], dayNamesShort: ['Dom','Lun','Mar','Mer','Gio','Ven','Sab'], dayNamesMin: ['Do','Lu','Ma','Me','Gio','Ve','Sa'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['it']); }); /* Japanese initialisation for the jQuery UI date picker plugin. */ /* Written by Kentaro SATO (kentaro@ranvis.com). */ jQuery(function($){ $.datepicker.regional['ja'] = { closeText: '閉じる', prevText: '&#x3c;前', nextText: '次&#x3e;', currentText: '今日', monthNames: ['1月','2月','3月','4月','5月','6月', '7月','8月','9月','10月','11月','12月'], monthNamesShort: ['1月','2月','3月','4月','5月','6月', '7月','8月','9月','10月','11月','12月'], dayNames: ['日曜日','月曜日','火曜日','水曜日','木曜日','金曜日','土曜日'], dayNamesShort: ['日','月','火','水','木','金','土'], dayNamesMin: ['日','月','火','水','木','金','土'], dateFormat: 'yy/mm/dd', firstDay: 0, isRTL: false, showMonthAfterYear: true}; $.datepicker.setDefaults($.datepicker.regional['ja']); });/* Korean initialisation for the jQuery calendar extension. */ /* Written by DaeKwon Kang (ncrash.dk@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['ko'] = { closeText: '닫기', prevText: '이전달', nextText: '다음달', currentText: '오늘', monthNames: ['1월(JAN)','2월(FEB)','3월(MAR)','4월(APR)','5월(MAY)','6월(JUN)', '7월(JUL)','8월(AUG)','9월(SEP)','10월(OCT)','11월(NOV)','12월(DEC)'], monthNamesShort: ['1월(JAN)','2월(FEB)','3월(MAR)','4월(APR)','5월(MAY)','6월(JUN)', '7월(JUL)','8월(AUG)','9월(SEP)','10월(OCT)','11월(NOV)','12월(DEC)'], dayNames: ['일','월','화','수','목','금','토'], dayNamesShort: ['일','월','화','수','목','금','토'], dayNamesMin: ['일','월','화','수','목','금','토'], dateFormat: 'yy-mm-dd', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['ko']); });/* Lithuanian (UTF-8) initialisation for the jQuery UI date picker plugin. */ /* @author Arturas Paleicikas <arturas@avalon.lt> */ jQuery(function($){ $.datepicker.regional['lt'] = { closeText: 'Uždaryti', prevText: '&#x3c;Atgal', nextText: 'Pirmyn&#x3e;', currentText: 'Šiandien', monthNames: ['Sausis','Vasaris','Kovas','Balandis','Gegužė','Birželis', 'Liepa','Rugpjūtis','Rugsėjis','Spalis','Lapkritis','Gruodis'], monthNamesShort: ['Sau','Vas','Kov','Bal','Geg','Bir', 'Lie','Rugp','Rugs','Spa','Lap','Gru'], dayNames: ['sekmadienis','pirmadienis','antradienis','trečiadienis','ketvirtadienis','penktadienis','šeštadienis'], dayNamesShort: ['sek','pir','ant','tre','ket','pen','šeš'], dayNamesMin: ['Se','Pr','An','Tr','Ke','Pe','Še'], dateFormat: 'yy-mm-dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['lt']); });/* Latvian (UTF-8) initialisation for the jQuery UI date picker plugin. 
*/ /* @author Arturas Paleicikas <arturas.paleicikas@metasite.net> */ jQuery(function($){ $.datepicker.regional['lv'] = { closeText: 'Aizvērt', prevText: 'Iepr', nextText: 'Nāka', currentText: 'Šodien', monthNames: ['Janvāris','Februāris','Marts','Aprīlis','Maijs','Jūnijs', 'Jūlijs','Augusts','Septembris','Oktobris','Novembris','Decembris'], monthNamesShort: ['Jan','Feb','Mar','Apr','Mai','Jūn', 'Jūl','Aug','Sep','Okt','Nov','Dec'], dayNames: ['svētdiena','pirmdiena','otrdiena','trešdiena','ceturtdiena','piektdiena','sestdiena'], dayNamesShort: ['svt','prm','otr','tre','ctr','pkt','sst'], dayNamesMin: ['Sv','Pr','Ot','Tr','Ct','Pk','Ss'], dateFormat: 'dd-mm-yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['lv']); });/* Malaysian initialisation for the jQuery UI date picker plugin. */ /* Written by Mohd Nawawi Mohamad Jamili (nawawi@ronggeng.net). */ jQuery(function($){ $.datepicker.regional['ms'] = { closeText: 'Tutup', prevText: '&#x3c;Sebelum', nextText: 'Selepas&#x3e;', currentText: 'hari ini', monthNames: ['Januari','Februari','Mac','April','Mei','Jun', 'Julai','Ogos','September','Oktober','November','Disember'], monthNamesShort: ['Jan','Feb','Mac','Apr','Mei','Jun', 'Jul','Ogo','Sep','Okt','Nov','Dis'], dayNames: ['Ahad','Isnin','Selasa','Rabu','Khamis','Jumaat','Sabtu'], dayNamesShort: ['Aha','Isn','Sel','Rab','kha','Jum','Sab'], dayNamesMin: ['Ah','Is','Se','Ra','Kh','Ju','Sa'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['ms']); });/* Dutch (UTF-8) initialisation for the jQuery UI date picker plugin. 
*/ /* Written by Mathias Bynens <http://mathiasbynens.be/> */ jQuery(function($){ $.datepicker.regional.nl = { closeText: 'Sluiten', prevText: '←', nextText: '→', currentText: 'Vandaag', monthNames: ['januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september', 'oktober', 'november', 'december'], monthNamesShort: ['jan', 'feb', 'maa', 'apr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec'], dayNames: ['zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag'], dayNamesShort: ['zon', 'maa', 'din', 'woe', 'don', 'vri', 'zat'], dayNamesMin: ['zo', 'ma', 'di', 'wo', 'do', 'vr', 'za'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional.nl); });/* Norwegian initialisation for the jQuery UI date picker plugin. */ /* Written by Naimdjon Takhirov (naimdjon@gmail.com). */ jQuery(function($){ $.datepicker.regional['no'] = { closeText: 'Lukk', prevText: '&laquo;Forrige', nextText: 'Neste&raquo;', currentText: 'I dag', monthNames: ['Januar','Februar','Mars','April','Mai','Juni', 'Juli','August','September','Oktober','November','Desember'], monthNamesShort: ['Jan','Feb','Mar','Apr','Mai','Jun', 'Jul','Aug','Sep','Okt','Nov','Des'], dayNamesShort: ['Søn','Man','Tir','Ons','Tor','Fre','Lør'], dayNames: ['Søndag','Mandag','Tirsdag','Onsdag','Torsdag','Fredag','Lørdag'], dayNamesMin: ['Sø','Ma','Ti','On','To','Fr','Lø'], dateFormat: 'yy-mm-dd', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['no']); }); /* Polish initialisation for the jQuery UI date picker plugin. */ /* Written by Jacek Wysocki (jacek.wysocki@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['pl'] = { closeText: 'Zamknij', prevText: '&#x3c;Poprzedni', nextText: 'Następny&#x3e;', currentText: 'Dziś', monthNames: ['Styczeń','Luty','Marzec','Kwiecień','Maj','Czerwiec', 'Lipiec','Sierpień','Wrzesień','Październik','Listopad','Grudzień'], monthNamesShort: ['Sty','Lu','Mar','Kw','Maj','Cze', 'Lip','Sie','Wrz','Pa','Lis','Gru'], dayNames: ['Niedziela','Poniedzialek','Wtorek','Środa','Czwartek','Piątek','Sobota'], dayNamesShort: ['Nie','Pn','Wt','Śr','Czw','Pt','So'], dayNamesMin: ['N','Pn','Wt','Śr','Cz','Pt','So'], dateFormat: 'yy-mm-dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['pl']); }); /* Brazilian initialisation for the jQuery UI date picker plugin. */ /* Written by Leonildo Costa Silva (leocsilva@gmail.com). */ jQuery(function($){ $.datepicker.regional['pt-BR'] = { closeText: 'Fechar', prevText: '&#x3c;Anterior', nextText: 'Pr&oacute;ximo&#x3e;', currentText: 'Hoje', monthNames: ['Janeiro','Fevereiro','Mar&ccedil;o','Abril','Maio','Junho', 'Julho','Agosto','Setembro','Outubro','Novembro','Dezembro'], monthNamesShort: ['Jan','Fev','Mar','Abr','Mai','Jun', 'Jul','Ago','Set','Out','Nov','Dez'], dayNames: ['Domingo','Segunda-feira','Ter&ccedil;a-feira','Quarta-feira','Quinta-feira','Sexta-feira','Sabado'], dayNamesShort: ['Dom','Seg','Ter','Qua','Qui','Sex','Sab'], dayNamesMin: ['Dom','Seg','Ter','Qua','Qui','Sex','Sab'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['pt-BR']); });/* Romanian initialisation for the jQuery UI date picker plugin. */ /* Written by Edmond L. (ll_edmond@walla.com). 
*/ jQuery(function($){ $.datepicker.regional['ro'] = { closeText: 'Inchide', prevText: '&#x3c;Anterior', nextText: 'Urmator&#x3e;', currentText: 'Azi', monthNames: ['Ianuarie','Februarie','Martie','Aprilie','Mai','Junie', 'Julie','August','Septembrie','Octobrie','Noiembrie','Decembrie'], monthNamesShort: ['Ian', 'Feb', 'Mar', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Noi', 'Dec'], dayNames: ['Duminica', 'Luni', 'Marti', 'Miercuri', 'Joi', 'Vineri', 'Sambata'], dayNamesShort: ['Dum', 'Lun', 'Mar', 'Mie', 'Joi', 'Vin', 'Sam'], dayNamesMin: ['Du','Lu','Ma','Mi','Jo','Vi','Sa'], dateFormat: 'mm/dd/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['ro']); }); /* Russian (UTF-8) initialisation for the jQuery UI date picker plugin. */ /* Written by Andrew Stromnov (stromnov@gmail.com). */ jQuery(function($){ $.datepicker.regional['ru'] = { closeText: 'Закрыть', prevText: '&#x3c;Пред', nextText: 'След&#x3e;', currentText: 'Сегодня', monthNames: ['Январь','Февраль','Март','Апрель','Май','Июнь', 'Июль','Август','Сентябрь','Октябрь','Ноябрь','Декабрь'], monthNamesShort: ['Янв','Фев','Мар','Апр','Май','Июн', 'Июл','Авг','Сен','Окт','Ноя','Дек'], dayNames: ['воскресенье','понедельник','вторник','среда','четверг','пятница','суббота'], dayNamesShort: ['вск','пнд','втр','срд','чтв','птн','сбт'], dayNamesMin: ['Вс','Пн','Вт','Ср','Чт','Пт','Сб'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['ru']); });/* Slovak initialisation for the jQuery UI date picker plugin. */ /* Written by Vojtech Rinik (vojto@hmm.sk). 
*/ jQuery(function($){ $.datepicker.regional['sk'] = { closeText: 'Zavrieť', prevText: '&#x3c;Predchádzajúci', nextText: 'Nasledujúci&#x3e;', currentText: 'Dnes', monthNames: ['Január','Február','Marec','Apríl','Máj','Jún', 'Júl','August','September','Október','November','December'], monthNamesShort: ['Jan','Feb','Mar','Apr','Máj','Jún', 'Júl','Aug','Sep','Okt','Nov','Dec'], dayNames: ['Nedel\'a','Pondelok','Utorok','Streda','Štvrtok','Piatok','Sobota'], dayNamesShort: ['Ned','Pon','Uto','Str','Štv','Pia','Sob'], dayNamesMin: ['Ne','Po','Ut','St','Št','Pia','So'], dateFormat: 'dd.mm.yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sk']); }); /* Slovenian initialisation for the jQuery UI date picker plugin. */ /* Written by Jaka Jancar (jaka@kubje.org). */ /* c = &#x10D;, s = &#x161; z = &#x17E; C = &#x10C; S = &#x160; Z = &#x17D; */ jQuery(function($){ $.datepicker.regional['sl'] = { closeText: 'Zapri', prevText: '&lt;Prej&#x161;nji', nextText: 'Naslednji&gt;', currentText: 'Trenutni', monthNames: ['Januar','Februar','Marec','April','Maj','Junij', 'Julij','Avgust','September','Oktober','November','December'], monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun', 'Jul','Avg','Sep','Okt','Nov','Dec'], dayNames: ['Nedelja','Ponedeljek','Torek','Sreda','&#x10C;etrtek','Petek','Sobota'], dayNamesShort: ['Ned','Pon','Tor','Sre','&#x10C;et','Pet','Sob'], dayNamesMin: ['Ne','Po','To','Sr','&#x10C;e','Pe','So'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sl']); }); /* Albanian initialisation for the jQuery UI date picker plugin. */ /* Written by Flakron Bytyqi (flakron@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['sq'] = { closeText: 'mbylle', prevText: '&#x3c;mbrapa', nextText: 'Përpara&#x3e;', currentText: 'sot', monthNames: ['Janar','Shkurt','Mars','Pril','Maj','Qershor', 'Korrik','Gusht','Shtator','Tetor','Nëntor','Dhjetor'], monthNamesShort: ['Jan','Shk','Mar','Pri','Maj','Qer', 'Kor','Gus','Sht','Tet','Nën','Dhj'], dayNames: ['E Diel','E Hënë','E Martë','E Mërkurë','E Enjte','E Premte','E Shtune'], dayNamesShort: ['Di','Hë','Ma','Më','En','Pr','Sh'], dayNamesMin: ['Di','Hë','Ma','Më','En','Pr','Sh'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sq']); }); /* Serbian i18n for the jQuery UI date picker plugin. */ /* Written by Dejan Dimić. */ jQuery(function($){ $.datepicker.regional['sr-SR'] = { closeText: 'Zatvori', prevText: '&#x3c;', nextText: '&#x3e;', currentText: 'Danas', monthNames: ['Januar','Februar','Mart','April','Maj','Jun', 'Jul','Avgust','Septembar','Oktobar','Novembar','Decembar'], monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun', 'Jul','Avg','Sep','Okt','Nov','Dec'], dayNames: ['Nedelja','Ponedeljak','Utorak','Sreda','Četvrtak','Petak','Subota'], dayNamesShort: ['Ned','Pon','Uto','Sre','Čet','Pet','Sub'], dayNamesMin: ['Ne','Po','Ut','Sr','Če','Pe','Su'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sr-SR']); }); /* Serbian i18n for the jQuery UI date picker plugin. */ /* Written by Dejan Dimić. 
*/ jQuery(function($){ $.datepicker.regional['sr'] = { closeText: 'Затвори', prevText: '&#x3c;', nextText: '&#x3e;', currentText: 'Данас', monthNames: ['Јануар','Фебруар','Март','Април','Мај','Јун', 'Јул','Август','Септембар','Октобар','Новембар','Децембар'], monthNamesShort: ['Јан','Феб','Мар','Апр','Мај','Јун', 'Јул','Авг','Сеп','Окт','Нов','Дец'], dayNames: ['Недеља','Понедељак','Уторак','Среда','Четвртак','Петак','Субота'], dayNamesShort: ['Нед','Пон','Уто','Сре','Чет','Пет','Суб'], dayNamesMin: ['Не','По','Ут','Ср','Че','Пе','Су'], dateFormat: 'dd/mm/yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sr']); }); /* Swedish initialisation for the jQuery UI date picker plugin. */ /* Written by Anders Ekdahl ( anders@nomadiz.se). */ jQuery(function($){ $.datepicker.regional['sv'] = { closeText: 'Stäng', prevText: '&laquo;Förra', nextText: 'Nästa&raquo;', currentText: 'Idag', monthNames: ['Januari','Februari','Mars','April','Maj','Juni', 'Juli','Augusti','September','Oktober','November','December'], monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun', 'Jul','Aug','Sep','Okt','Nov','Dec'], dayNamesShort: ['Sön','Mån','Tis','Ons','Tor','Fre','Lör'], dayNames: ['Söndag','Måndag','Tisdag','Onsdag','Torsdag','Fredag','Lördag'], dayNamesMin: ['Sö','Må','Ti','On','To','Fr','Lö'], dateFormat: 'yy-mm-dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['sv']); }); /* Thai initialisation for the jQuery UI date picker plugin. */ /* Written by pipo (pipo@sixhead.com). 
*/ jQuery(function($){ $.datepicker.regional['th'] = { closeText: 'ปิด', prevText: '&laquo;&nbsp;ย้อน', nextText: 'ถัดไป&nbsp;&raquo;', currentText: 'วันนี้', monthNames: ['มกราคม','กุมภาพันธ์','มีนาคม','เมษายน','พฤษภาคม','มิถุนายน', 'กรกฏาคม','สิงหาคม','กันยายน','ตุลาคม','พฤศจิกายน','ธันวาคม'], monthNamesShort: ['ม.ค.','ก.พ.','มี.ค.','เม.ย.','พ.ค.','มิ.ย.', 'ก.ค.','ส.ค.','ก.ย.','ต.ค.','พ.ย.','ธ.ค.'], dayNames: ['อาทิตย์','จันทร์','อังคาร','พุธ','พฤหัสบดี','ศุกร์','เสาร์'], dayNamesShort: ['อา.','จ.','อ.','พ.','พฤ.','ศ.','ส.'], dayNamesMin: ['อา.','จ.','อ.','พ.','พฤ.','ศ.','ส.'], dateFormat: 'dd/mm/yy', firstDay: 0, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['th']); });/* Turkish initialisation for the jQuery UI date picker plugin. */ /* Written by Izzet Emre Erkan (kara@karalamalar.net). */ jQuery(function($){ $.datepicker.regional['tr'] = { closeText: 'kapat', prevText: '&#x3c;geri', nextText: 'ileri&#x3e', currentText: 'bugün', monthNames: ['Ocak','Şubat','Mart','Nisan','Mayıs','Haziran', 'Temmuz','Ağustos','Eylül','Ekim','Kasım','Aralık'], monthNamesShort: ['Oca','Şub','Mar','Nis','May','Haz', 'Tem','Ağu','Eyl','Eki','Kas','Ara'], dayNames: ['Pazar','Pazartesi','Salı','Çarşamba','Perşembe','Cuma','Cumartesi'], dayNamesShort: ['Pz','Pt','Sa','Ça','Pe','Cu','Ct'], dayNamesMin: ['Pz','Pt','Sa','Ça','Pe','Cu','Ct'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['tr']); });/* Ukrainian (UTF-8) initialisation for the jQuery UI date picker plugin. */ /* Written by Maxim Drogobitskiy (maxdao@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['uk'] = { closeText: 'Закрити', prevText: '&#x3c;', nextText: '&#x3e;', currentText: 'Сьогодні', monthNames: ['Січень','Лютий','Березень','Квітень','Травень','Червень', 'Липень','Серпень','Вересень','Жовтень','Листопад','Грудень'], monthNamesShort: ['Січ','Лют','Бер','Кві','Тра','Чер', 'Лип','Сер','Вер','Жов','Лис','Гру'], dayNames: ['неділя','понеділок','вівторок','середа','четвер','пятниця','суббота'], dayNamesShort: ['нед','пнд','вів','срд','чтв','птн','сбт'], dayNamesMin: ['Нд','Пн','Вт','Ср','Чт','Пт','Сб'], dateFormat: 'dd.mm.yy', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['uk']); });/* Chinese initialisation for the jQuery UI date picker plugin. */ /* Written by Cloudream (cloudream@gmail.com). */ jQuery(function($){ $.datepicker.regional['zh-CN'] = { closeText: '关闭', prevText: '&#x3c;上月', nextText: '下月&#x3e;', currentText: '今天', monthNames: ['一月','二月','三月','四月','五月','六月', '七月','八月','九月','十月','十一月','十二月'], monthNamesShort: ['一','二','三','四','五','六', '七','八','九','十','十一','十二'], dayNames: ['星期日','星期一','星期二','星期三','星期四','星期五','星期六'], dayNamesShort: ['周日','周一','周二','周三','周四','周五','周六'], dayNamesMin: ['日','一','二','三','四','五','六'], dateFormat: 'yy-mm-dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['zh-CN']); }); /* Chinese initialisation for the jQuery UI date picker plugin. */ /* Written by Ressol (ressol@gmail.com). 
*/ jQuery(function($){ $.datepicker.regional['zh-TW'] = { closeText: '關閉', prevText: '&#x3c;上月', nextText: '下月&#x3e;', currentText: '今天', monthNames: ['一月','二月','三月','四月','五月','六月', '七月','八月','九月','十月','十一月','十二月'], monthNamesShort: ['一','二','三','四','五','六', '七','八','九','十','十一','十二'], dayNames: ['星期日','星期一','星期二','星期三','星期四','星期五','星期六'], dayNamesShort: ['周日','周一','周二','周三','周四','周五','周六'], dayNamesMin: ['日','一','二','三','四','五','六'], dateFormat: 'yy/mm/dd', firstDay: 1, isRTL: false}; $.datepicker.setDefaults($.datepicker.regional['zh-TW']); });
PypiClean
/OASYS1-SRW-1.1.106.tar.gz/OASYS1-SRW-1.1.106/orangecontrib/srw/widgets/gui/ow_srw_grating.py
import numpy
from PyQt5.QtWidgets import QMessageBox
from orangewidget import gui
from orangewidget.settings import Setting
from oasys.widgets import gui as oasysgui
from oasys.widgets import congruence
from oasys.util.oasys_util import TriggerOut
from syned.beamline.optical_elements.gratings.grating import Grating
from syned.widget.widget_decorator import WidgetDecorator

from orangecontrib.srw.widgets.gui.ow_srw_optical_element import OWSRWOpticalElement
from orangecontrib.srw.util.srw_util import ShowErrorProfileDialog
from orangecontrib.srw.util.srw_objects import SRWData, SRWPreProcessorData, SRWErrorProfileData


class OWSRWGrating(OWSRWOpticalElement):
    """Abstract Orange widget for an SRW diffraction grating.

    Subclasses must implement get_grating_instance() (returning the concrete
    SRW grating optical element to configure) and
    receive_shape_specific_syned_data() (importing shape data from syned).
    """

    # Substrate mirror geometry
    tangential_size = Setting(1.2)
    sagittal_size = Setting(0.01)
    horizontal_position_of_mirror_center = Setting(0.0)
    vertical_position_of_mirror_center = Setting(0.0)

    add_acceptance_slit = Setting(0)    # 0=No, 1=Yes
    automatic_orientation = Setting(1)  # 0=No, 1=Yes (orient output optical axis automatically)

    # Height error profile
    has_height_profile = Setting(0)                  # 0=No, 1=Yes
    height_profile_data_file = Setting("mirror.dat")
    height_profile_data_file_dimension = Setting(0)  # widget index: 0 -> 1D, 1 -> 2D
    height_amplification_coefficient = Setting(1.0)

    diffraction_order = Setting(1)

    grooving_density_0 = Setting(800.0) # groove density [lines/mm] (coefficient a0 in the polynomial groove density: a0 + a1*y + a2*y^2 + a3*y^3 + a4*y^4)
    grooving_density_1 = Setting(0.0)   # groove density polynomial coefficient a1 [lines/mm\u00b2]
    grooving_density_2 = Setting(0.0)   # groove density polynomial coefficient a2 [lines/mm\u00b3]
    grooving_density_3 = Setting(0.0)   # groove density polynomial coefficient a3 [lines/mm\u2074]
    grooving_density_4 = Setting(0.0)   # groove density polynomial coefficient a4 [lines/mm\u2075]
    grooving_angle = Setting(0.0)       # angle between the groove direction and the sagittal direction of the substrate

    inputs = [("SRWData", SRWData, "set_input"),
              ("Trigger", TriggerOut, "propagate_new_wavefront"),
              ("PreProcessor Data", SRWPreProcessorData, "setPreProcessorData"),
              WidgetDecorator.syned_input_data()[0]]

    def __init__(self):
        super().__init__(azimuth_hor_vert=True)

    def draw_specific_box(self):
        """Build the grating-specific part of the widget GUI (substrate,
        grooving, and error-profile tabs)."""
        tabs_grat = oasysgui.tabWidget(self.tab_bas)
        tab_grat = oasysgui.createTabPage(tabs_grat, "Grating")
        tab_errp = oasysgui.createTabPage(tabs_grat, "Error Profile")

        self.grating_setting = oasysgui.tabWidget(tab_grat)

        substrate_tab = oasysgui.createTabPage(self.grating_setting, "Substrate Mirror Setting")
        grooving_tab = oasysgui.createTabPage(self.grating_setting, "Grooving Setting")

        self.substrate_box = oasysgui.widgetBox(substrate_tab, "", addSpace=False, orientation="vertical")
        self.grooving_box = oasysgui.widgetBox(grooving_tab, "", addSpace=False, orientation="vertical")

        oasysgui.lineEdit(self.substrate_box, self, "tangential_size", "Tangential Size [m]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.substrate_box, self, "sagittal_size", "Sagittal_Size [m]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.substrate_box, self, "horizontal_position_of_mirror_center", "Horizontal position of mirror center [m]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.substrate_box, self, "vertical_position_of_mirror_center", "Vertical position of mirror center [m]", labelWidth=260, valueType=float, orientation="horizontal")

        gui.comboBox(self.substrate_box, self, "add_acceptance_slit", label="Add Acceptance Slit",
                     items=["No", "Yes"], labelWidth=300, sendSelectedValue=False, orientation="horizontal")
        gui.comboBox(self.substrate_box, self, "automatic_orientation", label="Automatic Orientation of Output Optical Axis",
                     items=["No", "Yes"], labelWidth=300, sendSelectedValue=False, orientation="horizontal")

        oasysgui.lineEdit(self.grooving_box, self, "diffraction_order", "Diffraction order", labelWidth=260, valueType=int, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_angle", "Angle between groove direction and\nsagittal direction of the substrate [deg]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_density_0", "Groove density [lines/mm]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_density_1", "Groove den. poly. coeff. a1 [lines/mm\u00b2]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_density_2", "Groove den. poly. coeff. a2 [lines/mm\u00b3]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_density_3", "Groove den. poly. coeff. a3 [lines/mm\u2074]", labelWidth=260, valueType=float, orientation="horizontal")
        oasysgui.lineEdit(self.grooving_box, self, "grooving_density_4", "Groove den. poly. coeff. a4 [lines/mm\u2075]", labelWidth=260, valueType=float, orientation="horizontal")

        self.error_box = oasysgui.widgetBox(tab_errp, "", addSpace=False, orientation="vertical")

        gui.comboBox(self.error_box, self, "has_height_profile", label="Use Height Error Profile",
                     items=["No", "Yes"], labelWidth=300, sendSelectedValue=False, orientation="horizontal",
                     callback=self.set_HeightProfile)
        gui.separator(self.error_box)

        # box 1 is the (empty) placeholder shown when no profile is used,
        # box 2 holds the file-selection controls.
        self.height_profile_box_1 = oasysgui.widgetBox(self.error_box, "", addSpace=False, orientation="vertical", height=110)
        self.height_profile_box_2 = oasysgui.widgetBox(self.error_box, "", addSpace=False, orientation="vertical", height=110)

        file_box = oasysgui.widgetBox(self.height_profile_box_2, "", addSpace=False, orientation="horizontal")
        self.le_height_profile_data_file = oasysgui.lineEdit(file_box, self, "height_profile_data_file", "Height profile data file",
                                                             labelWidth=155, valueType=str, orientation="horizontal")
        gui.button(file_box, self, "...", callback=self.selectHeightProfileDataFile)

        file_box_2 = oasysgui.widgetBox(self.height_profile_box_2, "", addSpace=False, orientation="horizontal")
        gui.comboBox(file_box_2, self, "height_profile_data_file_dimension", label="Dimension",
                     items=["1", "2"], labelWidth=280, sendSelectedValue=False, orientation="horizontal")
        gui.button(file_box_2, self, "View", callback=self.view_height_profile)

        oasysgui.lineEdit(self.height_profile_box_2, self, "height_amplification_coefficient", "Height Amplification Coefficient",
                          labelWidth=260, valueType=float, orientation="horizontal")

        self.set_HeightProfile()

    def selectHeightProfileDataFile(self):
        """Open a file dialog and put the chosen path into the line edit."""
        self.le_height_profile_data_file.setText(oasysgui.selectFileFromDialog(self, self.height_profile_data_file, "Height profile data file"))

    def set_HeightProfile(self):
        """Show/hide the error-profile controls according to has_height_profile."""
        self.height_profile_box_1.setVisible(self.has_height_profile == 0)
        self.height_profile_box_2.setVisible(self.has_height_profile == 1)

    def get_optical_element(self):
        """Build and return the concrete SRW grating from the current settings."""
        grating = self.get_grating_instance()

        grating.tangential_size = self.tangential_size
        grating.sagittal_size = self.sagittal_size
        grating.grazing_angle = numpy.radians(90 - self.angle_radial)
        grating.orientation_of_reflection_plane = self.orientation_azimuthal
        grating.invert_tangent_component = self.invert_tangent_component == 1
        grating.add_acceptance_slit = self.add_acceptance_slit == 1
        grating.height_profile_data_file = self.height_profile_data_file if self.has_height_profile else None
        # Widget combo index is 0/1; SRW expects the dimension as 1/2.
        grating.height_profile_data_file_dimension = self.height_profile_data_file_dimension + 1
        grating.height_amplification_coefficient = self.height_amplification_coefficient
        grating.diffraction_order = self.diffraction_order
        grating.grooving_density_0 = self.grooving_density_0
        grating.grooving_density_1 = self.grooving_density_1
        grating.grooving_density_2 = self.grooving_density_2
        grating.grooving_density_3 = self.grooving_density_3
        grating.grooving_density_4 = self.grooving_density_4
        grating.grooving_angle = numpy.radians(self.grooving_angle)

        return grating

    def set_additional_parameters(self, beamline_element, propagation_parameters, beamline):
        """If automatic orientation is on, query the grating for the output
        optical axis orientation at the current photon energy and store the
        (rounded) vectors in the generic O.E. orientation fields."""
        if self.automatic_orientation == 1:
            grating = beamline.get_beamline_element_at(-1).get_optical_element()

            orientation_of_the_output_optical_axis_vector_x, \
            orientation_of_the_output_optical_axis_vector_y, \
            orientation_of_the_output_optical_axis_vector_z, \
            orientation_of_the_horizontal_base_vector_x, \
            orientation_of_the_horizontal_base_vector_y = \
                grating.get_output_orientation_vectors(self.input_srw_data.get_srw_wavefront().get_photon_energy())

            self.oe_orientation_of_the_output_optical_axis_vector_x = round(orientation_of_the_output_optical_axis_vector_x, 8)
            self.oe_orientation_of_the_output_optical_axis_vector_y = round(orientation_of_the_output_optical_axis_vector_y, 8)
            self.oe_orientation_of_the_output_optical_axis_vector_z = round(orientation_of_the_output_optical_axis_vector_z, 8)
            self.oe_orientation_of_the_horizontal_base_vector_x = round(orientation_of_the_horizontal_base_vector_x, 8)
            self.oe_orientation_of_the_horizontal_base_vector_y = round(orientation_of_the_horizontal_base_vector_y, 8)

        super(OWSRWGrating, self).set_additional_parameters(beamline_element, propagation_parameters, beamline)

    def get_grating_instance(self):
        """Return the concrete SRW grating optical element (subclass hook)."""
        raise NotImplementedError()

    def receive_specific_syned_data(self, optical_element):
        """Import geometry and ruling from a syned Grating optical element."""
        if optical_element is not None:
            if isinstance(optical_element, Grating):
                boundaries = optical_element.get_boundary_shape().get_boundaries()
                self.tangential_size = round(abs(boundaries[3] - boundaries[2]), 6)
                self.sagittal_size = round(abs(boundaries[1] - boundaries[0]), 6)
                self.vertical_position_of_mirror_center = round(0.5*(boundaries[3] + boundaries[2]), 6)
                self.horizontal_position_of_mirror_center = round(0.5*(boundaries[1] + boundaries[0]), 6)
                # NOTE(review): the 1e-3 factor presumably converts syned's ruling
                # (lines/m) to the lines/mm this widget stores — confirm against syned.
                self.grooving_density_0 = optical_element._ruling*1e-3

                self.receive_shape_specific_syned_data(optical_element)
            else:
                raise Exception("Syned Data not correct: Optical Element is not a Grating")
        else:
            raise Exception("Syned Data not correct: Empty Optical Element")

    def receive_shape_specific_syned_data(self, optical_element):
        """Import shape-specific data from syned (subclass hook)."""
        raise NotImplementedError()

    def check_data(self):
        """Validate the user-entered settings, raising via congruence on error."""
        super().check_data()

        congruence.checkStrictlyPositiveNumber(self.tangential_size, "Tangential Size")
        congruence.checkStrictlyPositiveNumber(self.sagittal_size, "Sagittal Size")

        if self.has_height_profile:
            congruence.checkFile(self.height_profile_data_file)

        congruence.checkPositiveNumber(self.diffraction_order, "Diffraction Order")
        congruence.checkStrictlyPositiveNumber(self.grooving_density_0, "Groove density")

    def setPreProcessorData(self, data):
        """Receive error-profile data from an SRW pre-processor widget, and
        optionally shrink this O.E. to fit the profile (with confirmation)."""
        if data is not None:
            try:
                if data.error_profile_data is not None:
                    if data.error_profile_data.error_profile_data_file != SRWErrorProfileData.NONE:
                        self.height_profile_data_file = data.error_profile_data.error_profile_data_file
                        self.height_profile_data_file_dimension = 1  # pre-processor profiles are 2D (combo index 1)
                        self.has_height_profile = 1

                        self.set_HeightProfile()

                        changed = False

                        if self.sagittal_size > data.error_profile_data.error_profile_x_dim or \
                           self.tangential_size > data.error_profile_data.error_profile_y_dim:
                            changed = True

                        if changed:
                            if QMessageBox.information(self, "Confirm Modification",
                                                       "Dimensions of this O.E. must be changed in order to ensure congruence with the error profile surface, accept?",
                                                       QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes:
                                if self.sagittal_size > data.error_profile_data.error_profile_x_dim:
                                    self.sagittal_size = data.error_profile_data.error_profile_x_dim
                                if self.tangential_size > data.error_profile_data.error_profile_y_dim:
                                    self.tangential_size = data.error_profile_data.error_profile_y_dim

                                QMessageBox.information(self, "QMessageBox.information()",
                                                        "Dimensions of this O.E. were changed",
                                                        QMessageBox.Ok)
            except Exception as exception:
                QMessageBox.critical(self, "Error", str(exception), QMessageBox.Ok)

    def view_height_profile(self):
        """Open a dialog showing the currently selected height error profile."""
        # (A stray dead `pass` statement before this try block was removed.)
        try:
            dialog = ShowErrorProfileDialog(parent=self,
                                            file_name=self.height_profile_data_file,
                                            dimension=self.height_profile_data_file_dimension + 1)
            dialog.show()
        except Exception as exception:
            QMessageBox.critical(self, "Error", str(exception), QMessageBox.Ok)
PypiClean
/LSSTDESC.Coord-1.3.0.tar.gz/LSSTDESC.Coord-1.3.0/coord/angle.py
import math
import numpy as np

from .angleunit import AngleUnit, radians, degrees, hours, arcmin, arcsec


class Angle(object):
    """A class representing an Angle.  Angles are a value with an AngleUnit.

    You typically create an Angle by multiplying a number by a coord.AngleUnit:

        >>> pixel = 0.27 * arcsec
        >>> ra = 13.4 * hours
        >>> dec = -32 * degrees
        >>> theta = math.pi / 2. * radians

    You can also initialize explicitly with a value and a unit:

        >>> unit = AngleUnit(math.pi / 100)  # gradians
        >>> phi = Angle(90, unit)

    Five built-in AngleUnits are always available: ``radians``, ``degrees``,
    ``hours``, ``arcmin`` and ``arcsec``.

    Since extracting the value in radians is extremely common, the read-only
    attribute ``rad`` does this quickly: ``(90 * degrees).rad == pi/2``.

    Allowed arithmetic: Angle +/- Angle, Angle * float, float * Angle,
    Angle / float, -Angle, the in-place variants, Angle / AngleUnit (giving a
    float), and AngleUnit / AngleUnit.  These operations also work on NumPy
    arrays containing Angles.

    Trigonometry is available via :meth:`sin`, :meth:`cos`, :meth:`tan` and
    :meth:`sincos`; numpy trig functions (np.sin, etc.) work on Angles too.

    Depending on the context, theta = 2pi radians and theta = 0 radians may
    mean the same thing; use :meth:`wrap` to wrap an Angle into a given
    2 pi range, e.g. before testing two angles for equality.
    """

    def __init__(self, theta, unit=None):
        """
        :param theta: The numerical value of the angle, or another Angle to copy.
        :param unit:  The units theta is measured in (required unless theta is
                      already an Angle).
        """
        # We also want to allow angle1 = Angle(angle2) as a copy, so check for that.
        if isinstance(theta, Angle):
            if unit is not None:
                raise TypeError("Cannot provide unit if theta is already an Angle instance")
            self._rad = theta._rad
        elif unit is None:
            raise TypeError("Must provide unit for Angle.__init__")
        elif not isinstance(unit, AngleUnit):
            raise TypeError("Invalid unit %s of type %s"%(unit,type(unit)))
        else:
            # Normal case
            self._rad = float(theta) * unit.value

    @property
    def rad(self):
        """Return the Angle in radians.  Equivalent to angle / coord.radians."""
        return self._rad

    @property
    def deg(self):
        """Return the Angle in degrees.  Equivalent to angle / coord.degrees."""
        return self / degrees

    def __neg__(self):
        return _Angle(-self._rad)

    def __add__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot add %s of type %s to an Angle"%(other,type(other)))
        return _Angle(self._rad + other._rad)

    def __sub__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot subtract %s of type %s from an Angle"%(other,type(other)))
        return _Angle(self._rad - other._rad)

    def __mul__(self, other):
        # Accept anything that converts losslessly to float.  The conversion is
        # guarded so that e.g. float('abc') raises the documented TypeError
        # rather than leaking a ValueError.  (NaN still fails the != test,
        # matching the historical behavior.)
        try:
            f = float(other)
        except (TypeError, ValueError):
            raise TypeError("Cannot multiply Angle by %s of type %s"%(other,type(other)))
        if other != f:
            raise TypeError("Cannot multiply Angle by %s of type %s"%(other,type(other)))
        return _Angle(self._rad * other)

    __rmul__ = __mul__

    def __div__(self, other):
        # Dividing by an AngleUnit yields a plain float; dividing by a number
        # yields an Angle.  The float conversion is guarded as in __mul__.
        if isinstance(other, AngleUnit):
            return self._rad / other.value
        try:
            f = float(other)
        except (TypeError, ValueError):
            raise TypeError("Cannot divide Angle by %s of type %s"%(other,type(other)))
        if other == f:
            return _Angle(self._rad / other)
        else:
            raise TypeError("Cannot divide Angle by %s of type %s"%(other,type(other)))

    __truediv__ = __div__

    def wrap(self, center=None):
        """Wrap this Angle into the range [center-pi, center+pi) radians.

        Since theta = 2pi radians and theta = 0 radians often mean the same
        thing, this is appropriate before testing two angles for equality or
        calculating the difference between them.  E.g. (700 * degrees).wrap()
        is -20 degrees, and wrapping with center=180*degrees gives 340 degrees.

        :param center: The center point of the wrapped range. [default: 0 radians]
        :returns: the equivalent angle within the range [center-pi, center+pi)
        """
        if center is None:
            center = _Angle(0.)
        start = center._rad - math.pi
        offset = (self._rad - start) // (2.*math.pi)  # How many full cycles to subtract
        return _Angle(self._rad - offset * 2.*math.pi)

    def sin(self):
        """Return the sin of an Angle."""
        return math.sin(self._rad)

    def cos(self):
        """Return the cos of an Angle."""
        return math.cos(self._rad)

    def tan(self):
        """Return the tan of an Angle."""
        return math.tan(self._rad)

    def sincos(self):
        """Return both the sin and cos of an Angle as a 2-tuple (sint, cost)."""
        sin = math.sin(self._rad)
        cos = math.cos(self._rad)
        return sin, cos

    def __str__(self):
        return str(self._rad) + ' radians'

    def __repr__(self):
        return 'coord.Angle(%r, coord.radians)'%self.rad

    def __eq__(self, other):
        return isinstance(other,Angle) and self.rad == other.rad

    def __ne__(self, other):
        return not self.__eq__(other)

    def __le__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot compare %s of type %s to an Angle"%(other,type(other)))
        return self._rad <= other._rad

    def __lt__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot compare %s of type %s to an Angle"%(other,type(other)))
        return self._rad < other._rad

    def __ge__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot compare %s of type %s to an Angle"%(other,type(other)))
        return self._rad >= other._rad

    def __gt__(self, other):
        if not isinstance(other, Angle):
            raise TypeError("Cannot compare %s of type %s to an Angle"%(other,type(other)))
        return self._rad > other._rad

    def __hash__(self):
        return hash(('coord.Angle', self._rad))

    @staticmethod
    def _make_dms_string(decimal, sep, prec, pad, plus_sign):
        """Format a decimal number of hours/degrees as [+-]dd<sep>mm<sep>ss.s."""
        # Account for the sign properly
        if decimal < 0:
            sign = '-'
            decimal = -decimal
        elif plus_sign:
            sign = '+'
        else:
            sign = ''

        # Figure out the 3 sep tokens
        sep1 = sep2 = ''
        sep3 = None
        if len(sep) == 1:
            sep1 = sep2 = sep
        elif len(sep) == 2:
            sep1, sep2 = sep
        elif len(sep) == 3:
            sep1, sep2, sep3 = sep

        # Round to nearest 1.e-8 seconds (or 10**-prec if given)
        round_prec = 8 if prec is None else prec
        digits = 10**round_prec

        decimal = int(3600 * digits * decimal + 0.5)

        d = decimal // (3600 * digits)
        decimal -= d * (3600 * digits)
        m = decimal // (60 * digits)
        decimal -= m * (60 * digits)
        s = decimal // digits
        decimal -= s * digits

        # Make the string
        if pad:
            d_str = '%02d'%d
            m_str = '%02d'%m
            s_str = '%02d'%s
        else:
            d_str = '%d'%d
            m_str = '%d'%m
            s_str = '%d'%s
        string = '%s%s%s%s%s%s.%0*d'%(sign,d_str,sep1,m_str,sep2,s_str,round_prec,decimal)
        if not prec:
            # Strip insignificant trailing zeros (and a bare trailing '.').
            # Note: prec == 0 takes this branch too, matching historical behavior.
            string = string.rstrip('0')
            string = string.rstrip('.')
        if sep3:
            string = string + sep3
        return string

    def hms(self, sep=":", prec=None, pad=True, plus_sign=False):
        """Return an HMS representation of the angle as a string: +-hh:mm:ss.decimal.

        The reverse process is effected by :meth:`Angle.from_hms`:

            >>> (-5.357 * hours).hms()
            '-05:21:25.2'

        :param sep:  The token to put between the hh and mm and between mm and
                     ss.  May also be a string or tuple of 2 or 3 items, e.g.
                     'hm', 'hms' or ('hours ', 'minutes ', 'seconds').
                     [default: ':']
        :param prec: The number of digits of precision after the decimal point.
                     [default: None]
        :param pad:  Whether to pad with a leading 0 if necessary to make h,m,s
                     2 digits. [default: True]
        :param plus_sign: Whether to use a plus sign for positive angles.
                     [default: False]
        :returns: a string of the HMS representation of the angle.
        """
        if not len(sep) <= 3:
            raise ValueError("sep must be a string or tuple of length <= 3")
        if prec is not None and not prec >= 0:
            raise ValueError("prec must be >= 0")
        return self._make_dms_string(self/hours, sep, prec, pad, plus_sign)

    def dms(self, sep=":", prec=None, pad=True, plus_sign=False):
        """Return a DMS representation of the angle as a string: +-dd:mm:ss.decimal.

        The reverse process is effected by :meth:`Angle.from_dms`:

            >>> (-(5 * degrees + 21 * arcmin + 25.2 * arcsec)).dms()
            '-05:21:25.2'

        :param sep:  The token to put between the dd and mm and between mm and
                     ss.  May also be a string or tuple of 2 or 3 items, e.g.
                     'dm', 'dms' or ('degrees ', 'minutes ', 'seconds').
                     [default: ':']
        :param prec: The number of digits of precision after the decimal point.
                     [default: None]
        :param pad:  Whether to pad with a leading 0 if necessary to make d 2
                     digits. [default: True]
        :param plus_sign: Whether to use a plus sign for positive angles.
                     [default: False]
        :returns: a string of the DMS representation of the angle.
        """
        if not len(sep) <= 3:
            raise ValueError("sep must be a string or tuple of length <= 3")
        if prec is not None and not prec >= 0:
            raise ValueError("prec must be >= 0")
        return self._make_dms_string(self/degrees, sep, prec, pad, plus_sign)

    @staticmethod
    def from_hms(str):
        """Convert a string of the form hh:mm:ss.decimal into an Angle.

        There may be an initial + or - (or neither), then two digits for the
        hours, two for the minutes, and two for the seconds, optionally
        followed by a decimal point and more digits.  Any non-digit characters
        between the fields (colons, whitespace, or nothing) are ignored.

        :param str: The string to parse.
        :returns: the corresponding Angle instance
        """
        return Angle._parse_dms(str) * hours

    @staticmethod
    def from_dms(str):
        """Convert a string of the form dd:mm:ss.decimal into an Angle.

        There may be an initial + or - (or neither), then two digits for the
        degrees, two for the minutes, and two for the seconds, optionally
        followed by a decimal point and more digits.  Any non-digit characters
        between the fields (colons, whitespace, or nothing) are ignored.

        :param str: The string to parse.
        :returns: the corresponding Angle instance
        """
        return Angle._parse_dms(str) * degrees

    @staticmethod
    def _parse_dms(dms):
        """Convert a string of the form dd:mm:ss.decimal into decimal degrees."""
        import re
        # Raw string pattern: the unescaped '\.' previously triggered an
        # invalid-escape warning on modern Python.
        tokens = tuple(filter(None, re.split(r'([\.\d]+)', dms.strip())))
        if len(tokens) <= 1:
            raise ValueError("string is not of the expected format")
        sign = 1
        try:
            dd = float(tokens[0])
        except ValueError:
            # First token is a sign marker (or other non-numeric prefix).
            if tokens[0].strip() == '-':
                sign = -1
            tokens = tokens[1:]
            dd = float(tokens[0])
        if len(tokens) <= 1:
            raise ValueError("string is not of the expected format")
        if len(tokens) <= 2:
            return sign * dd
        mm = float(tokens[2])
        if len(tokens) <= 4:
            return sign * (dd + mm/60)
        if len(tokens) >= 7:
            raise ValueError("string is not of the expected format")
        ss = float(tokens[4])
        return sign * (dd + mm/60. + ss/3600.)


def _Angle(theta):
    """Equivalent to ``Angle(theta, coord.radians)``, but without the normal
    overhead (which isn't much to be honest, but this is nonetheless slightly
    quicker).

    :param theta: The numerical value of the angle in radians.
    """
    ret = Angle.__new__(Angle)
    ret._rad = theta
    return ret
PypiClean
/NLP_LIB_cpu-0.0.12.tar.gz/NLP_LIB_cpu-0.0.12/NLP_LIB/callbacks/bert_lr_wrapper.py
from NLP_LIB.nlp_core.callback_wrapper import CallbackWrapper
from tensorflow.keras.callbacks import Callback
from tensorflow.keras import backend as K


# This code implements BERT-like learning rate scheduling:
# linear warmup from 0 to the base rate over `warmup` steps, then constant.
class BERTLearningRateWrapper(CallbackWrapper):
    """CallbackWrapper exposing a Keras callback that applies the BERT
    learning-rate schedule (linear warmup, then a constant base rate of
    1e-4 * scale) at the start of every batch.
    """

    def __init__(self, config, execution_config, model, dataset, input_data_transform, output_data_transform):
        super(BERTLearningRateWrapper, self).__init__(config, execution_config, model, dataset, input_data_transform, output_data_transform)

        class _K_DynamicLearningRate(Callback):
            def __init__(self, d_model, warmup=4000, scale=1.0):
                super(_K_DynamicLearningRate, self).__init__()
                # NOTE(review): d_model is accepted for interface compatibility
                # but unused by this schedule (it belongs to the Transformer
                # "Noam" schedule, not BERT's warmup-then-constant one).
                self.basic = 1e-4 * scale
                self.warm = warmup
                # step_num is initialized lazily from the model's initial epoch:
                # step_num = epoch x (training_data_count / batch_size)
                self.step_num = 0
                self.lazy_init = False
                self.execution_config = execution_config

            def initialize_step_num(self):
                """Resume the global step counter from the configured initial epoch."""
                initial_epoch = self.execution_config['initial_epoch']
                print('DynamicLearningRateWrapper ====> INIT')
                if initial_epoch > 0:
                    print('DynamicLearningRateWrapper ====> INIT_FROM_EPOCH')
                    batch_size = 1
                    if 'batch_size' in self.execution_config and self.execution_config['batch_size'] is not None:
                        batch_size = self.execution_config['batch_size']
                    training_sample_count = 32
                    try:
                        # Best effort: the dataset may be unavailable (e.g. in the
                        # stand-alone unit test below); fall back to the default.
                        (X, _, _, _) = model.load_encoded_data(dataset)
                        training_sample_count = X.shape[0]
                    except Exception:
                        pass
                    print('Training Sample Count = ' + str(training_sample_count))
                    # Batches per epoch, rounding a final partial batch up.
                    self.step_num = initial_epoch * (training_sample_count // batch_size + (training_sample_count % batch_size > 0))
                    print('Init Step Num ' + str(self) + ' from epoch: ' + str(initial_epoch) + ', batch_size: ' + str(batch_size) + ' => step_num: ' + str(self.step_num))

            def on_batch_begin(self, batch, logs=None):
                # Lazy init step_num
                if not self.lazy_init:
                    self.initialize_step_num()
                    self.lazy_init = True
                self.step_num += 1
                if self.step_num < self.warm:
                    lr = self.step_num * self.basic / self.warm  # linear warmup
                else:
                    lr = self.basic  # constant after warmup
                # self.model is only attached by Keras during fit(); guard so the
                # callback can also be driven stand-alone (see unit test below).
                if getattr(self, 'model', None) is not None:
                    K.set_value(self.model.optimizer.lr, lr)
                self.effective_lr = lr

            def on_epoch_begin(self, epoch, logs=None):
                pass

        scale = 1.0
        if 'scale' in config:
            scale = config['scale']
        self.keras_callback = _K_DynamicLearningRate(config['d_model'], config['warmup'], scale)

    # This function should return keras Callback instance constructed from
    # configuration of this object.
    def get_keras_callback(self):
        return self.keras_callback


# Unit Test
if __name__ == '__main__':
    import matplotlib.pyplot as plt  # (unused `import cv2` removed)

    config = {
        'd_model': 512,
        'warmup': 10000,
        'scale': 1.0,
    }
    exec_config = {
        'initial_epoch': 1,
        'batch_size': 32,
    }
    dlr = BERTLearningRateWrapper(config, exec_config, None, None, None, None)
    cb = dlr.keras_callback

    lrs = []
    eps = []
    for i in range(50000):
        cb.on_batch_begin(None)
        lr = cb.effective_lr
        print('lr[' + str(i) + '] = ' + str(lr))
        eps.append(i)
        lrs.append(lr)

    plt.plot(eps, lrs)
    plt.show()

    print('Finished.')
PypiClean
/EdaSpiffWorkflow-0.0.2.tar.gz/EdaSpiffWorkflow-0.0.2/EdaSpiffWorkflow_Aadesh_G/bpmn/serializer/Packager.py
from builtins import object
# Copyright (C) 2012 Matthew Hampton
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301  USA

import os
import configparser
import glob
import hashlib
import inspect
import zipfile
from io import StringIO
from optparse import OptionParser, OptionGroup
from ..parser.BpmnParser import BpmnParser
from ..parser.ValidationException import ValidationException
from ..parser.util import xpath_eval, one
from lxml import etree

SIGNAVIO_NS = 'http://www.signavio.com'
CONFIG_SECTION_NAME = "Packager Options"


def md5hash(data):
    """Return the lowercase hex MD5 digest of *data* (str or bytes)."""
    if not isinstance(data, bytes):
        data = data.encode('UTF-8')

    return hashlib.md5(data).hexdigest().lower()


class Packager(object):
    """
    The Packager class pre-parses a set of BPMN files (together with their
    SVG representation), validates the contents and then produces a
    ZIP-based archive containing the pre-parsed BPMN and SVG files, the
    source files (for reference) and a metadata.ini file that contains
    enough information to create a BpmnProcessSpec instance from the
    archive (e.g. the ID of the entry point process).

    This class can be extended and any public method overridden to do
    additional validation / parsing or to package additional metadata.

    Extension point:

    PARSER_CLASS: provide the class that should be used to parse the BPMN
    files. The fully-qualified name will be included in the metadata.ini
    file, so that the BpmnSerializer can instantiate the right parser to
    deal with the package.

    Editor hooks: package_for_editor_<editor name>(self, spec, filename):
    Called once for each BPMN file. Should add any additional files to the
    archive.
    """

    METADATA_FILE = "metadata.ini"
    MANIFEST_FILE = "manifest.ini"
    PARSER_CLASS = BpmnParser

    def __init__(self, package_file, entry_point_process, meta_data=None,
                 editor=None):
        """
        Constructor.

        :param package_file: a file-like object where the contents of the
        package must be written to

        :param entry_point_process: the name or ID of the entry point process

        :param meta_data: A list of meta-data tuples to include in the
        metadata.ini file (in addition to the standard ones)

        :param editor: The name of the editor used to create the source BPMN
        / SVG files. This activates additional hook method calls. (optional)
        """
        self.package_file = package_file
        self.entry_point_process = entry_point_process
        self.parser = self.PARSER_CLASS()
        self.meta_data = meta_data or []
        self.input_files = []
        self.input_path_prefix = None
        self.editor = editor
        self.manifest = {}

    def add_bpmn_file(self, filename):
        """
        Add the given BPMN filename to the packager's set.
        """
        self.add_bpmn_files([filename])

    def add_bpmn_files_by_glob(self, g):
        """
        Add all filenames matching the provided pattern (e.g. *.bpmn) to the
        packager's set.
        """
        self.add_bpmn_files(glob.glob(g))

    def add_bpmn_files(self, filenames):
        """
        Add all filenames in the given list to the packager's set.
        """
        self.input_files += filenames

    def create_package(self):
        """
        Creates the package, writing the data out to the provided file-like
        object.
        """

        # Check that all files exist (and calculate the longest shared path
        # prefix):
        self.input_path_prefix = None
        for filename in self.input_files:
            if not os.path.isfile(filename):
                raise ValueError(
                    '%s does not exist or is not a file' % filename)
            if self.input_path_prefix:
                full = os.path.abspath(os.path.dirname(filename))
                # Shrink the candidate prefix one character at a time until
                # it matches. FIX: stop once the prefix is empty - the
                # original loop condition spun forever on '' when two input
                # paths shared no common prefix at all.
                while (self.input_path_prefix and
                       not full.startswith(self.input_path_prefix)):
                    self.input_path_prefix = self.input_path_prefix[:-1]
            else:
                self.input_path_prefix = os.path.abspath(
                    os.path.dirname(filename))

        # Parse all of the XML:
        self.bpmn = {}
        for filename in self.input_files:
            bpmn = etree.parse(filename)
            self.bpmn[os.path.abspath(filename)] = bpmn

        # Now run through pre-parsing and validation:
        for filename, bpmn in list(self.bpmn.items()):
            bpmn = self.pre_parse_and_validate(bpmn, filename)
            self.bpmn[os.path.abspath(filename)] = bpmn

        # Now check that we can parse it fine:
        for filename, bpmn in list(self.bpmn.items()):
            self.parser.add_bpmn_xml(bpmn, filename=filename)

        # at this point, we have a item in self.wf_spec.get_specs_depth_first()
        # that has a filename of None and a bpmn that needs to be added to the
        # list below in for spec.
        self.wf_spec = self.parser.get_spec(self.entry_point_process)

        # Now package everything:
        self.package_zip = zipfile.ZipFile(
            self.package_file, "w", compression=zipfile.ZIP_DEFLATED)

        done_files = set()

        for spec in self.wf_spec.get_specs_depth_first():
            filename = spec.file
            if filename is None:
                # This is for when we are doing a subworkflow, and it
                # creates something in the bpmn spec list, but it really has
                # no file. In this case, it is safe to skip the add to the
                # zip file.
                continue
            if filename not in done_files:
                done_files.add(filename)

                bpmn = self.bpmn[os.path.abspath(filename)]
                self.write_to_package_zip(
                    "%s.bpmn" % spec.name, etree.tostring(bpmn.getroot()))

                # FIX: copy the source file's *contents* into the archive.
                # The original called write_to_package_zip(zip_name,
                # filename), which stored the path string itself as the
                # file data.
                self.write_file_to_package_zip(
                    "src/" + self._get_zip_path(filename), filename)

                self._call_editor_hook('package_for_editor', spec, filename)

        self.write_meta_data()
        self.write_manifest()

        self.package_zip.close()

    def write_file_to_package_zip(self, filename, src_filename):
        """
        Writes a local file in to the zip file and adds it to the manifest
        dictionary

        :param filename: The zip file name

        :param src_filename: the local file name
        """
        # Read in binary mode so hashing works for arbitrary file contents;
        # text mode broke on non-UTF-8 sources and normalised line endings,
        # making the manifest hash platform-dependent.
        with open(src_filename, 'rb') as f:
            data = f.read()
        self.manifest[filename] = md5hash(data)
        self.package_zip.write(src_filename, filename)

    def write_to_package_zip(self, filename, data):
        """
        Writes data to the zip file and adds it to the manifest dictionary

        :param filename: The zip file name

        :param data: the data
        """
        self.manifest[filename] = md5hash(data)
        self.package_zip.writestr(filename, data)

    def write_manifest(self):
        """
        Write the manifest content to the zip file. It must be a predictable
        order.
        """
        config = configparser.ConfigParser()

        config.add_section('Manifest')

        # Sorted keys keep the manifest byte-for-byte reproducible.
        for f in sorted(self.manifest.keys()):
            config.set('Manifest', f.replace(
                '\\', '/').lower(), self.manifest[f])

        ini = StringIO()
        config.write(ini)
        self.manifest_data = ini.getvalue()
        self.package_zip.writestr(self.MANIFEST_FILE, self.manifest_data)

    def pre_parse_and_validate(self, bpmn, filename):
        """
        A subclass can override this method to provide additional parsing or
        validation. It should call the parent method first.

        :param bpmn: an lxml tree of the bpmn content

        :param filename: the source file name

        This must return the updated bpmn object (or a replacement)
        """
        bpmn = self._call_editor_hook(
            'pre_parse_and_validate', bpmn, filename) or bpmn
        return bpmn

    def pre_parse_and_validate_signavio(self, bpmn, filename):
        """
        This is the Signavio specific editor hook for pre-parsing and
        validation.

        A subclass can override this method to provide additional parsing or
        validation. It should call the parent method first.

        :param bpmn: an lxml tree of the bpmn content

        :param filename: the source file name

        This must return the updated bpmn object (or a replacement)
        """
        self._check_for_disconnected_boundary_events_signavio(bpmn, filename)
        self._fix_call_activities_signavio(bpmn, filename)
        return bpmn

    def _check_for_disconnected_boundary_events_signavio(self, bpmn,
                                                         filename):
        # signavio sometimes disconnects a BoundaryEvent from it's owning task
        # They then show up as intermediateCatchEvents without any incoming
        # sequence flows
        xpath = xpath_eval(bpmn)
        for catch_event in xpath('.//bpmn:intermediateCatchEvent'):
            incoming = xpath(
                './/bpmn:sequenceFlow[@targetRef="%s"]' %
                catch_event.get('id'))
            if not incoming:
                raise ValidationException(
                    'Intermediate Catch Event has no incoming sequences. '
                    'This might be a Boundary Event that has been '
                    'disconnected.',
                    node=catch_event, filename=filename)

    def _fix_call_activities_signavio(self, bpmn, filename):
        """
        Signavio produces slightly invalid BPMN for call activity nodes...
        It is supposed to put a reference to the id of the called process in
        to the calledElement attribute. Instead it stores a string (which is
        the name of the process - not its ID, in our interpretation) in an
        extension tag.

        This code gets the name of the 'subprocess reference', finds a
        process with a matching name, and sets the calledElement attribute
        to the id of the process.
        """
        for node in xpath_eval(bpmn)(".//bpmn:callActivity"):
            calledElement = node.get('calledElement', None)
            if not calledElement:
                signavioMetaData = xpath_eval(node, extra_ns={
                    'signavio': SIGNAVIO_NS})(
                    './/signavio:signavioMetaData[@metaKey="entry"]')
                if not signavioMetaData:
                    raise ValidationException(
                        'No Signavio "Subprocess reference" specified.',
                        node=node, filename=filename)
                subprocess_reference = one(signavioMetaData).get('metaValue')
                matches = []
                # Search every parsed file for a process whose name (or id,
                # when it has no name) equals the Signavio reference string.
                for b in list(self.bpmn.values()):
                    for p in xpath_eval(b)(".//bpmn:process"):
                        if (p.get('name', p.get('id', None)) ==
                                subprocess_reference):
                            matches.append(p)
                if not matches:
                    raise ValidationException(
                        "No matching process definition found for '%s'." %
                        subprocess_reference, node=node, filename=filename)
                if len(matches) != 1:
                    raise ValidationException(
                        "More than one matching process definition "
                        " found for '%s'." % subprocess_reference, node=node,
                        filename=filename)

                node.set('calledElement', matches[0].get('id'))

    def _call_editor_hook(self, hook, *args, **kwargs):
        # Dispatch to a method named '<hook>_<editor>' when the packager was
        # constructed for a specific editor; silently a no-op otherwise.
        if self.editor:
            hook_func = getattr(self, "%s_%s" % (hook, self.editor), None)
            if hook_func:
                return hook_func(*args, **kwargs)
        return None

    def package_for_editor_signavio(self, spec, filename):
        """
        Adds the SVG files to the archive for this BPMN file.
        """
        signavio_file = filename[:-len('.bpmn20.xml')] + '.signavio.xml'
        if os.path.exists(signavio_file):
            self.write_file_to_package_zip(
                "src/" + self._get_zip_path(signavio_file), signavio_file)

            with open(signavio_file, 'r') as f:
                signavio_tree = etree.parse(f)
            svg_node = one(signavio_tree.findall('.//svg-representation'))
            self.write_to_package_zip("%s.svg" % spec.name, svg_node.text)

    def write_meta_data(self):
        """
        Writes the metadata.ini file to the archive.
        """
        config = configparser.ConfigParser()

        config.add_section('MetaData')
        config.set('MetaData', 'entry_point_process', self.wf_spec.name)
        if self.editor:
            config.set('MetaData', 'editor', self.editor)

        for k, v in self.meta_data:
            config.set('MetaData', k, v)

        # Record the parser class only when a subclass replaced the default,
        # so BpmnSerializer can re-instantiate the right parser.
        if not self.PARSER_CLASS == BpmnParser:
            config.set('MetaData', 'parser_class_module',
                       inspect.getmodule(self.PARSER_CLASS).__name__)
            config.set('MetaData', 'parser_class', self.PARSER_CLASS.__name__)

        ini = StringIO()
        config.write(ini)
        self.write_to_package_zip(self.METADATA_FILE, ini.getvalue())

    def _get_zip_path(self, filename):
        # Path of *filename* inside the archive: the absolute path with the
        # common input prefix and any leading separators stripped.
        p = os.path.abspath(filename)[
            len(self.input_path_prefix):].replace(os.path.sep, '/')
        while p.startswith('/'):
            p = p[1:]
        return p

    @classmethod
    def get_version(cls):
        try:
            import pkg_resources  # part of setuptools
            version = pkg_resources.require("SpiffWorkflow")[0].version
        except Exception:
            version = 'DEV'
        return version

    @classmethod
    def create_option_parser(cls):
        """
        Override in subclass if required.
        """
        return OptionParser(
            usage=("%prog [options] -o <package file> -p "
                   "<entry point process> <input BPMN files ...>"),
            version="SpiffWorkflow BPMN Packager %s" % (cls.get_version()))

    @classmethod
    def add_main_options(cls, parser):
        """
        Override in subclass if required.
        """
        parser.add_option("-o", "--output", dest="package_file",
                          help="create the BPMN package in the specified file")
        parser.add_option("-p", "--process", dest="entry_point_process",
                          help="specify the entry point process")
        parser.add_option("-c", "--config-file", dest="config_file",
                          help="specify a config file to use")
        parser.add_option(
            "-i", "--initialise-config-file", action="store_true",
            dest="init_config_file", default=False,
            help="create a new config file from the specified options")

        group = OptionGroup(parser, "BPMN Editor Options",
                            "These options are not required, but may be "
                            " provided to activate special features of "
                            "supported BPMN editors.")
        group.add_option("--editor", dest="editor",
                         help="editors with special support: signavio")
        parser.add_option_group(group)

    @classmethod
    def add_additional_options(cls, parser):
        """
        Override in subclass if required.
        """
        group = OptionGroup(parser, "Target Engine Options",
                            "These options are not required, but may be "
                            "provided if a specific "
                            "BPMN application engine is targeted.")
        group.add_option("-e", "--target-engine", dest="target_engine",
                         help="target the specified BPMN application engine")
        group.add_option(
            "-t", "--target-version", dest="target_engine_version",
            help="target the specified version of the BPMN application "
                 "engine")
        parser.add_option_group(group)

    @classmethod
    def check_args(cls, config, options, args, parser, package_file=None):
        """
        Override in subclass if required.
        """
        if not args:
            parser.error("no input files specified")
        if not (package_file or options.package_file):
            parser.error("no package file specified")
        if not options.entry_point_process:
            parser.error("no entry point process specified")

    @classmethod
    def merge_options_and_config(cls, config, options, args):
        """
        Override in subclass if required.
        """
        if args:
            config.set(CONFIG_SECTION_NAME, 'input_files', ','.join(args))
        elif config.has_option(CONFIG_SECTION_NAME, 'input_files'):
            for i in config.get(CONFIG_SECTION_NAME,
                                'input_files').split(','):
                if not os.path.isabs(i):
                    # Config-relative paths are resolved against the config
                    # file's own directory.
                    i = os.path.abspath(
                        os.path.join(os.path.dirname(options.config_file),
                                     i))
                args.append(i)

        cls.merge_option_and_config_str('package_file', config, options)
        cls.merge_option_and_config_str('entry_point_process', config,
                                        options)
        cls.merge_option_and_config_str('target_engine', config, options)
        cls.merge_option_and_config_str(
            'target_engine_version', config, options)
        cls.merge_option_and_config_str('editor', config, options)

    @classmethod
    def merge_option_and_config_str(cls, option_name, config, options):
        """
        Utility method to merge an option and config, with the option taking
        precedence
        """
        opt = getattr(options, option_name, None)
        if opt:
            config.set(CONFIG_SECTION_NAME, option_name, opt)
        elif config.has_option(CONFIG_SECTION_NAME, option_name):
            setattr(options, option_name, config.get(
                CONFIG_SECTION_NAME, option_name))

    @classmethod
    def create_meta_data(cls, options, args, parser):
        """
        Override in subclass if required.
        """
        meta_data = []
        meta_data.append(('spiff_version', cls.get_version()))
        if options.target_engine:
            meta_data.append(('target_engine', options.target_engine))
        # FIX: the original re-tested options.target_engine here, which
        # recorded target_engine_version=None whenever an engine was given
        # without a version.
        if options.target_engine_version:
            meta_data.append(
                ('target_engine_version', options.target_engine_version))
        return meta_data

    @classmethod
    def main(cls, argv=None, package_file=None):
        parser = cls.create_option_parser()
        cls.add_main_options(parser)
        cls.add_additional_options(parser)

        (options, args) = parser.parse_args(args=argv)

        config = configparser.ConfigParser()
        if options.config_file:
            config.read(options.config_file)
        if not config.has_section(CONFIG_SECTION_NAME):
            config.add_section(CONFIG_SECTION_NAME)

        cls.merge_options_and_config(config, options, args)
        if options.init_config_file:
            if not options.config_file:
                parser.error(
                    "no config file specified - cannot initialise "
                    "config file")
            with open(options.config_file, "w") as f:
                config.write(f)
            return

        cls.check_args(config, options, args, parser, package_file)

        meta_data = cls.create_meta_data(options, args, parser)

        packager = cls(package_file=package_file or options.package_file,
                       entry_point_process=options.entry_point_process,
                       meta_data=meta_data, editor=options.editor)
        for a in args:
            packager.add_bpmn_files_by_glob(a)
        packager.create_package()

        return packager


def main(packager_class=None):
    """
    :param packager_class: The Packager class to use. Default: Packager.
    """
    if not packager_class:
        packager_class = Packager

    packager_class.main()


if __name__ == '__main__':
    main()
PypiClean
/EnergyPlusRegressionTool-1.9.6-py3-none-any.whl/epregressions/diffs/thresh_dict.py
# Copyright (C) 2013 Amir Roth
# This file is part of mathdiff.
#
# mathdiff is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# mathdiff is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with mathdiff.  If not, see <http://www.gnu.org/licenses/>.

# VERSION: 1.3

__author__ = "Amir Roth (amir dot roth at ee dot doe dot gov)"
__version__ = "1.4"
__copyright__ = "Copyright (c) 2013 Amir Roth"
__license__ = "GNU General Public License Version 3"

import re


# Load threshold dictionary from math_diff.config file
class ThreshDict(object):
    """Maps a (unit, aggregation) pair to (absolute, relative) thresholds.

    The config file contains lines of the form::

        <unit>, <aggregation> = <abs thresh>, <rel thresh>

    '#' starts a comment; parsing stops at the first blank line.
    """

    def __init__(self, tdname):
        """Parse the config file *tdname* into self.thresholds."""
        self.thresholds = {}

        # Use a context manager so the handle is closed even on error (the
        # original `while f:` loop never closed the file on an exception and
        # relied on the file object always being truthy).
        with open(tdname) as config_file:
            for raw_line in config_file:
                line = raw_line.strip()

                # Historical behaviour: the first blank line ends parsing
                # (this also covered end-of-file in the original readline
                # loop).
                if line == '':
                    break

                # Ignore comment lines
                if line[0] == '#':
                    continue

                # noinspection PyBroadException
                try:
                    # Split off end-of-line comments
                    if line.find('#') > -1:
                        line = line[:line.find('#')]

                    [unit, agg, abs_thresh, rel_thresh] = [
                        x.strip() for x in re.split('[,=]', line) if x != '']
                    tag = unit + '|' + agg
                    if tag in self.thresholds:
                        # Over-riding existing entry for tag in threshold
                        # dictionary math_diff.config
                        pass
                    self.thresholds[tag] = (float(abs_thresh),
                                            float(rel_thresh))
                except Exception:
                    # Deliberately skip malformed lines instead of aborting
                    # the whole parse.
                    pass

    def lookup(self, hstr):
        # Lookup a threshold value in the dictionary using a report column
        # header string and a differencing type (relative or absolute)
        if hstr == 'Date/Time' or hstr == 'Time':
            return 0.0, 0.0
        if hstr == 'Version ID':
            # allow version number changes to pass without throwing table
            # diffs
            return 100.0, 100.0

        # Parse hstr (column header) to extract Unit and Aggregation
        # noinspection PyBroadException
        try:
            if hstr.find('[]') == -1 and hstr.find('[') > -1:
                tokens = [x.strip() for x in re.split(r'[\[\]]', hstr)
                          if x.strip() != '']
                unit = tokens[1] if len(tokens) > 1 else tokens[0]
            else:
                unit = '*'
            if hstr.find('{}') == -1 and hstr.find('{') > -1:
                tokens = [x.strip() for x in re.split(r'[{\}]', hstr)
                          if x.strip() != '']
                agg = tokens[1] if len(tokens) > 1 else tokens[0]
            else:
                agg = '*'
        except Exception:  # pragma: no cover - I could not figure out how to get an exception
            # print >> sys.stderr, 'PROBLEM: cannot figure out
            # unit/aggregation for ' + hstr + ', defaulting to *,*'
            unit = '*'
            agg = '*'

        tag = unit + '|' + agg
        tag_d1 = unit + '|*'
        tag_d2 = '*|*'

        # Look for matching Quantity and Aggregation
        if tag in self.thresholds:
            return self.thresholds[tag]
        # Then just matching Quantity
        elif tag_d1 in self.thresholds:
            return self.thresholds[tag_d1]
        # Then the global default
        elif tag_d2 in self.thresholds:
            return self.thresholds[tag_d2]
        else:
            return 0.0, 0.0
PypiClean
/Djblets-3.3.tar.gz/Djblets-3.3/docs/releasenotes/0.8-beta-1.rst
================================
Djblets 0.8 Beta 1 Release Notes
================================

**Release date**: February 20, 2014


Compatibility
=============

* This release of Djblets should be compatible with Python 3.

* This release requires Django 1.6.2 or higher.

* Pillow is now supported as an alternative to PIL.

  We recommend uninstalling PIL and switching entirely to Pillow, as it's
  better maintained and easier to install.

* Unit tests can now be run when the source code is checked out on a
  Windows host but run within a Linux VM.

  Patch by Tomi Äijö.


Internationalization
====================

* Translations can now be built on a system without an installed copy of
  Djblets.

  Patch by Stephen Gallagher.


djblets.datagrid
================

* Pagination links now include any query parameters used to generate the
  view of the datagrid. (:bug:`1155`)


djblets.db.fields
=================

* Support updating many fields at once using :py:class:`CounterField`.

  :py:class:`CounterField` has two new class methods:
  :py:meth:`increment_many`, and :py:meth:`decrement_many`. These can take
  a list of fields on a model instance and update them in the same query.

* :py:class:`CounterField` initializers can now handle their own instance
  updating.

  Previously, :py:class:`CounterField` initializer functions had to return
  a result, which would be used in a query to update the instance. Now,
  they can do the update themselves (potentially using the new
  :py:meth:`increment_many`) and return None to tell
  :py:class:`CounterField` not to do its own update.

* :py:class:`CounterField` now only updates the affected fields when
  saving the model instance.


djblets.extensions
==================

* Extensions can now implement :py:meth:`initialize` instead of
  :py:meth:`__init__`.

  Previously, :py:class:`Extension` subclasses had to override
  :py:meth:`__init__` and call the parent function before doing any
  initialization work. Now, they can simply override 'initialize'.
They don't need to call the parent.

* Static media bundles can now apply to specific pages.

  An ``apply_to`` attribute in a static bundle definition can be set to a
  list of URL names that the bundle should render on. Those pages will
  automatically load any bundles listing that page's URL name.

* Improved JavaScript extension support.

  In previous alphas of 0.8, JavaScript extensions could be defined by
  filling in the :py:attr:`Extension.js_model_class` attribute. That's now
  been replaced by a new :py:class:`JSExtension` class.

  Extensions can define a :py:attr:`js_extensions` attribute and set it to
  a list of :py:class:`JSExtension` subclasses. Each subclass can define a
  JavaScript extension that can appear on one or more pages, by defining
  the :py:attr:`js_model_class` and :py:attr:`apply_to` attributes.

* Added :py:class:`SignalHook`.

  :py:class:`SignalHook` connects to Django signals and keeps that
  connection bound to the lifecycle of the extension. When the extension
  is disabled, the signal is automatically disconnected.

* Added :py:class:`DataGridColumnsHook`.

  :py:class:`DataGridColumnsHook` allows extensions to register custom
  columns on any datagrid.

* Simplified :py:class:`TemplateHook`.

  :py:class:`TemplateHook` is now simpler and faster, and potentially less
  buggy.

* Extension hooks can no longer be shut down twice when disabling an
  extension.


djblets.webapi
==============

* Subclasses of :py:class:`RootResource` can now provide custom payload
  data.

* Fixed timestamp inconsistencies in the payloads.

  The serialized timestamps in a payload are now consistent whether they
  were populated from a database or from code. Previously, they would
  contain extra precision if populated from code that they otherwise
  wouldn't have if populated from the database.


jquery.gravy.inlineEditor
=========================

* Hitting :kbd:`Enter` on an auto-completed ``inlineEditor`` now properly
  populates the field.
(:bug:`2779`) * Added a :js:func:`setValue` function to ``inlineEditor``. Patch by Natasha Dalal. Contributors ============ * Christian Hammond * David Trowbridge * Natasha Dalal * Stephen Gallagher * Tomi Äijö
PypiClean
/Museparation-0.0.1a15.tar.gz/Museparation-0.0.1a15/museparation/waveunet/model/resample.py
import numpy as np
import torch
from torch import nn as nn
from torch.nn import functional as F


class Resample1d(nn.Module):
    def __init__(self, channels, kernel_size, stride, transpose=False,
                 padding="reflect", trainable=False):
        '''
        Creates a resampling layer for time series data (using 1D convolution) - (N, C, W) input format
        :param channels: Number of features C at each time-step
        :param kernel_size: Width of sinc-based lowpass-filter (>= 15 recommended for good filtering performance)
        :param stride: Resampling factor (integer)
        :param transpose: False for down-, true for upsampling
        :param padding: Either "reflect" to pad or "valid" to not pad
        :param trainable: Optionally activate this to train the lowpass-filter, starting from the sinc initialisation
        '''
        super(Resample1d, self).__init__()

        self.padding = padding
        self.kernel_size = kernel_size
        self.stride = stride
        self.transpose = transpose
        self.channels = channels

        # Nyquist of the resampled rate.
        cutoff = 0.5 / stride

        assert(kernel_size > 2)
        assert ((kernel_size - 1) % 2 == 0)
        assert(padding == "reflect" or padding == "valid")

        # Same sinc lowpass kernel for every channel (depthwise filtering
        # via groups=channels in forward()).
        sinc_kernel = build_sinc_filter(kernel_size, cutoff)

        self.filter = torch.nn.Parameter(
            torch.from_numpy(np.repeat(
                np.reshape(sinc_kernel, [1, 1, kernel_size]),
                channels, axis=0)),
            requires_grad=trainable)

    def forward(self, x):
        # Pad here if not using transposed conv
        input_size = x.shape[2]
        if self.padding != "valid":
            num_pad = (self.kernel_size - 1) // 2
            out = F.pad(x, (num_pad, num_pad), mode=self.padding)
        else:
            out = x

        # Lowpass filter (+ 0 insertion if transposed)
        if self.transpose:
            expected_steps = ((input_size - 1) * self.stride + 1)
            if self.padding == "valid":
                expected_steps = expected_steps - self.kernel_size + 1

            out = F.conv_transpose1d(out, self.filter, stride=self.stride,
                                     padding=0, groups=self.channels)
            # Trim the extra samples conv_transpose1d appends symmetrically.
            diff_steps = out.shape[2] - expected_steps
            if diff_steps > 0:
                assert(diff_steps % 2 == 0)
                out = out[:, :, diff_steps // 2:-diff_steps // 2]
        else:
            # Want to take first and last sample, hence odd effective length.
            assert(input_size % self.stride == 1)
            out = F.conv1d(out, self.filter, stride=self.stride, padding=0,
                           groups=self.channels)

        return out

    def get_output_size(self, input_size):
        '''
        Returns the output dimensionality (number of timesteps) for a given input size
        :param input_size: Number of input time steps (Scalar, each feature is one-dimensional)
        :return: Output size (scalar)
        '''
        assert(input_size > 1)
        if self.transpose:
            if self.padding == "valid":
                return ((input_size - 1) * self.stride + 1) - \
                    self.kernel_size + 1
            else:
                return ((input_size - 1) * self.stride + 1)
        else:
            # Want to take first and last sample
            assert(input_size % self.stride == 1)
            # FIX: the strided convolution decimates by the stride factor.
            # The original returned the undecimated length, which was
            # inconsistent with forward() and not the inverse of
            # get_input_size().
            if self.padding == "valid":
                return ((input_size - self.kernel_size) // self.stride) + 1
            else:
                return ((input_size - 1) // self.stride) + 1

    def get_input_size(self, output_size):
        '''
        Returns the input dimensionality (number of timesteps) for a given output size
        :param input_size: Number of input time steps (Scalar, each feature is one-dimensional)
        :return: Output size (scalar)
        '''

        # Strided conv/decimation
        if not self.transpose:
            # o = (i-1)//s + 1 => i = (o - 1)*s + 1
            curr_size = (output_size - 1) * self.stride + 1
        else:
            curr_size = output_size

        # Conv
        if self.padding == "valid":
            curr_size = curr_size + self.kernel_size - 1  # o = i + p - k + 1

        # Transposed
        if self.transpose:
            # We need to have a value at the beginning and end
            assert ((curr_size - 1) % self.stride == 0)
            curr_size = ((curr_size - 1) // self.stride) + 1
        assert(curr_size > 0)
        return curr_size


def build_sinc_filter(kernel_size, cutoff):
    # FOLLOWING https://www.analog.com/media/en/technical-documentation/dsp-book/dsp_book_Ch16.pdf
    # Sinc lowpass filter
    # Build sinc kernel
    assert(kernel_size % 2 == 1)
    M = kernel_size - 1
    kernel = np.zeros(kernel_size, dtype=np.float32)
    for i in range(kernel_size):
        if i == M // 2:
            kernel[i] = 2 * np.pi * cutoff
        else:
            # Blackman-windowed sinc. FIX: the last window term must depend
            # on the sample index i; the original used cos(4*pi*M), which is
            # a constant (== 1 for integer M) and broke the window shape.
            kernel[i] = (np.sin(2 * np.pi * cutoff * (i - M // 2)) /
                         (i - M // 2)) * \
                (0.42 - 0.5 * np.cos((2 * np.pi * i) / M) +
                 0.08 * np.cos((4 * np.pi * i) / M))
    # Normalise for unity DC gain.
    kernel = kernel / np.sum(kernel)
    return kernel
PypiClean
/123_object_detection-0.1.tar.gz/123_object_detection-0.1/object_detection/core/freezable_sync_batch_norm.py
"""A freezable batch norm layer that uses Keras sync batch normalization.""" import tensorflow as tf class FreezableSyncBatchNorm(tf.keras.layers.experimental.SyncBatchNormalization ): """Sync Batch normalization layer (Ioffe and Szegedy, 2014). This is a `freezable` batch norm layer that supports setting the `training` parameter in the __init__ method rather than having to set it either via the Keras learning phase or via the `call` method parameter. This layer will forward all other parameters to the Keras `SyncBatchNormalization` layer This is class is necessary because Object Detection model training sometimes requires batch normalization layers to be `frozen` and used as if it was evaluation time, despite still training (and potentially using dropout layers) Like the default Keras SyncBatchNormalization layer, this will normalize the activations of the previous layer at each batch, i.e. applies a transformation that maintains the mean activation close to 0 and the activation standard deviation close to 1. Input shape: Arbitrary. Use the keyword argument `input_shape` (tuple of integers, does not include the samples axis) when using this layer as the first layer in a model. Output shape: Same shape as input. References: - [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/abs/1502.03167) """ def __init__(self, training=None, **kwargs): """Constructor. Args: training: If False, the layer will normalize using the moving average and std. dev, without updating the learned avg and std. dev. If None or True, the layer will follow the keras SyncBatchNormalization layer strategy of checking the Keras learning phase at `call` time to decide what to do. **kwargs: The keyword arguments to forward to the keras SyncBatchNormalization layer constructor. 
""" super(FreezableSyncBatchNorm, self).__init__(**kwargs) self._training = training def call(self, inputs, training=None): # Override the call arg only if the batchnorm is frozen. (Ignore None) if self._training is False: # pylint: disable=g-bool-id-comparison training = self._training return super(FreezableSyncBatchNorm, self).call(inputs, training=training)
PypiClean
/AutoAiLib-1.1.0.tar.gz/AutoAiLib-1.1.0/README.md
# AutoAI This repository is a compilation of scripts that I have created in my time working with machine learning. These scripts aim to automate the annoying and tedious parts of ML, allowing you to focus on what is important. PyPi: https://pypi.org/project/AutoAILib/ </br> $ pip install autoailib </br> This library was developed for and used with keras convolutional neural networks. They do however work with other keras models, besides image test obviously. <div class="entry"> <h1> AutoAiLib.general_tester(model path or object, labels, preprocessor)</h1> <a href="https://youtu.be/TQisVhgUzWo"> Class Video Demo</a> <h2> AutoAiLib.general_tester.predict_single(example)</h2> <ul><li>example- If you have defined a preprocessor for your tester, this should comply with the preprocessor's argument. If you have not defined a preprocessor, example must be in a form that your model will accept.</li></ul> <h2> AutoAiLib.general_tester.predict_many(container=None, testing_folder = None, csv_dir)</h2> <ul> <li> container - This can be a container of test objects (any iterable). If preprocessor is defined, these objects must comply with the preprocessors parameter. Otherwise they must be in a form that your model will accept.</li> <li> testing_dir - This can be a path to a testing folder which has sub folders of all classes. 
Again, must be preprocessed or have preprocessor defined.</li>
<li> csv_dir - This function compiles data into a csv folder to allow users to easily extract data from it, if not defined it will return a pandas data frame.</li>
</ul>
</div>
<div class="entry">
<h1> AutoAi.convnet_tester(model path or object, labels) </h1>
<a href="https://youtu.be/sM57JDasREk"> Class Video Demo </a>
<h2> AutoAi.image_predict(model_path, image_path, labels)</h2>
<h5> This function takes 3 arguments: a path to a keras model, a path to an image, and a list of labels.</h5>
<h5> Demo:</h5>
Given the correct arguments, we get the following output, as well as this image saved to our model directory.
<img src="https://i.imgur.com/woiPdus.png"></img>
<h2> AutoAi.manual_test(model, testing_dir, labels) </h2>
<h5> This function tests a model given labels and testing data. It then compiles the results in a CSV file, and groups the results by class, and by correct and incorrect.</h5>
<ul>
<li> Model - Path of model that you want to test or model object.</li>
<li> Testing_dir - Path to the directory with your testing data.</li>
<li> Labels - Dictionary of the classes, in form (index:class_name)</li>
</ul>
<h5>Example csv:</h5>
<img src="https://i.imgur.com/g4gNQjS.png"></img>
</div>
<div class="entry">
<h2>Update! This has now been packaged in the AutoAI.data_compiler class.
AutoAi.data_compiler(self,src, dest, **kwargs)</br>
AutoAi.data_compiler.run() will compile the data based on the constructor parameters.
</h2>
<h5> This function takes 2 required arguments, an original data source file, and a path to the desired data directory. Given just these two arguments, this function will create a new testing data folder at dest with training, validation, and testing folders, containing folders for each class.
You can alter the ratio with the ratio arguments, as well as provide a number of img transforms to do if you are using images.</h5>
<ul>
<li> Src - Path to a folder that contains a folder for each class and then data examples in those class folders. </li>
<li> Dest - Path to a folder where you want the data to end up. </li>
<li> Num_imgs_per_class - This number of images will be added to the original set for each class through transforms. The theoretical limit for this would be 3! * original images per class </li>
</ul>
<h5> Demo:</h5>
Given a path to the following folder:
<img src="https://i.imgur.com/SSpydEv.png"></img>
If augmentation used the following results will be yielded:
<img src="https://i.imgur.com/4okyMrN.png"></img>
Then these images will be copied to the dest folder with copied file structure, but an added upper layer:
<img src="https://i.imgur.com/TY7HvL4.png"></img>
Example showing the images made it:
<img src="https://i.imgur.com/3ily5dU.png"></img>
</div>
PypiClean
/Diofant-0.14.0a2.tar.gz/Diofant-0.14.0a2/diofant/domains/finitefield.py
"""Finite fields and integer quotient rings.

NOTE(review): summary inferred from the visible code -- ``IntegerModRing``
models quotient rings Z/nZ over an integer ground domain, ``FiniteField``
models GF(p**m), and both cache their generated element classes in
``_modular_integer_cache``.  Concrete domains are parametrized by the
underlying integer ground type (pure Python or GMPY).
"""
from __future__ import annotations

import numbers
import random

from ..core import Dummy, integer_digits
from ..ntheory import factorint, is_primitive_root, isprime
from ..polys.polyerrors import CoercionFailed
from .field import Field
from .groundtypes import DiofantInteger
from .integerring import GMPYIntegerRing, PythonIntegerRing, ZZ_python
from .quotientring import QuotientRingElement
from .ring import CommutativeRing
from .simpledomain import SimpleDomain


class IntegerModRing(CommutativeRing, SimpleDomain):
    """General class for quotient rings over integers."""

    is_Numerical = True

    def __new__(cls, order, dom):
        """Construct the quotient ring Z/(order) over ground domain ``dom``.

        A prime ``order`` yields a field, so construction is delegated to
        the domain's finite-field factory in that case.
        """
        if isprime(order):
            return dom.finite_field(order)
        mod = dom.convert(order)
        key = cls, order, dom
        obj = super().__new__(cls)
        obj.domain = dom
        obj.mod = mod
        obj.order = order
        obj.rep = f'IntegerModRing({obj.order})'
        try:
            # Reuse the element class generated earlier for an equal ring,
            # so elements of equal rings share one dtype.
            obj.dtype = _modular_integer_cache[key]
        except KeyError:
            # Generate a fresh element class bound to this ring instance.
            obj.dtype = type('ModularInteger', (ModularInteger,),
                             {'mod': mod, 'domain': dom, '_parent': obj})
            _modular_integer_cache[key] = obj.dtype
        obj.zero = obj.dtype(0)
        obj.one = obj.dtype(1)
        return obj

    def __hash__(self):
        return hash((self.__class__.__name__, self.dtype, self.order,
                     self.domain))

    def __eq__(self, other):
        """Rings are equal iff they share class, order and ground domain."""
        return isinstance(other, self.__class__) and \
            self.order == other.order and self.domain == other.domain

    def __getnewargs_ex__(self):
        # Pickling support: the concrete subclasses below rebuild the ring
        # from its order alone.
        return (self.order,), {}

    @property
    def characteristic(self):
        return self.order

    def to_expr(self, element):
        """Convert ``element`` to a Diofant integer expression."""
        return DiofantInteger(int(element))

    def from_expr(self, expr):
        """Convert a Diofant expression to an element of this ring.

        Accepts Integers and integer-valued Floats; raises
        ``CoercionFailed`` otherwise.
        """
        if expr.is_Integer:
            return self.dtype(self.domain.dtype(int(expr)))
        elif expr.is_Float and int(expr) == expr:
            return self.dtype(self.domain.dtype(int(expr)))
        else:
            raise CoercionFailed(f'expected an integer, got {expr}')

    def _from_PythonFiniteField(self, a, K0=None):
        # Conversion hook used by the domain framework.
        return self.dtype(self.domain.convert(a.rep, K0.domain))
    _from_GMPYFiniteField = _from_PythonFiniteField

    def _from_PythonIntegerRing(self, a, K0=None):
        # Reduce the integer modulo the ring order (== characteristic here).
        return self.dtype(self.domain.convert(a, K0) % self.characteristic)
    _from_GMPYIntegerRing = _from_PythonIntegerRing

    def _from_PythonRationalField(self, a, K0=None):
        # Only integral rationals embed; a non-integral rational falls
        # through to an implicit None (presumably a failed conversion for
        # the caller -- TODO confirm against the domain framework).
        if a.denominator == 1:
            return self.convert(a.numerator)
    _from_GMPYRationalField = _from_PythonRationalField

    def _from_RealField(self, a, K0):
        p, q = K0.to_rational(a)
        if q == 1:
            # Integral float: embed its numerator.
            return self.dtype(self.domain.dtype(p))

    def is_normal(self, a):
        # Every element counts as normal in a quotient ring.
        return True


class FiniteField(Field, IntegerModRing):
    """General class for finite fields."""

    is_FiniteField = True

    def __new__(cls, order, dom, modulus=None):
        """Construct the Galois field GF(order) = GF(mod**deg).

        ``order`` must be a prime power.  ``modulus``, if given, lists the
        coefficients of the defining polynomial (constant term first, per
        the ``all_coeffs``/``from_list`` usage below); otherwise one is
        generated deterministically.
        """
        try:
            pp = factorint(order)
            if not order or len(pp) != 1:
                # Not a prime power (or zero).
                raise ValueError
            mod, deg = pp.popitem()
        except ValueError as exc:
            raise ValueError('order must be a prime power, '
                             f'got {order}') from exc
        if deg == 1:
            if modulus:
                # Prime order with an explicit modulus: take the extension
                # degree from the supplied polynomial instead.
                deg = len(modulus) - 1
            else:
                # Default defining polynomial x for the prime field.
                modulus = [0, 1]
        order = mod**deg
        if modulus is None:
            # Fixed seed makes the "random" irreducible polynomial, and
            # hence repeated constructions of the same GF(p**m), identical.
            random.seed(0)
            ring = ZZ_python.finite_field(mod).inject(Dummy('x'))
            modulus = ring._gf_random(deg, irreducible=True).all_coeffs()
        elif deg != len(modulus) - 1:
            raise ValueError('degree of a defining polynomial for the field'
                             ' does not match extension degree')
        modulus = tuple(map(dom.dtype, modulus))
        mod = dom.convert(mod)

        key = cls, order, dom, mod, modulus
        # Bypass IntegerModRing.__new__: its prime dispatch would call the
        # finite-field factory again; go straight to the next class in MRO.
        obj = super(IntegerModRing, cls).__new__(cls)  # pylint: disable=bad-super-call
        obj.domain = dom
        obj.mod = mod
        obj.order = order
        if order > mod:
            # Proper extension field: include the defining polynomial.
            obj.rep = f'GF({obj.mod}, {list(map(ZZ_python, modulus))})'
        else:
            obj.rep = f'GF({obj.mod})'
        try:
            obj.dtype = _modular_integer_cache[key]
        except KeyError as exc:
            if deg == 1:
                obj.dtype = type('ModularInteger', (ModularInteger,),
                                 {'mod': mod, 'domain': dom, '_parent': obj})
            else:
                # Extension field: elements are polynomials over GF(mod),
                # reduced modulo the (irreducible) defining polynomial.
                ff = dom.finite_field(mod).inject(Dummy('x'))
                mod = ff.from_list(modulus)
                if not mod.is_irreducible:
                    raise ValueError('defining polynomial must be '
                                     'irreducible') from exc
                obj.dtype = type('GaloisFieldElement', (GaloisFieldElement,),
                                 {'mod': mod, 'domain': ff, '_parent': obj})
            _modular_integer_cache[key] = obj.dtype
        obj.zero = obj.dtype(0)
        obj.one = obj.dtype(1)
        return obj

    @property
    def characteristic(self):
        # For GF(p**m) the characteristic is the prime p, not the order.
        return self.mod


# Cache mapping construction keys to generated element classes.
# NOTE(review): despite the value annotation, what is stored here are the
# generated dtype classes, not ring instances -- see the __new__ methods.
_modular_integer_cache: dict[tuple, IntegerModRing] = {}


class PythonIntegerModRing(IntegerModRing):
    """Quotient ring based on Python's integers."""

    def __new__(cls, order):
        return super().__new__(cls, order, PythonIntegerRing())


class GMPYIntegerModRing(IntegerModRing):
    """Quotient ring based on GMPY's integers."""

    def __new__(cls, order):
        return super().__new__(cls, order, GMPYIntegerRing())


class PythonFiniteField(FiniteField):
    """Finite field based on Python's integers."""

    def __new__(cls, order, modulus=None):
        return super().__new__(cls, order, PythonIntegerRing(), modulus)


class GMPYFiniteField(FiniteField):
    """Finite field based on GMPY's integers."""

    def __new__(cls, order, modulus=None):
        return super().__new__(cls, order, GMPYIntegerRing(), modulus)


class ModularInteger(QuotientRingElement):
    """A class representing a modular integer."""

    @property
    def numerator(self):
        return self

    @property
    def denominator(self):
        return self.parent.one

    @property
    def is_primitive(self):
        """Test if this is a primitive element."""
        parent = self.parent
        return is_primitive_root(int(self), parent.order)


class GaloisFieldElement(ModularInteger):
    """A class representing a Galois field element."""

    def __init__(self, rep):
        """Build an element from an integer, a coefficient list, or a poly.

        Integers are decoded via their base-p digits (p = parent.mod).
        NOTE(review): the reversal assumes ``integer_digits`` returns
        digits in the opposite order to what ``from_list`` expects --
        confirm against diofant.core.integer_digits.
        """
        if isinstance(rep, numbers.Integral):
            rep = list(reversed(integer_digits(rep % self.parent.order,
                                               self.parent.mod)))
        if isinstance(rep, (list, tuple)):
            rep = self.domain.from_list(rep)
        super().__init__(rep)

    def __int__(self):
        # Evaluate the coefficient polynomial at p, i.e. invert the base-p
        # digit encoding used by __init__.
        rep = self.rep.set_domain(self.parent.domain)
        return int(rep(self.parent.mod))

    @property
    def is_primitive(self):
        """Test if this is a primitive element."""
        parent = self.parent
        p = parent.characteristic
        f = self.rep
        domain = self.domain
        x = domain.gens[0]
        n = f.degree()
        if not (f.is_irreducible and n):
            return False
        # Walk successive powers x**k mod f for k in [n, p**n - 1); if any
        # reduces to 1 before the full group order is reached, the element
        # is not primitive.
        t = x**n
        for _ in range(n, p**n - 1):
            r = t % f
            if r == 1:
                return False
            t = r*x
        return True
PypiClean
/Nikola-8.2.4-py3-none-any.whl/nikola/data/samplesite/pages/creating-a-theme.rst
.. title: Creating a Theme .. slug: creating-a-theme .. date: 2015-05-28 18:46:48 UTC .. tags: .. category: .. link: .. description: .. type: text Nikola is a static site and blog generator. So is Jekyll. While I like what we have done with Nikola, I do admit that Jekyll (and others!) have many more, and nicer themes than Nikola does. This document is an attempt at making it easier for 3rd parties (that means *you* people! ;-) to create themes. Since I **suck** at designing websites, I asked for opinions on themes to port, and got some feedback. Since this is **Not So Hard™**, I will try to make time to port a few and see what happens. If you are looking for a reference, check out :doc:`Theming reference <theming>` and `Template variables <https://getnikola.com/template-variables.html>`_. Today’s theme is `Lanyon <https://github.com/poole/lanyon>`__ which is written by `@mdo <https://twitter.com/mdo>`__ and released under a MIT license, which is liberal enough. So, let’s get started. Checking It Out --------------- The first step in porting a theme is making the original theme work. Lanyon is awesome in that its `GitHub project <https://github.com/poole/lanyon>`__ is a full site! So:: # Get jekyll sudo apt-get install jekyll # Get Lanyon git clone git@github.com:poole/lanyon.git # Build it cd lanyon && jekyll build # Look at it jekyll serve & google-chrome http://localhost:4000 If you **do not want to install Jekyll**, you can also see it in action at https://lanyon.getpoole.com/ Some things jump to my mind: 1. This is one fine looking theme 2. Very clear and readable 3. Nice hidden navigation-thingy Also, from looking at `the project’s README <https://github.com/poole/lanyon/blob/master/README.md>`__ it supports some nice configuration options: 1. Color schemes 2. Reverse layout 3. Sidebar overlay instead of push 4. Open the sidebar by default, or on a per-page basis by using its metadata Let’s try to make all those nice things survive the porting. 
Starting From Somewhere ----------------------- Nikola has a nice, clean, base theme from which you can start when writing your own theme. Why start from that instead of from a clean slate? Because theme inheritance is going to save you a ton of work, that’s why. If you start from scratch you won’t be able to build **anything** until you have a bunch of templates written. Starting from base, you just need to hack on the things you **need** to change. First, we create a site with some content in it. We’ll use the ``nikola init`` wizard (with the ``--demo`` option) for that:: $ nikola init --demo lanyon-port Creating Nikola Site ==================== This is Nikola v7.8.0. We will now ask you a few easy questions about your new site. If you do not want to answer and want to go with the defaults instead, simply restart with the `-q` parameter. --- Questions about the site --- Site title [My Nikola Site]: Site author [Nikola Tesla]: Site author's e-mail [n.tesla@example.com]: Site description [This is a demo site for Nikola.]: Site URL [https://example.com/]: --- Questions about languages and locales --- We will now ask you to provide the list of languages you want to use. Please list all the desired languages, comma-separated, using ISO 639-1 codes. The first language will be used as the default. Type '?' (a question mark, sans quotes) to list available languages. Language(s) to use [en]: Please choose the correct time zone for your blog. Nikola uses the tz database. You can find your time zone here: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones Time zone [UTC]: Current time in UTC: 16:02:07 Use this time zone? [Y/n] --- Questions about comments --- You can configure comments now. Type '?' (a question mark, sans quotes) to list available comment systems. If you do not want any comments, just leave the field blank. Comment system: That's it, Nikola is now configured. Make sure to edit conf.py to your liking. 
If you are looking for themes and addons, check out https://themes.getnikola.com/ and https://plugins.getnikola.com/. Have fun! [2015-05-28T16:02:08Z] INFO: init: A new site with example data has been created at lanyon-port. [2015-05-28T16:02:08Z] INFO: init: See README.txt in that folder for more information. Then, we create an empty theme inheriting from base. This theme will use Mako templates. If you prefer Jinja2, then you should use ``base-jinja`` as a parent and ``jinja`` as engine instead:: $ cd lanyon-port/ $ nikola theme -n lanyon --parent base --engine mako Edit ``conf.py`` and set ``THEME = 'lanyon'``. Also set ``USE_BUNDLES = False`` (just do it for now, we’ll get to bundles later). Also, if you intend to publish your theme on the Index, or want to use it with older versions (v7.8.5 or older), use the ``--legacy-meta`` option for ``nikola theme -n``. You can now build that site using ``nikola build`` and it will look like this: .. figure:: https://getnikola.com/images/lanyon-0.thumbnail.png :target: https://getnikola.com/images/lanyon-0.png This is just the base theme. Basic CSS --------- The next step is to know exactly how Lanyon’s pages work. To do this, we read its HTML. First let’s look at the head element: .. 
code:: html <!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en-us"> <head> <link href="https://gmpg.org/xfn/11" rel="profile"> <meta http-equiv="content-type" content="text/html; charset=utf-8"> <!-- Enable responsiveness on mobile devices--> <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1"> <title> Lanyon &middot; A Jekyll theme </title> <!-- CSS --> <link rel="stylesheet" href="/public/css/poole.css"> <link rel="stylesheet" href="/public/css/syntax.css"> <link rel="stylesheet" href="/public/css/lanyon.css"> <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=PT+Serif:400,400italic,700|PT+Sans:400"> <!-- Icons --> <link rel="apple-touch-icon-precomposed" sizes="144x144" href="/public/apple-touch-icon-144-precomposed.thumbnail.png"> <link rel="shortcut icon" href="/public/favicon.ico"> <!-- RSS --> <link rel="alternate" type="application/rss+xml" title="RSS" href="/atom.xml"> <!-- Google Analytics --> [...] </head> The interesting part there is that it loads a few CSS files. If you check the source of your Nikola site, you will see something fairly similar: .. 
code:: html <!DOCTYPE html> <html prefix="og: http://ogp.me/ns# article: http://ogp.me/ns/article# " vocab="http://ogp.me/ns" lang="en"> <head> <meta charset="utf-8"> <meta name="description" content="This is a demo site for Nikola."> <meta name="viewport" content="width=device-width"> <title>My Nikola Site | My Nikola Site</title> <link href="assets/css/rst_base.css" rel="stylesheet" type="text/css"> <link href="assets/css/code.css" rel="stylesheet" type="text/css"> <link href="assets/css/theme.css" rel="stylesheet" type="text/css"> <link rel="alternate" type="application/rss+xml" title="RSS" href="rss.xml"> <link rel="canonical" href="https://example.com/index.html"> <!--[if lt IE 9]><script src="assets/js/html5.js"></script><![endif]--><link rel="prefetch" href="posts/welcome-to-nikola.html" type="text/html"> </head> Luckily, since this is all under a very liberal license, we can just copy these CSS files into Nikola, adapting the paths a little so that they follow our conventions:: $ mkdir -p themes/lanyon/assets/css $ cp ../lanyon/public/css/poole.css themes/lanyon/assets/css/ $ cp ../lanyon/public/css/lanyon.css themes/lanyon/assets/css/ Notice I am *not* copying ``syntax.css``? That’s because Nikola handles that styles for syntax highlighting in a particular way, using a setting called ``CODE_COLOR_SCHEME`` where you can configure what color scheme the syntax highlighter uses. You can use your own ``assets/css/code.css`` if you don’t like the provided ones. Nikola **requires** ``assets/css/rst_base.css`` and ``assets/css/code.css`` to function properly. We will also add themes for Jupyter (``assets/css/ipython.min.css`` and ``assets/css/nikola_ipython.css``) into the template; note that they are activated only if you configured your ``POSTS``/``PAGES`` with ipynb support. There’s also ``assets/css/nikola_rst.css``, which adds Bootstrap 3-style reST notes etc. But how do I tell **our** lanyon theme to use those CSS files instead of whatever it’s using now? 
By giving our theme its own base_helper.tmpl. That file is a **template** used to generate parts of the pages. It’s large and complicated but we don’t need to change a lot of it. First, make a copy in your theme (note this command requires setting your ``THEME`` in ``conf.py`` to ``lanyon``):: $ nikola theme -c base_helper.tmpl The part we want to change is this: .. code:: html+mako <%def name="html_stylesheets()"> %if use_bundles: %if use_cdn: <link href="/assets/css/all.css" rel="stylesheet" type="text/css"> %else: <link href="/assets/css/all-nocdn.css" rel="stylesheet" type="text/css"> %endif %else: <link href="/assets/css/rst_base.css" rel="stylesheet" type="text/css"> <link href="/assets/css/nikola_rst.css" rel="stylesheet" type="text/css"> <link href="/assets/css/code.css" rel="stylesheet" type="text/css"> <link href="/assets/css/theme.css" rel="stylesheet" type="text/css"> %if has_custom_css: <link href="/assets/css/custom.css" rel="stylesheet" type="text/css"> %endif %endif % if needs_ipython_css: <link href="/assets/css/ipython.min.css" rel="stylesheet" type="text/css"> <link href="/assets/css/nikola_ipython.css" rel="stylesheet" type="text/css"> % endif </%def> And we will change it so it uses the lanyon styles instead of theme.css (again, ignore the bundles for now!): .. 
code:: html+mako <%def name="html_stylesheets()"> %if use_bundles: <link href="/assets/css/all.css" rel="stylesheet" type="text/css"> %else: <link href="/assets/css/rst_base.css" rel="stylesheet" type="text/css"> <link href="/assets/css/nikola_rst.css" rel="stylesheet" type="text/css"> <link href="/assets/css/poole.css" rel="stylesheet" type="text/css"> <link href="/assets/css/lanyon.css" rel="stylesheet" type="text/css"> <link href="/assets/css/code.css" rel="stylesheet" type="text/css"> %if has_custom_css: <link href="/assets/css/custom.css" rel="stylesheet" type="text/css"> %endif %endif % if needs_ipython_css: <link href="/assets/css/ipython.min.css" rel="stylesheet" type="text/css"> <link href="/assets/css/nikola_ipython.css" rel="stylesheet" type="text/css"> % endif <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=PT+Serif:400,400italic,700|PT+Sans:400"> </%def> .. figure:: https://getnikola.com/images/lanyon-1.thumbnail.png :target: https://getnikola.com/images/lanyon-1.png You may say this looks like crap. Don’t worry, we are just starting :-) Page Layout ----------- This is trickier but should be no problem for people with a basic understanding of HTML and a desire to make a theme! Lanyon’s content is split in two parts: a sidebar and the rest. The sidebar looks like this (shortened for comprehension): .. code:: html <body> <!-- Target for toggling the sidebar `.sidebar-checkbox` is for regular styles, `#sidebar-checkbox` for behavior. --> <input type="checkbox" class="sidebar-checkbox" id="sidebar-checkbox"> <!-- Toggleable sidebar --> <div class="sidebar" id="sidebar"> <div class="sidebar-item"> <p>A reserved <a href="https://jekyllrb.com" target="_blank">Jekyll</a> theme that places the utmost gravity on content with a hidden drawer. 
Made by <a href="https://twitter.com/mdo" target="_blank">@mdo</a>.</p> </div> <nav class="sidebar-nav"> <a class="sidebar-nav-item active" href="/">Home</a> <a class="sidebar-nav-item" href="/about/">About</a> [...] </nav> </div> So, a plain body, with an input element that controls the sidebar, a div which is the sidebar itself. Inside that, div.sidebar-item for items, and a nav with "navigational links". This is followed by the "masthead" and the content itself, which we will look at in a bit. If we look for the equivalent code in Nikola’s side, we see this: .. code:: html <body> <a href="#content" class="sr-only sr-only-focusable">Skip to main content</a> <div id="container"> <header id="header" role="banner"> <h1 id="brand"><a href="https://example.com/" title="My Nikola Site" rel="home"> <span id="blog-title">My Nikola Site</span> </a></h1> <nav id="menu" role="navigation"><ul> <li><a href="../archive.html">Archive</a></li> <li><a href="../categories/index.html">Tags</a></li> <li><a href="../rss.xml">RSS feed</a></li> So Nikola has the "masthead" above the nav element, and uses list elements in nav instead of bare links. Not all that different is it? Let’s make it lanyon-like! We will need 2 more templates: `base.tmpl <https://github.com/getnikola/nikola/blob/master/nikola/data/themes/base/templates/base.tmpl>`__ and `base_header.tmpl <https://github.com/getnikola/nikola/blob/master/nikola/data/themes/base/templates/base_header.tmpl>`__. Get them and put them in your ``themes/lanyon/templates`` folder. Let’s look at ``base.tmpl`` first. It’s short and nice, it looks like a webpage without all the interesting stuff: .. code:: html+mako ## -*- coding: utf-8 -*- <%namespace name="base" file="base_helper.tmpl" import="*"/> <%namespace name="header" file="base_header.tmpl" import="*"/> <%namespace name="footer" file="base_footer.tmpl" import="*"/> ${set_locale(lang)} ${base.html_headstart()} <%block name="extra_head"> ### Leave this block alone. 
</%block> ${template_hooks['extra_head']()} </head> <body> <a href="#content" class="sr-only sr-only-focusable">${messages("Skip to main content")}</a> <div id="container"> ${header.html_header()} <main id="content" role="main"> <%block name="content"></%block> </main> ${footer.html_footer()} </div> ${body_end} ${template_hooks['body_end']()} ${base.late_load_js()} </body> </html> That link which says "Skip to main content" is very important for accessibility, so we will leave it in place. But below, you can see how it creates the "container" div we see in the Nikola page, and the content is created by ``html_header()`` which is defined in ``base_header.tmpl`` The actual ``nav`` element is done by the ``html_navigation_links`` function out of the ``NAVIGATION_LINKS`` and ``NAVIGATION_ALT_LINKS`` options. (Let's put the alt links after regular ones; Bootstrap puts it on the right side, for example.) So, first, lets change that base template to be more lanyon-like: .. code:: html+mako ## -*- coding: utf-8 -*- <%namespace name="base" file="base_helper.tmpl" import="*"/> <%namespace name="header" file="base_header.tmpl" import="*"/> <%namespace name="footer" file="base_footer.tmpl" import="*"/> ${set_locale(lang)} ${base.html_headstart()} <%block name="extra_head"> ### Leave this block alone. </%block> ${template_hooks['extra_head']()} </head> <body> <a href="#content" class="sr-only sr-only-focusable">${messages("Skip to main content")}</a> <!-- Target for toggling the sidebar `.sidebar-checkbox` is for regular styles, `#sidebar-checkbox` for behavior. --> <input type="checkbox" class="sidebar-checkbox" id="sidebar-checkbox"> <!-- Toggleable sidebar --> <div class="sidebar" id="sidebar"> <div class="sidebar-item"> <p>A reserved <a href="https://getnikola.com" target="_blank" rel="noopener">Nikola</a> theme that places the utmost gravity on content with a hidden drawer. 
Made by <a href="https://twitter.com/mdo" target="_blank" rel="noopener">@mdo</a> for Jekyll, ported to Nikola by <a href="https://twitter.com/ralsina" target="_blank">@ralsina</a>.</p> </div> ${header.html_navigation_links()} </div> <main id="content" role="main"> <%block name="content"></%block> </main> ${footer.html_footer()} ${body_end} ${template_hooks['body_end']()} ${base.late_load_js()} </body> </html> .. figure:: https://getnikola.com/images/lanyon-2.thumbnail.png :target: https://getnikola.com/images/lanyon-2.png And that’s after I exposed the sidebar by clicking on an invisible widget! One problem, which causes that yellow color in the sidebar is a CSS conflict. We are loading ``rst_base.css`` which specifies the background color of ``div.sidebar`` which is more specific than ``lanyon.css``, which specifies for ``.sidebar`` alone. There are many ways to fix this, I chose to change lanyon.css to also use div.sidebar: .. code:: css div.sidebar,.sidebar { position: fixed; top: 0; bottom: 0; left: -14rem; width: 14rem; [...] This is annoying but it will happen when you just grab CSS from different places. The "Inspect Element" feature of your web browser is your best friend for these situations. Another problem is that the contents of the nav element are wrong. They are not bare links. We will fix that in ``base_header.tmpl``, like this: .. code:: html+mako <%def name="html_navigation_links()"> <nav id="menu" role="navigation" class="sidebar-nav"> %for url, text in navigation_links[lang]: <a class="sidebar-nav-item" href="${url}">${text}</a> %endfor ${template_hooks['menu']()} %for url, text in navigation_alt_links[lang]: <a class="sidebar-nav-item" href="${url}">${text}</a> %endfor ${template_hooks['menu_alt']()} </nav> </%def> **Note: this means this theme will not support submenus in navigation. If you want that, I’ll happily take a patch.** ..
figure:: https://getnikola.com/images/lanyon-3.thumbnail.png :target: https://getnikola.com/images/lanyon-3.png Starting to see a resemblance? Now let’s look at the content. In Lanyon, this is how the "main" content looks: .. code:: html <!-- Wrap is the content to shift when toggling the sidebar. We wrap the content to avoid any CSS collisions with our real content. --> <div class="wrap"> <div class="masthead"> <div class="container"> <h3 class="masthead-title"> <a href="/" title="Home">Lanyon</a> <small>A Jekyll theme</small> </h3> </div> </div> <div class="container content"> <div class="post"> <h1 class="post-title">Introducing Lanyon</h1> <span class="post-date">02 Jan 2014</span> <p>Lanyon is an unassuming <a href="https://jekyllrb.com">Jekyll</a> theme [...] </div> </div> </div> <label for="sidebar-checkbox" class="sidebar-toggle"></label> </body> </html> Everything inside the "container content" div is… the content. The rest is a masthead with the site title and at the bottom a label for the sidebar toggle. Easy to do in ``base.tmpl`` (only showing the relevant part): .. code:: html+mako <!-- Wrap is the content to shift when toggling the sidebar. We wrap the content to avoid any CSS collisions with our real content. --> <div class="wrap"> <div class="masthead"> <div class="container"> <h3 class="masthead-title"> <a href="/" title="Home">Lanyon</a> <small>A Jekyll theme</small> </h3> </div> </div> <div class="container content" id="content"> <%block name="content"></%block> </div> </div> <label for="sidebar-checkbox" class="sidebar-toggle"></label> ${footer.html_footer()} ${body_end} ${template_hooks['body_end']()} ${base.late_load_js()} </body> </html> .. figure:: https://getnikola.com/images/lanyon-4.thumbnail.png :target: https://getnikola.com/images/lanyon-4.png Getting there! The sidebar looks bad because of yet more CSS conflicts with ``rst_base.css``. By adding some extra styling in ``lanyon.css``, it will look better. .. 
code:: css /* Style and "hide" the sidebar */ div.sidebar, .sidebar { position: fixed; top: 0; bottom: 0; left: -14rem; width: 14rem; visibility: hidden; overflow-y: auto; padding: 0; margin: 0; border: none; font-family: "PT Sans", Helvetica, Arial, sans-serif; font-size: .875rem; /* 15px */ color: rgba(255,255,255,.6); background-color: #202020; -webkit-transition: all .3s ease-in-out; transition: all .3s ease-in-out; } Also, the accessibility link on top is visible when it should not. That’s because we removed ``theme.css`` from the base theme, and with it, we lost a couple of classes. We can add them in ``lanyon.css``, along with others used by other pieces of the site: .. code:: css .sr-only { position: absolute; width: 1px; height: 1px; padding: 0; margin: -1px; overflow: hidden; clip: rect(0, 0, 0, 0); border: 0; } .sr-only-focusable:active, .sr-only-focusable:focus { position: static; width: auto; height: auto; margin: 0; overflow: visible; clip: auto; } .breadcrumb { padding: 8px 15px; margin-bottom: 20px; list-style: none; } .breadcrumb > li { display: inline-block; margin-right: 0; margin-left: 0; } .breadcrumb > li:after { content: ' / '; color: #888; } .breadcrumb > li:last-of-type:after { content: ''; margin-left: 0; } .thumbnails > li { display: inline-block; margin-right: 10px; } .thumbnails > li:last-of-type { margin-right: 0; } .. figure:: https://getnikola.com/images/lanyon-5.thumbnail.png :target: https://getnikola.com/images/lanyon-5.png Little by little, things look better. One clear problem is that the title "Lanyon · A Jekyll theme" is set in the theme itself. We don’t do that sort of thing in Nikola, we have settings for that. So, let’s use them. There is a ``html_site_title`` function in ``base_helper.tmpl`` which is just the thing. So we change base.tmpl to use it: .. 
code:: html+mako <div class="wrap"> <div class="masthead"> <div class="container"> ${header.html_site_title()} </div> </div> That’s a ``<h1>`` instead of a ``<h3>`` like Lanyon does, but hey, it’s the right thing to do. If you want to go with an ``<h3>``, just change ``html_site_title`` itself. And now we more or less have the correct page layout and styles. Except for a rather large thing… Typography ---------- You can see in the previous screenshot that text still looks quite different in our port: Serif versus Sans-Serif content, and the titles have different colors! Let’s start with the titles. Here’s how they look in Lanyon: .. code:: html <h3 class="masthead-title"> <a href="/" title="Home">Lanyon</a> <small>A Jekyll theme</small> </h3> Versus our port: .. code:: html <h1 id="brand"><a href="https://example.com/" title="My Nikola Site" rel="home"> So, it looks like we will have to fix ``html_site_title`` after all: .. code:: html+mako <%def name="html_site_title()"> <h3 id="brand" class="masthead-title"> <a href="${_link("root", None, lang)}" title="${blog_title}" rel="home">${blog_title}</a> </h3> </%def> As for the actual content, that’s not in any of the templates we have seen so far. The page you see is an "index.tmpl" page, which means it’s a list of blog posts shown one below the other. Obviously it’s not doing things in the way the Lanyon CSS expects it to. Here’s the original, which you can find in Nikola’s source code: .. 
code:: html+mako ## -*- coding: utf-8 -*- <%namespace name="helper" file="index_helper.tmpl"/> <%namespace name="comments" file="comments_helper.tmpl"/> <%inherit file="base.tmpl"/> <%block name="extra_head"> ${parent.extra_head()} % if posts and (permalink == '/' or permalink == '/' + index_file): <link rel="prefetch" href="${posts[0].permalink()}" type="text/html"> % endif </%block> <%block name="content"> <%block name="content_header"></%block> <div class="postindex"> % for post in posts: <article class="h-entry post-${post.meta('type')}"> <header> <h1 class="p-name entry-title"><a href="${post.permalink()}" class="u-url">${post.title()|h}</a></h1> <div class="metadata"> <p class="byline author vcard"><span class="byline-name fn">${post.author()}</span></p> <p class="dateline"><a href="${post.permalink()}" rel="bookmark"><time class="published dt-published" datetime="${post.date.isoformat()}" title="${post.formatted_date(date_format)}">${post.formatted_date(date_format)}</time></a></p> % if not post.meta('nocomments') and site_has_comments: <p class="commentline">${comments.comment_link(post.permalink(), post._base_path)} % endif </div> </header> %if index_teasers: <div class="p-summary entry-summary"> ${post.text(teaser_only=True)} %else: <div class="e-content entry-content"> ${post.text(teaser_only=False)} %endif </div> </article> % endfor </div> ${helper.html_pager()} ${comments.comment_link_script()} ${helper.mathjax_script(posts)} </%block> And this is how it looks after I played with it for a while, making it generate code that looks closer to the Lanyon original: .. 
code:: html+mako <%block name="content"> <%block name="content_header"></%block> <div class="posts"> % for post in posts: <article class="post h-entry post-${post.meta('type')}"> <header> <h1 class="post-title p-name"><a href="${post.permalink()}" class="u-url">${post.title()|h}</a></h1> <div class="metadata"> <p class="byline author vcard"><span class="byline-name fn">${post.author()}</span></p> <p class="dateline"><a href="${post.permalink()}" rel="bookmark"><time class="post-date published dt-published" datetime="${post.date.isoformat()}" title="${post.formatted_date(date_format)}">${post.formatted_date(date_format)}</time></a></p> % if not post.meta('nocomments') and site_has_comments: <p class="commentline">${comments.comment_link(post.permalink(), post._base_path)} % endif </div> </header> %if index_teasers: <div class="p-summary entry-summary"> ${post.text(teaser_only=True)} %else: <div class="e-content entry-content"> ${post.text(teaser_only=False)} %endif </div> </article> % endfor </div> ${helper.html_pager()} ${comments.comment_link_script()} ${helper.mathjax_script(posts)} </%block> With these changes, it looks… similar? .. figure:: https://getnikola.com/images/lanyon-6.thumbnail.png :target: https://getnikola.com/images/lanyon-6.png It does! Similar changes (basically adding class names to elements) needed to be done in ``post_header.tmpl``: .. 
code:: html+mako <%def name="html_post_header()"> <header> ${html_title()} <div class="metadata"> <p class="byline author vcard"><span class="byline-name fn">${post.author()}</span></p> <p class="dateline"><a href="${post.permalink()}" rel="bookmark"><time class="post-date published dt-published" datetime="${post.date.isoformat()}" itemprop="datePublished" title="${post.formatted_date(date_format)}">${post.formatted_date(date_format)}</time></a></p> % if not post.meta('nocomments') and site_has_comments: <p class="commentline">${comments.comment_link(post.permalink(), post._base_path)} % endif %if post.description(): <meta name="description" itemprop="description" content="${post.description()}"> %endif </div> ${html_translations(post)} </header> </%def> Customization ------------- The original Lanyon theme supports some personalization options. It suggests you do them by tweaking the templates, and you *can* also do that in the Nikola port. But we prefer to use options for that, so that you can get a later, better version of the theme and it will still "just work". Let’s see the color schemes first. They apply easily, just tweak your ``body`` element like this: .. code:: html <body class="theme-base-08"> ... </body> We can tweak ``base.tmpl`` to do just that: .. code:: html+mako % if lanyon_subtheme: <body class="${lanyon_subtheme}"> %else: <body> %endif And then we can put the options in conf.py’s ``GLOBAL_CONTEXT``: .. code:: python GLOBAL_CONTEXT = { "lanyon_subtheme": "theme-base-08" } .. figure:: https://getnikola.com/images/lanyon-7.thumbnail.png :target: https://getnikola.com/images/lanyon-7.png Look at it, all themed up. Doing the same for layout-reverse, sidebar-overlay and the rest is left as an exercise for the reader. Bundles ------- If the ``USE_BUNDLES`` option is set to True, Nikola can put several CSS or JS files together in a larger file, which can make the site load faster for some deployments. To do this, your theme needs a ``bundles`` file.
The file format is a modified `config <https://docs.python.org/3/library/configparser.html>`_ file with no defined section; the basic syntax is:: outputfile1.js= thing1.js, thing2.js, ... outputfile2.css= thing1.css, thing2.css, ... For the Lanyon theme, it should look like this:: assets/css/all.css= rst_base.css, nikola_rst.css, code.css, poole.css, lanyon.css, custom.css, **Note:** trailing commas are optional **Note:** Some themes also support the ``USE_CDN`` option meaning that in some cases it will load one bundle with all CSS and in others will load some CSS files from a CDN and others from a bundle. This is complicated and probably not worth the effort. The End ------- And that’s it, that’s a whole theme. Eventually, once people start using it, they will notice small broken details, which will need handling one at a time. This theme should be available in https://themes.getnikola.com/v7/lanyon/ and you can see it in action at https://themes.getnikola.com/v7/lanyon/demo/ . What if you want to extend other parts of the theme? Check out the :doc:`Theming reference <theming>`. You can also contribute your improvements to the `nikola-themes <https://github.com/getnikola/nikola-themes>`__ repository on GitHub.
PypiClean
/AQoPA-0.9.5.tar.gz/AQoPA-0.9.5/aqopa/module/financialanalysis/__init__.py
"""
@file       __init__.py
@brief      initial file for the financialanalysis module
@author     Katarzyna Mazur
"""

from aqopa import module
from .gui import ModuleGui
from aqopa.simulator.state import HOOK_TYPE_SIMULATION_FINISHED
from .console import PrintResultsHook


class Module(module.Module):
    """Financial analysis module.

    Converts the energy consumption computed by the energy analysis module
    into a monetary cost, given a price per kilowatt-hour.
    """

    def __init__(self, energyanalysis_module):
        # Energy analysis module that supplies per-host energy consumption.
        self.energyanalysis_module = energyanalysis_module
        # Maps simulator -> {host -> [cost, ...]} of recorded costs.
        self.consumption_costs = {}
        # Price of one kilowatt-hour; 0 until configured.
        self.cost_per_kWh = 0

    def get_cost_per_kWh(self):
        """Return the configured price of one kWh."""
        return self.cost_per_kWh

    def set_cost_per_kWh(self, cost_per_kWh):
        """Set the price of one kWh used in all cost calculations."""
        self.cost_per_kWh = cost_per_kWh

    def get_gui(self):
        """Return the GUI panel for this module, creating it lazily."""
        # NOTE: the attribute is accessed through getattr/setattr with a
        # literal '__gui' name on purpose - this sidesteps the class-private
        # name mangling that a plain ``self.__gui`` attribute would undergo.
        if not getattr(self, '__gui', None):
            setattr(self, '__gui', ModuleGui(self))
        return getattr(self, '__gui', None)

    def _install(self, simulator):
        """Common installation step; nothing extra to do for this module."""
        return simulator

    def install_console(self, simulator):
        """Install module for console simulation.

        Registers a hook that prints the financial results when the
        simulation finishes.
        """
        self._install(simulator)
        hook = PrintResultsHook(self, simulator)
        simulator.register_hook(HOOK_TYPE_SIMULATION_FINISHED, hook)
        return simulator

    def install_gui(self, simulator):
        """Install module for gui simulation."""
        self._install(simulator)
        return simulator

    def __convert_to_joules(self, millijoules):
        """Convert millijoules to joules."""
        return millijoules / 1000.0

    def __convert_to_kWh(self, joules):
        """Convert joules to kilowatt-hours (1 kWh = 3 600 000 J)."""
        return joules / 3600000.0

    def calculate_cost(self, consumed_joules, cost_per_kWh):
        """Return the monetary cost of consuming ``consumed_joules``."""
        kWhs = self.__convert_to_kWh(consumed_joules)
        cost = kWhs * cost_per_kWh
        return cost

    def calculate_cost_for_host(self, simulator, host, cost_per_kWh):
        """Return the cost of the energy consumed by a single host."""
        all_consumptions = self.get_all_hosts_consumption(simulator, simulator.context.hosts)
        joules = all_consumptions[host]['energy']
        cost_for_host = self.calculate_cost(joules, cost_per_kWh)
        return cost_for_host

    def calculate_all_costs(self, simulator, hosts, cost_per_kWh):
        """Compute, record and return the cost for every host in ``hosts``.

        The consumption map is fetched once for all hosts; the previous
        implementation called ``calculate_cost_for_host`` in a loop, which
        recomputed the full consumption map per host (quadratic work).
        """
        all_consumptions = self.get_all_hosts_consumption(simulator, simulator.context.hosts)
        all_costs = {}
        for host in hosts:
            joules = all_consumptions[host]['energy']
            cost = self.calculate_cost(joules, cost_per_kWh)
            all_costs[host] = cost
            self.add_cost(simulator, host, cost)
        return all_costs

    def add_cost(self, simulator, host, cost):
        """
        @brief adds cost of power consumption to the list of cost consumptions
        for the particular host present in the QoP-ML's model
        """
        # add a new simulator if not available yet
        if simulator not in self.consumption_costs:
            self.consumption_costs[simulator] = {}
        # add a new host if not available yet
        if host not in self.consumption_costs[simulator]:
            self.consumption_costs[simulator][host] = []
        # add cost for the host - but only if we have not added it yet
        # and if it is not 'empty' (zero costs are deliberately skipped)
        if cost not in self.consumption_costs[simulator][host] and cost:
            self.consumption_costs[simulator][host].append(cost)

    def get_min_cost(self, simulator, hosts):
        """Return ``(cost, host)`` for the host with the smallest cost.

        Returns ``(0, hosts[0])`` when no cost has been recorded for the
        first host.
        """
        host = hosts[0]
        min_cost = self.consumption_costs[simulator].get(hosts[0], [])
        if len(min_cost) > 0:
            for h in hosts:
                costs = self.consumption_costs[simulator].get(h, [])
                # Skip hosts with no recorded cost: the previous code compared
                # the raw lists, so an empty list always compared as smaller
                # and ``min_cost[0]`` then raised IndexError.
                if costs and costs < min_cost:
                    min_cost = costs
                    host = h
            return min_cost[0], host
        else:
            return 0, host

    def get_max_cost(self, simulator, hosts):
        """Return ``(cost, host)`` for the host with the largest cost.

        Returns ``(0, hosts[0])`` when no cost has been recorded for the
        first host.
        """
        host = hosts[0]
        max_cost = self.consumption_costs[simulator].get(hosts[0], [])
        if len(max_cost) > 0:
            for h in hosts:
                costs = self.consumption_costs[simulator].get(h, [])
                # Skip hosts with no recorded cost (see get_min_cost).
                if costs and costs > max_cost:
                    max_cost = costs
                    host = h
            return max_cost[0], host
        else:
            return 0, host

    def get_avg_cost(self, simulator, hosts):
        """Return the average of all recorded costs over ``hosts`` (0 if none)."""
        cost_sum = 0.0
        i = 0
        for host in hosts:
            for cost in self.consumption_costs[simulator].get(host, []):
                cost_sum += cost
                i += 1
        if i != 0:
            return cost_sum / i
        else:
            return 0

    def get_total_cost(self, simulator, hosts):
        """Return the sum of all recorded costs over ``hosts``."""
        cost_sum = 0.0
        for host in hosts:
            for cost in self.consumption_costs[simulator].get(host, []):
                cost_sum += cost
        return cost_sum

    def get_all_costs(self, simulator):
        """Return the ``{host: [cost, ...]}`` mapping for ``simulator``.

        An empty dict is returned for an unknown simulator (the old code
        returned a list here, which was inconsistent with the dict that the
        hit path returns).
        """
        if simulator not in self.consumption_costs:
            return {}
        return self.consumption_costs[simulator]

    def get_all_hosts_consumption(self, simulator, hosts):
        """Fetch the per-host energy consumption from the energy module."""
        voltage = self.energyanalysis_module.get_voltage()
        consumptions = self.energyanalysis_module.get_hosts_consumptions(simulator, hosts, voltage)
        return consumptions
PypiClean
/Firefly_III_API_Client-2.0.5.0-py3-none-any.whl/firefly_iii_client/paths/v1_data_export_rules/get.py
"""Auto-generated endpoint module for ``GET /v1/data/export/rules``.

Exports rule groups and rule data from Firefly III as a binary download.
Do not edit by hand: this file follows the openapi-python generator layout
(typed request parameters, per-status response descriptors, and one API
class per naming style).
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict

from firefly_iii_client import api_client, exceptions
from datetime import date, datetime  # noqa: F401
import decimal  # noqa: F401
import functools  # noqa: F401
import io  # noqa: F401
import re  # noqa: F401
import typing  # noqa: F401
import typing_extensions  # noqa: F401
import uuid  # noqa: F401

import frozendict  # noqa: F401

from firefly_iii_client import schemas  # noqa: F401

from firefly_iii_client.model.export_file_filter import ExportFileFilter
from firefly_iii_client.model.unauthenticated import Unauthenticated
from firefly_iii_client.model.bad_request import BadRequest
from firefly_iii_client.model.internal_exception import InternalException
from firefly_iii_client.model.not_found import NotFound

from . import path

# Query params
# Optional ``type`` query parameter selecting the export file format.
TypeSchema = ExportFileFilter
RequestRequiredQueryParams = typing_extensions.TypedDict(
    'RequestRequiredQueryParams',
    {
    }
)
RequestOptionalQueryParams = typing_extensions.TypedDict(
    'RequestOptionalQueryParams',
    {
        'type': typing.Union[TypeSchema, ],
    },
    total=False
)


class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
    pass


# Serializer for the ``type`` query parameter (form style, exploded).
request_query_type = api_client.QueryParameter(
    name="type",
    style=api_client.ParameterStyle.FORM,
    schema=TypeSchema,
    explode=True,
)
# Header params
# Optional ``X-Trace-Id`` request header carrying a correlation UUID.
XTraceIdSchema = schemas.UUIDSchema
RequestRequiredHeaderParams = typing_extensions.TypedDict(
    'RequestRequiredHeaderParams',
    {
    }
)
RequestOptionalHeaderParams = typing_extensions.TypedDict(
    'RequestOptionalHeaderParams',
    {
        'X-Trace-Id': typing.Union[XTraceIdSchema, str, uuid.UUID, ],
    },
    total=False
)


class RequestHeaderParams(RequestRequiredHeaderParams, RequestOptionalHeaderParams):
    pass


request_header_x_trace_id = api_client.HeaderParameter(
    name="X-Trace-Id",
    style=api_client.ParameterStyle.SIMPLE,
    schema=XTraceIdSchema,
)
# Authentication schemes required by this endpoint.
_auth = [
    'firefly_iii_auth',
]

# 200: the export itself, returned as a raw binary stream.
SchemaFor200ResponseBodyApplicationOctetStream = schemas.BinarySchema


@dataclass
class ApiResponseFor200(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor200ResponseBodyApplicationOctetStream,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_200 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor200,
    content={
        'application/octet-stream': api_client.MediaType(
            schema=SchemaFor200ResponseBodyApplicationOctetStream),
    },
)
# 400: malformed request.
SchemaFor400ResponseBodyApplicationJson = BadRequest


@dataclass
class ApiResponseFor400(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor400ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_400 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor400,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor400ResponseBodyApplicationJson),
    },
)
# 401: missing or invalid credentials.
SchemaFor401ResponseBodyApplicationJson = Unauthenticated


@dataclass
class ApiResponseFor401(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor401ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_401 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor401,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor401ResponseBodyApplicationJson),
    },
)
# 404: resource not found.
SchemaFor404ResponseBodyApplicationJson = NotFound


@dataclass
class ApiResponseFor404(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor404ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_404 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor404,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor404ResponseBodyApplicationJson),
    },
)
# 500: server-side failure.
SchemaFor500ResponseBodyApplicationJson = InternalException


@dataclass
class ApiResponseFor500(api_client.ApiResponse):
    response: urllib3.HTTPResponse
    body: typing.Union[
        SchemaFor500ResponseBodyApplicationJson,
    ]
    headers: schemas.Unset = schemas.unset


_response_for_500 = api_client.OpenApiResponse(
    response_cls=ApiResponseFor500,
    content={
        'application/json': api_client.MediaType(
            schema=SchemaFor500ResponseBodyApplicationJson),
    },
)
# Dispatch table: HTTP status code -> response descriptor used to deserialize.
_status_code_to_response = {
    '200': _response_for_200,
    '400': _response_for_400,
    '401': _response_for_401,
    '404': _response_for_404,
    '500': _response_for_500,
}
_all_accept_content_types = (
    'application/octet-stream',
    'application/json',
)


class BaseApi(api_client.Api):
    # The three @typing.overload stubs below only narrow the return type for
    # type checkers based on ``skip_deserialization``; the fourth definition
    # is the real implementation.

    @typing.overload
    def _export_rules_oapg(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def _export_rules_oapg(
        self,
        skip_deserialization: typing_extensions.Literal[True],
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def _export_rules_oapg(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def _export_rules_oapg(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """
        Export rule groups and rule data from Firefly III
        :param skip_deserialization: If true then api_response.response will be set but
            api_response.body and api_response.headers will not be deserialized into schema
            class instances
        """
        # Validate the TypedDict inputs before touching the network.
        self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
        self._verify_typed_dict_inputs_oapg(RequestHeaderParams, header_params)
        used_path = path.value

        # Serialize the query parameters onto the path.
        prefix_separator_iterator = None
        for parameter in (
            request_query_type,
        ):
            parameter_data = query_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            if prefix_separator_iterator is None:
                prefix_separator_iterator = parameter.get_prefix_separator_iterator()
            serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
            for serialized_value in serialized_data.values():
                used_path += serialized_value

        # Serialize the request headers.
        _headers = HTTPHeaderDict()
        for parameter in (
            request_header_x_trace_id,
        ):
            parameter_data = header_params.get(parameter.name, schemas.unset)
            if parameter_data is schemas.unset:
                continue
            serialized_data = parameter.serialize(parameter_data)
            _headers.extend(serialized_data)
        # TODO add cookie handling
        if accept_content_types:
            for accept_content_type in accept_content_types:
                _headers.add('Accept', accept_content_type)

        response = self.api_client.call_api(
            resource_path=used_path,
            method='get'.upper(),
            headers=_headers,
            auth_settings=_auth,
            stream=stream,
            timeout=timeout,
        )

        if skip_deserialization:
            api_response = api_client.ApiResponseWithoutDeserialization(response=response)
        else:
            # Pick the response descriptor matching the HTTP status, if any.
            response_for_status = _status_code_to_response.get(str(response.status))
            if response_for_status:
                api_response = response_for_status.deserialize(response, self.api_client.configuration)
            else:
                api_response = api_client.ApiResponseWithoutDeserialization(response=response)

        # Non-2xx responses raise, but the (possibly deserialized) response is
        # still attached to the exception for the caller to inspect.
        if not 200 <= response.status <= 299:
            raise exceptions.ApiException(
                status=response.status,
                reason=response.reason,
                api_response=api_response
            )

        return api_response


class ExportRules(BaseApi):
    # this class is used by api classes that refer to endpoints with operationId fn names

    @typing.overload
    def export_rules(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def export_rules(
        self,
        skip_deserialization: typing_extensions.Literal[True],
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def export_rules(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def export_rules(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """Thin public alias for :meth:`BaseApi._export_rules_oapg`."""
        return self._export_rules_oapg(
            query_params=query_params,
            header_params=header_params,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )


class ApiForget(BaseApi):
    # this class is used by api classes that refer to endpoints by path and http method names

    @typing.overload
    def get(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: typing_extensions.Literal[False] = ...,
    ) -> typing.Union[
        ApiResponseFor200,
    ]: ...

    @typing.overload
    def get(
        self,
        skip_deserialization: typing_extensions.Literal[True],
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
    ) -> api_client.ApiResponseWithoutDeserialization: ...

    @typing.overload
    def get(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = ...,
    ) -> typing.Union[
        ApiResponseFor200,
        api_client.ApiResponseWithoutDeserialization,
    ]: ...

    def get(
        self,
        query_params: RequestQueryParams = frozendict.frozendict(),
        header_params: RequestHeaderParams = frozendict.frozendict(),
        accept_content_types: typing.Tuple[str] = _all_accept_content_types,
        stream: bool = False,
        timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
        skip_deserialization: bool = False,
    ):
        """Thin public alias for :meth:`BaseApi._export_rules_oapg`."""
        return self._export_rules_oapg(
            query_params=query_params,
            header_params=header_params,
            accept_content_types=accept_content_types,
            stream=stream,
            timeout=timeout,
            skip_deserialization=skip_deserialization
        )
PypiClean
/Flask-Vue-0.3.5.tar.gz/Flask-Vue-0.3.5/flask_vue/static/vue-router.js
// Vendored vue-router (0.x) bundle, part 1 of 2 in this chunk.
// UMD wrapper: CommonJS -> AMD -> browser global `VueRouter`.
// NOTE(review): the factory function opened here is closed beyond the end of
// this chunk.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
  typeof define === 'function' && define.amd ? define(factory) :
  global.VueRouter = factory();
}(this, function () { 'use strict';

  // Minimal replacement for Babel's class helpers.
  var babelHelpers = {};
  babelHelpers.classCallCheck = function (instance, Constructor) {
    if (!(instance instanceof Constructor)) {
      throw new TypeError("Cannot call a class as a function");
    }
  };

  // --- route-recognizer DSL -------------------------------------------------
  // Target/Matcher/generateMatch implement the nested route-mapping DSL:
  // map(function (match) { match("/a").to(...).to(...) }).

  function Target(path, matcher, delegate) {
    this.path = path;
    this.matcher = matcher;
    this.delegate = delegate;
  }

  Target.prototype = {
    // Bind `target` (a handler) to this path; an optional callback maps
    // nested child routes under it.
    to: function to(target, callback) {
      var delegate = this.delegate;

      if (delegate && delegate.willAddRoute) {
        target = delegate.willAddRoute(this.matcher.target, target);
      }

      this.matcher.add(this.path, target);

      if (callback) {
        if (callback.length === 0) {
          throw new Error("You must have an argument in the function passed to `to`");
        }
        this.matcher.addChild(this.path, target, callback, this.delegate);
      }
      return this;
    }
  };

  // Collects `path -> handler` routes plus nested child matchers.
  function Matcher(target) {
    this.routes = {};
    this.children = {};
    this.target = target;
  }

  Matcher.prototype = {
    add: function add(path, handler) {
      this.routes[path] = handler;
    },

    addChild: function addChild(path, target, callback, delegate) {
      var matcher = new Matcher(target);
      this.children[path] = matcher;

      var match = generateMatch(path, matcher, delegate);

      if (delegate && delegate.contextEntered) {
        delegate.contextEntered(target, match);
      }

      callback(match);
    }
  };

  // Returns the `match(path)` function handed to user callbacks; prefixes
  // nested paths with the parent path.
  function generateMatch(startingPath, matcher, delegate) {
    return function (path, nestedCallback) {
      var fullPath = startingPath + path;

      if (nestedCallback) {
        nestedCallback(generateMatch(fullPath, matcher, delegate));
      } else {
        return new Target(startingPath + path, matcher, delegate);
      }
    };
  }

  // Appends `path` (minus the portion already covered by earlier entries)
  // to the accumulated route array.
  function addRoute(routeArray, path, handler) {
    var len = 0;
    for (var i = 0, l = routeArray.length; i < l; i++) {
      len += routeArray[i].path.length;
    }

    path = path.substr(len);
    var route = { path: path, handler: handler };
    routeArray.push(route);
  }

  // Depth-first walk over a matcher tree, invoking `callback` once per
  // complete (leaf) route chain.
  function eachRoute(baseRoute, matcher, callback, binding) {
    var routes = matcher.routes;

    for (var path in routes) {
      if (routes.hasOwnProperty(path)) {
        var routeArray = baseRoute.slice();
        addRoute(routeArray, path, routes[path]);

        if (matcher.children[path]) {
          eachRoute(routeArray, matcher.children[path], callback, binding);
        } else {
          callback.call(binding, routeArray);
        }
      }
    }
  }

  // Entry point installed as RouteRecognizer.prototype.map (see part 2).
  function map(callback, addRouteCallback) {
    var matcher = new Matcher();

    callback(generateMatch("", matcher, this.delegate));

    eachRoute([], matcher, function (route) {
      if (addRouteCallback) {
        addRouteCallback(this, route);
      } else {
        this.add(route);
      }
    }, this);
  }

  // Characters that must be escaped when a static segment is turned into a
  // regex fragment.
  var specials = ['/', '.', '*', '+', '?', '|', '(', ')', '[', ']', '{', '}', '\\'];

  var escapeRegex = new RegExp('(\\' + specials.join('|\\') + ')', 'g');

  var noWarning = false;
  function warn(msg) {
    if (!noWarning && typeof console !== 'undefined') {
      console.error('[vue-router] ' + msg);
    }
  }

  // decodeURI(Component) that warns instead of throwing on malformed input;
  // returns undefined in that case.
  function tryDecode(uri, asComponent) {
    try {
      return asComponent ? decodeURIComponent(uri) : decodeURI(uri);
    } catch (e) {
      warn('malformed URI' + (asComponent ? ' component: ' : ': ') + uri);
    }
  }

  function isArray(test) {
    return Object.prototype.toString.call(test) === "[object Array]";
  }

  // A Segment represents a segment in the original route description.
  // Each Segment type provides an `eachChar` and `regex` method.
  //
  // The `eachChar` method invokes the callback with one or more character
  // specifications. A character specification consumes one or more input
  // characters.
  //
  // The `regex` method returns a regex fragment for the segment. If the
  // segment is a dynamic or star segment, the regex fragment also includes
  // a capture.
  //
  // A character specification contains:
  //
  // * `validChars`: a String with a list of all valid characters, or
  // * `invalidChars`: a String with a list of all invalid characters
  // * `repeat`: true if the character specification can repeat

  // Literal path segment, e.g. "users" in "/users/:id".
  function StaticSegment(string) {
    this.string = string;
  }
  StaticSegment.prototype = {
    eachChar: function eachChar(callback) {
      var string = this.string,
          ch;

      for (var i = 0, l = string.length; i < l; i++) {
        ch = string.charAt(i);
        callback({ validChars: ch });
      }
    },

    regex: function regex() {
      return this.string.replace(escapeRegex, '\\$1');
    },

    generate: function generate() {
      return this.string;
    }
  };

  // Dynamic segment, e.g. ":id" - matches anything up to the next "/".
  function DynamicSegment(name) {
    this.name = name;
  }
  DynamicSegment.prototype = {
    eachChar: function eachChar(callback) {
      callback({ invalidChars: "/", repeat: true });
    },

    regex: function regex() {
      return "([^/]+)";
    },

    generate: function generate(params) {
      var val = params[this.name];
      return val == null ? ":" + this.name : val;
    }
  };

  // Star segment, e.g. "*path" - greedily matches the rest of the path,
  // slashes included.
  function StarSegment(name) {
    this.name = name;
  }
  StarSegment.prototype = {
    eachChar: function eachChar(callback) {
      callback({ invalidChars: "", repeat: true });
    },

    regex: function regex() {
      return "(.+)";
    },

    generate: function generate(params) {
      var val = params[this.name];
      return val == null ? ":" + this.name : val;
    }
  };

  // Empty segment produced by leading/trailing/double slashes.
  function EpsilonSegment() {}
  EpsilonSegment.prototype = {
    eachChar: function eachChar() {},
    regex: function regex() {
      return "";
    },
    generate: function generate() {
      return "";
    }
  };

  // Parse one route string into Segment objects, collecting dynamic/star
  // parameter names into `names` and a specificity score into `specificity`.
  function parse(route, names, specificity) {
    // normalize route as not starting with a "/". Recognition will
    // also normalize.
    if (route.charAt(0) === "/") {
      route = route.substr(1);
    }

    var segments = route.split("/"),
        results = [];

    // A route's specificity is determined by the order that its different
    // segments appear in. This system mirrors how the magnitude of numbers
    // written as strings works.
    // Consider a number written as: "abc". An example would be "200". Any
    // other number written "xyz" will be smaller than "abc" so long as
    // `a > z`. For instance, "199" is smaller than "200", even though "9"
    // and "9" are larger than "0" (the values of `b` and `c`). This is
    // because the leading symbol, "2", is larger than the other leading
    // symbol, "1".
    // The rule is that symbols to the left carry more weight than symbols
    // to the right when a number is written out as a string.
    // This system of number magnitude works well for route specificity,
    // too: a route written as `a/b/c` will be more specific than `x/y/z`
    // as long as `a` is more specific than `x`, irrespective of the other
    // parts.
    // Because of this similarity, we assign each type of segment a number
    // value written as a string. We can find the specificity of compound
    // routes by concatenating these strings together, from left to right.
    // After we have looped through all of the segments, we convert the
    // string to a number.
    specificity.val = '';

    for (var i = 0, l = segments.length; i < l; i++) {
      var segment = segments[i],
          match;

      if (match = segment.match(/^:([^\/]+)$/)) {
        results.push(new DynamicSegment(match[1]));
        names.push(match[1]);
        specificity.val += '3';
      } else if (match = segment.match(/^\*([^\/]+)$/)) {
        results.push(new StarSegment(match[1]));
        specificity.val += '2';
        names.push(match[1]);
      } else if (segment === "") {
        results.push(new EpsilonSegment());
        specificity.val += '1';
      } else {
        results.push(new StaticSegment(segment));
        specificity.val += '4';
      }
    }

    specificity.val = +specificity.val;

    return results;
  }

  // A State has a character specification (`charSpec`) and a list of
  // possible subsequent states (`nextStates`).
  //
  // If a State is an accepting state, it will also have several additional
  // properties:
  //
  // * `regex`: A regular expression that is used to extract parameters from paths
  //   that reached this accepting state.
  // * `handlers`: Information on how to convert the list of captures into calls
  //   to registered handlers with the specified parameters
  // * `types`: How many static, dynamic or star segments in this route. Used to
  //   decide which route to use if multiple registered routes match a path.
  //
  // Currently, State is implemented naively by looping over `nextStates` and
  // comparing a character specification against a character. A more efficient
  // implementation would use a hash of keys pointing at one or more next states.

  function State(charSpec) {
    this.charSpec = charSpec;
    this.nextStates = [];
  }

  State.prototype = {
    // Return the existing child state with an identical charSpec, if any.
    get: function get(charSpec) {
      var nextStates = this.nextStates;

      for (var i = 0, l = nextStates.length; i < l; i++) {
        var child = nextStates[i];

        var isEqual = child.charSpec.validChars === charSpec.validChars;
        isEqual = isEqual && child.charSpec.invalidChars === charSpec.invalidChars;

        if (isEqual) {
          return child;
        }
      }
    },

    put: function put(charSpec) {
      var state;

      // If the character specification already exists in a child of the current
      // state, just return that state.
      if (state = this.get(charSpec)) {
        return state;
      }

      // Make a new state for the character spec
      state = new State(charSpec);

      // Insert the new state as a child of the current state
      this.nextStates.push(state);

      // If this character specification repeats, insert the new state as a child
      // of itself. Note that this will not trigger an infinite loop because each
      // transition during recognition consumes a character.
      if (charSpec.repeat) {
        state.nextStates.push(state);
      }

      // Return the new state
      return state;
    },

    // Find a list of child states matching the next character
    match: function match(ch) {
      var nextStates = this.nextStates,
          child, charSpec, chars;

      var returned = [];

      for (var i = 0, l = nextStates.length; i < l; i++) {
        child = nextStates[i];

        charSpec = child.charSpec;

        if (typeof (chars = charSpec.validChars) !== 'undefined') {
          if (chars.indexOf(ch) !== -1) {
            returned.push(child);
          }
        } else if (typeof (chars = charSpec.invalidChars) !== 'undefined') {
          if (chars.indexOf(ch) === -1) {
            returned.push(child);
          }
        }
      }

      return returned;
    }

    /** IF DEBUG
    , debug: function() {
      var charSpec = this.charSpec,
          debug = "[",
          chars = charSpec.validChars || charSpec.invalidChars;

      if (charSpec.invalidChars) {
        debug += "^";
      }

      debug += chars;
      debug += "]";

      if (charSpec.repeat) {
        debug += "+";
      }

      return debug;
    }
    END IF **/
  };

  /** IF DEBUG
  function debug(log) {
    console.log(log);
  }

  function debugState(state) {
    return state.nextStates.map(function(n) {
      if (n.nextStates.length === 0) { return "( " + n.debug() + " [accepting] )"; }
      return "( " + n.debug() + " <then> " + n.nextStates.map(function(s) { return s.debug() }).join(" or ") + " )";
    }).join(", ")
  }
  END IF **/

  // Sort the routes by specificity
  function sortSolutions(states) {
    return states.sort(function (a, b) {
      return b.specificity.val - a.specificity.val;
    });
  }

  // Advance every live state by one input character.
  function recognizeChar(states, ch) {
    var nextStates = [];

    for (var i = 0, l = states.length; i < l; i++) {
      var state = states[i];

      nextStates = nextStates.concat(state.match(ch));
    }

    return nextStates;
  }

  // Object.create fallback for very old engines.
  var oCreate = Object.create || function (proto) {
    function F() {}
    F.prototype = proto;
    return new F();
  };

  // Array-like result object returned by `recognize`, carrying queryParams.
  function RecognizeResults(queryParams) {
    this.queryParams = queryParams || {};
  }
  RecognizeResults.prototype = oCreate({
    splice: Array.prototype.splice,
    slice: Array.prototype.slice,
    push: Array.prototype.push,
    length: 0,
    queryParams: null
  });

  // Run the accepting state's regex over the path and distribute the
  // captures over each handler's named parameters.
  function findHandler(state, path, queryParams) {
    var handlers = state.handlers,
        regex = state.regex;
    var captures = path.match(regex),
        currentCapture = 1;
    var result = new RecognizeResults(queryParams);

    for (var i = 0, l = handlers.length; i < l; i++) {
      var handler = handlers[i],
          names = handler.names,
          params = {};

      for (var j = 0, m = names.length; j < m; j++) {
        params[names[j]] = captures[currentCapture++];
      }

      result.push({ handler: handler.handler, params: params, isDynamic: !!names.length });
    }

    return result;
  }

  // Feed every character of `segment` into the NFA, returning the final state.
  function addSegment(currentState, segment) {
    segment.eachChar(function (ch) {
      var state;

      currentState = currentState.put(ch);
    });

    return currentState;
  }

  function decodeQueryParamPart(part) {
    // http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.1
    part = part.replace(/\+/gm, '%20');
    return tryDecode(part, true);
  }

  // The main interface

  var RouteRecognizer = function RouteRecognizer() {
    this.rootState = new State();
    this.names = {};
  };

  RouteRecognizer.prototype = {
    // Register one route chain (array of {path, handler}) with the NFA.
    add: function add(routes, options) {
      var currentState = this.rootState,
          regex = "^",
          specificity = {},
          handlers = [],
          allSegments = [],
          name;

      var isEmpty = true;

      for (var i = 0, l = routes.length; i < l; i++) {
        var route = routes[i],
            names = [];

        var segments = parse(route.path, names, specificity);

        allSegments = allSegments.concat(segments);

        for (var j = 0, m = segments.length; j < m; j++) {
          var segment = segments[j];

          if (segment instanceof EpsilonSegment) {
            continue;
          }

          isEmpty = false;

          // Add a "/" for the new segment
          currentState = currentState.put({ validChars: "/" });
          regex += "/";

          // Add a representation of the segment to the NFA and regex
          currentState = addSegment(currentState, segment);
          regex += segment.regex();
        }
        var handler = { handler: route.handler, names: names };
        handlers.push(handler);
      }

      if (isEmpty) {
        currentState = currentState.put({ validChars: "/" });
        regex += "/";
      }

      currentState.handlers = handlers;
      currentState.regex = new RegExp(regex + "$");
      currentState.specificity = specificity;

      if (name = options && options.as) {
        this.names[name] = {
          segments: allSegments,
          handlers: handlers
        };
      }
    },

    handlersFor: function handlersFor(name) {
      var route = this.names[name],
          result = [];

      if (!route) {
        throw new Error("There is no route named " + name);
      }

      for (var i = 0, l = route.handlers.length; i < l; i++) {
        result.push(route.handlers[i]);
      }

      return result;
    },

    hasRoute: function hasRoute(name) {
      return !!this.names[name];
    },

    // Build a concrete path for a named route from `params`.
    generate: function generate(name, params) {
      var route = this.names[name],
          output = "";
      if (!route) {
        throw new Error("There is no route named " + name);
      }

      var segments = route.segments;

      for (var i = 0, l = segments.length; i < l; i++) {
        var segment = segments[i];

        if (segment instanceof EpsilonSegment) {
          continue;
        }

        output += "/";
        output += segment.generate(params);
      }

      if (output.charAt(0) !== '/') {
        output = '/' + output;
      }

      if (params && params.queryParams) {
        output += this.generateQueryString(params.queryParams);
      }

      return output;
    },

    // Serialize a params object into a "?a=1&b[]=2" query string with
    // sorted keys; array values use the "key[]" convention.
    generateQueryString: function generateQueryString(params) {
      var pairs = [];
      var keys = [];
      for (var key in params) {
        if (params.hasOwnProperty(key)) {
          keys.push(key);
        }
      }
      keys.sort();
      for (var i = 0, len = keys.length; i < len; i++) {
        key = keys[i];
        var value = params[key];
        if (value == null) {
          continue;
        }
        var pair = encodeURIComponent(key);
        if (isArray(value)) {
          for (var j = 0, l = value.length; j < l; j++) {
            var arrayPair = key + '[]' + '=' + encodeURIComponent(value[j]);
            pairs.push(arrayPair);
          }
        } else {
          pair += "=" + encodeURIComponent(value);
          pairs.push(pair);
        }
      }

      if (pairs.length === 0) {
        return '';
      }

      return "?" + pairs.join("&");
    },

    // Inverse of generateQueryString; bare keys become 'true', "key[]"
    // entries accumulate into arrays.
    parseQueryString: function parseQueryString(queryString) {
      var pairs = queryString.split("&"),
          queryParams = {};
      for (var i = 0; i < pairs.length; i++) {
        var pair = pairs[i].split('='),
            key = decodeQueryParamPart(pair[0]),
            keyLength = key.length,
            isArray = false,
            value;
        if (pair.length === 1) {
          value = 'true';
        } else {
          //Handle arrays
          if (keyLength > 2 && key.slice(keyLength - 2) === '[]') {
            isArray = true;
            key = key.slice(0, keyLength - 2);
            if (!queryParams[key]) {
              queryParams[key] = [];
            }
          }
          value = pair[1] ? decodeQueryParamPart(pair[1]) : '';
        }
        if (isArray) {
          queryParams[key].push(value);
        } else {
          queryParams[key] = value;
        }
      }
      return queryParams;
    },

    // Match `path` against every registered route; returns a
    // RecognizeResults (or undefined). `silent` suppresses decode warnings.
    recognize: function recognize(path, silent) {
      noWarning = silent;
      var states = [this.rootState],
          pathLen, i, l, queryStart, queryParams = {},
          isSlashDropped = false;

      queryStart = path.indexOf('?');
      if (queryStart !== -1) {
        var queryString = path.substr(queryStart + 1, path.length);
        path = path.substr(0, queryStart);
        if (queryString) {
          queryParams = this.parseQueryString(queryString);
        }
      }

      path = tryDecode(path);
      if (!path) return;

      // DEBUG GROUP path

      if (path.charAt(0) !== "/") {
        path = "/" + path;
      }

      pathLen = path.length;
      if (pathLen > 1 && path.charAt(pathLen - 1) === "/") {
        path = path.substr(0, pathLen - 1);
        isSlashDropped = true;
      }

      for (i = 0, l = path.length; i < l; i++) {
        states = recognizeChar(states, path.charAt(i));
        if (!states.length) {
          break;
        }
      }

      // END DEBUG GROUP

      var solutions = [];
      for (i = 0, l = states.length; i < l; i++) {
        if (states[i].handlers) {
          solutions.push(states[i]);
        }
      }

      // sort is in-place, so `solutions[0]` below is the most specific match
      states = sortSolutions(solutions);

      var state = solutions[0];
      if (state && state.handlers) {
        // if a trailing slash was dropped and a star segment is the last segment
        // specified, put the trailing slash back
        if (isSlashDropped && state.regex.source.slice(-5) === "(.+)$") {
          path = path + "/";
        }
        return findHandler(state, path, queryParams);
      }
    }
  };

  RouteRecognizer.prototype.map = map;

  var genQuery = RouteRecognizer.prototype.generateQueryString;

  // export default for holding the Vue reference
  var exports$1 = {};

  /**
   * Warn stuff.
   *
   * @param {String} msg
   */

  function warn$1(msg) {
    /* istanbul ignore next */
    if (typeof console !== 'undefined') {
      console.error('[vue-router] ' + msg);
    }
  }

  /**
   * Resolve a relative path.
   *
   * @param {String} base
   * @param {String} relative
   * @param {Boolean} append
   * @return {String}
   */

  function resolvePath(base, relative, append) {
    var query = base.match(/(\?.*)$/);
    if (query) {
      query = query[1];
      base = base.slice(0, -query.length);
    }
    // a query!
    if (relative.charAt(0) === '?') {
      return base + relative;
    }
    var stack = base.split('/');
    // remove trailing segment if:
    // - not appending
    // - appending to trailing slash (last segment is empty)
    if (!append || !stack[stack.length - 1]) {
      stack.pop();
    }
    // resolve relative path
    var segments = relative.replace(/^\//, '').split('/');
    for (var i = 0; i < segments.length; i++) {
      var segment = segments[i];
      if (segment === '.') {
        continue;
      } else if (segment === '..') {
        stack.pop();
      } else {
        stack.push(segment);
      }
    }
    // ensure leading slash
    if (stack[0] !== '') {
      stack.unshift('');
    }
    return stack.join('/');
  }

  /**
   * Forgiving check for a promise
   *
   * @param {Object} p
   * @return {Boolean}
   */

  function isPromise(p) {
    return p && typeof p.then === 'function';
  }

  /**
   * Retrieve a route config field from a component instance
   * OR a component constructor.
   *
   * @param {Function|Vue} component
   * @param {String} name
   * @return {*}
   */

  function getRouteConfig(component, name) {
    var options = component && (component.$options || component.options);
    return options && options.route && options.route[name];
  }

  /**
   * Resolve an async component factory. Have to do a dirty
   * mock here because of Vue core's internal API depends on
   * an ID check. (function body continues below)
   */
 *
 * @param {Object} handler
 * @param {Function} cb
 */
var resolver = undefined;

function resolveAsyncComponent(handler, cb) {
  // lazily create one shared fake "resolver vm"; only the component being
  // resolved is swapped into its components registry on each call
  if (!resolver) {
    resolver = {
      resolve: exports$1.Vue.prototype._resolveComponent,
      $options: {
        components: {
          _: handler.component
        }
      }
    };
  } else {
    resolver.$options.components._ = handler.component;
  }
  resolver.resolve('_', function (Component) {
    // cache the resolved constructor back onto the handler
    handler.component = Component;
    cb(Component);
  });
}

/**
 * Map the dynamic segments in a path to params.
 * Warns when a ":key" segment has no matching param.
 *
 * @param {String} path
 * @param {Object} params
 * @param {Object} query
 */
function mapParams(path, params, query) {
  if (params === undefined) params = {};
  path = path.replace(/:([^\/]+)/g, function (_, key) {
    var val = params[key];
    /* istanbul ignore if */
    if (!val) {
      warn$1('param "' + key + '" not found when generating ' + 'path for "' + path + '" with params ' + JSON.stringify(params));
    }
    return val || '';
  });
  if (query) {
    path += genQuery(query);
  }
  return path;
}

var hashRE = /#.*$/;

// History driver backed by the HTML5 History API (pushState/popstate).
var HTML5History = (function () {
  function HTML5History(_ref) {
    var root = _ref.root;
    var onChange = _ref.onChange;
    babelHelpers.classCallCheck(this, HTML5History);
    if (root && root !== '/') {
      // make sure there's the starting slash
      if (root.charAt(0) !== '/') {
        root = '/' + root;
      }
      // remove trailing slash
      this.root = root.replace(/\/$/, '');
      this.rootRE = new RegExp('^\\' + this.root);
    } else {
      this.root = null;
    }
    this.onChange = onChange;
    // check base tag
    var baseEl = document.querySelector('base');
    this.base = baseEl && baseEl.getAttribute('href');
  }

  // Begin listening for popstate and fire an initial route change.
  HTML5History.prototype.start = function start() {
    var _this = this;
    this.listener = function (e) {
      var url = location.pathname + location.search;
      if (_this.root) {
        // strip the configured root before matching
        url = url.replace(_this.rootRE, '');
      }
      _this.onChange(url, e && e.state, location.hash);
    };
    window.addEventListener('popstate', this.listener);
    this.listener();
  };

  HTML5History.prototype.stop = function stop() {
    window.removeEventListener('popstate', this.listener);
  };

  HTML5History.prototype.go =
  // Navigate via pushState/replaceState, then notify onChange with the
  // root-stripped, hash-stripped path.
  function go(path, replace, append) {
    var url = this.formatPath(path, append);
    if (replace) {
      history.replaceState({}, '', url);
    } else {
      // record scroll position by replacing current state
      history.replaceState({
        pos: {
          x: window.pageXOffset,
          y: window.pageYOffset
        }
      }, '', location.href);
      // then push new state
      history.pushState({}, '', url);
    }
    var hashMatch = path.match(hashRE);
    var hash = hashMatch && hashMatch[0];
    path = url
    // strip hash so it doesn't mess up params
    .replace(hashRE, '')
    // remove root before matching
    .replace(this.rootRE, '');
    this.onChange(path, null, hash);
  };

  // Format into a full URL path; relative paths are resolved against the
  // <base> href (or current location).
  HTML5History.prototype.formatPath = function formatPath(path, append) {
    return path.charAt(0) === '/'
    // absolute path
    ? this.root ? this.root + '/' + path.replace(/^\//, '') : path : resolvePath(this.base || location.pathname, path, append);
  };

  return HTML5History;
})();

// History driver backed by the URL hash ("#/..." or "#!/..." with hashbang).
var HashHistory = (function () {
  function HashHistory(_ref) {
    var hashbang = _ref.hashbang;
    var onChange = _ref.onChange;
    babelHelpers.classCallCheck(this, HashHistory);
    this.hashbang = hashbang;
    this.onChange = onChange;
  }

  // Begin listening for hashchange and fire an initial route change;
  // non-canonical hashes are rewritten via location.replace first.
  HashHistory.prototype.start = function start() {
    var self = this;
    this.listener = function () {
      var path = location.hash;
      var raw = path.replace(/^#!?/, '');
      // always
      if (raw.charAt(0) !== '/') {
        raw = '/' + raw;
      }
      var formattedPath = self.formatPath(raw);
      if (formattedPath !== path) {
        location.replace(formattedPath);
        return;
      }
      // determine query
      // note it's possible to have queries in both the actual URL
      // and the hash fragment itself.
      var query = location.search && path.indexOf('?') > -1 ?
'&' + location.search.slice(1) : location.search; self.onChange(path.replace(/^#!?/, '') + query); }; window.addEventListener('hashchange', this.listener); this.listener(); }; HashHistory.prototype.stop = function stop() { window.removeEventListener('hashchange', this.listener); }; HashHistory.prototype.go = function go(path, replace, append) { path = this.formatPath(path, append); if (replace) { location.replace(path); } else { location.hash = path; } }; HashHistory.prototype.formatPath = function formatPath(path, append) { var isAbsoloute = path.charAt(0) === '/'; var prefix = '#' + (this.hashbang ? '!' : ''); return isAbsoloute ? prefix + path : prefix + resolvePath(location.hash.replace(/^#!?/, ''), path, append); }; return HashHistory; })(); var AbstractHistory = (function () { function AbstractHistory(_ref) { var onChange = _ref.onChange; babelHelpers.classCallCheck(this, AbstractHistory); this.onChange = onChange; this.currentPath = '/'; } AbstractHistory.prototype.start = function start() { this.onChange('/'); }; AbstractHistory.prototype.stop = function stop() { // noop }; AbstractHistory.prototype.go = function go(path, replace, append) { path = this.currentPath = this.formatPath(path, append); this.onChange(path); }; AbstractHistory.prototype.formatPath = function formatPath(path, append) { return path.charAt(0) === '/' ? path : resolvePath(this.currentPath, path, append); }; return AbstractHistory; })(); /** * Determine the reusability of an existing router view. * * @param {Directive} view * @param {Object} handler * @param {Transition} transition */ function canReuse(view, handler, transition) { var component = view.childVM; if (!component || !handler) { return false; } // important: check view.Component here because it may // have been changed in activate hook if (view.Component !== handler.component) { return false; } var canReuseFn = getRouteConfig(component, 'canReuse'); return typeof canReuseFn === 'boolean' ? canReuseFn : canReuseFn ? 
  canReuseFn.call(component, {
    to: transition.to,
    from: transition.from
  }) : true; // defaults to true
}

/**
 * Check if a component can deactivate.
 * Calls the component's `canDeactivate` route hook if present,
 * otherwise proceeds immediately.
 *
 * @param {Directive} view
 * @param {Transition} transition
 * @param {Function} next
 */
function canDeactivate(view, transition, next) {
  var fromComponent = view.childVM;
  var hook = getRouteConfig(fromComponent, 'canDeactivate');
  if (!hook) {
    next();
  } else {
    transition.callHook(hook, fromComponent, next, {
      expectBoolean: true
    });
  }
}

/**
 * Check if a component can activate.
 * Resolves the (possibly async) component first, then calls its
 * `canActivate` route hook if present.
 *
 * @param {Object} handler
 * @param {Transition} transition
 * @param {Function} next
 */
function canActivate(handler, transition, next) {
  resolveAsyncComponent(handler, function (Component) {
    // have to check due to async-ness
    if (transition.aborted) {
      return;
    }
    // determine if this component can be activated
    var hook = getRouteConfig(Component, 'canActivate');
    if (!hook) {
      next();
    } else {
      transition.callHook(hook, null, next, {
        expectBoolean: true
      });
    }
  });
}

/**
 * Call deactivate hooks for existing router-views.
 *
 * @param {Directive} view
 * @param {Transition} transition
 * @param {Function} next
 */
function deactivate(view, transition, next) {
  var component = view.childVM;
  var hook = getRouteConfig(component, 'deactivate');
  if (!hook) {
    next();
  } else {
    transition.callHooks(hook, component, next);
  }
}

/**
 * Activate / switch component for a router-view.
 *
 * @param {Directive} view
 * @param {Transition} transition
 * @param {Number} depth - index into transition.activateQueue
 * @param {Function} [cb]
 * @param {Boolean} [reuse] - see comment on the reuse flag below
 */
function activate(view, transition, depth, cb, reuse) {
  var handler = transition.activateQueue[depth];
  // past the end of the activate queue: clear this view and stop recursing
  if (!handler) {
    saveChildView(view);
    if (view._bound) {
      view.setComponent(null);
    }
    cb && cb();
    return;
  }
  var Component = view.Component = handler.component;
  var activateHook = getRouteConfig(Component, 'activate');
  var dataHook = getRouteConfig(Component, 'data');
  var waitForData = getRouteConfig(Component, 'waitForData');
  view.depth = depth;
  view.activated = false;
  var component = undefined;
  // $loadingRouteData is true while data loads in the background
  var loading = !!(dataHook && !waitForData);
  // "reuse" is a flag passed down when the parent view is
  // either reused via keep-alive or as a child of a kept-alive view.
  // of course we can only reuse if the current kept-alive instance
  // is of the correct type.
  reuse = reuse && view.childVM && view.childVM.constructor === Component;
  if (reuse) {
    // just reuse
    component = view.childVM;
    component.$loadingRouteData = loading;
  } else {
    saveChildView(view);
    // unbuild current component. this step also destroys
    // and removes all nested child views.
    view.unbuild(true);
    // build the new component. this will also create the
    // direct child view of the current one. it will register
    // itself as view.childView.
    component = view.build({
      _meta: {
        $loadingRouteData: loading
      },
      created: function created() {
        this._routerView = view;
      }
    });
    // handle keep-alive.
    // when a kept-alive child vm is restored, we need to
    // add its cached child views into the router's view list,
    // and also properly update current view's child view.
    if (view.keepAlive) {
      component.$loadingRouteData = loading;
      var cachedChildView = component._keepAliveRouterView;
      if (cachedChildView) {
        view.childView = cachedChildView;
        component._keepAliveRouterView = null;
      }
    }
  }
  // cleanup the component in case the transition is aborted
  // before the component is ever inserted.
  var cleanup = function cleanup() {
    component.$destroy();
  };
  // actually insert the component and trigger transition
  var insert = function insert() {
    if (reuse) {
      cb && cb();
      return;
    }
    var router = transition.router;
    if (router._rendered || router._transitionOnLoad) {
      view.transition(component);
    } else {
      // no transition on first render, manual transition
      /* istanbul ignore if */
      if (view.setCurrent) {
        // 0.12 compat
        view.setCurrent(component);
      } else {
        // 1.0
        view.childVM = component;
      }
      component.$before(view.anchor, null, false);
    }
    cb && cb();
  };
  var afterData = function afterData() {
    // activate the child view
    if (view.childView) {
      activate(view.childView, transition, depth + 1, null, reuse || view.keepAlive);
    }
    insert();
  };
  // called after activation hook is resolved
  var afterActivate = function afterActivate() {
    view.activated = true;
    if (dataHook && waitForData) {
      // wait until data loaded to insert
      loadData(component, transition, dataHook, afterData, cleanup);
    } else {
      // load data and insert at the same time
      if (dataHook) {
        loadData(component, transition, dataHook);
      }
      afterData();
    }
  };
  if (activateHook) {
    transition.callHooks(activateHook, component, afterActivate, {
      cleanup: cleanup,
      postActivate: true
    });
  } else {
    afterActivate();
  }
}

/**
 * Reuse a view, just reload data if necessary.
 *
 * @param {Directive} view
 * @param {Transition} transition
 */
function reuse(view, transition) {
  var component = view.childVM;
  var dataHook = getRouteConfig(component, 'data');
  if (dataHook) {
    loadData(component, transition, dataHook);
  }
}

/**
 * Asynchronously load and apply data to component.
 *
 * @param {Vue} component
 * @param {Transition} transition
 * @param {Function} hook - the component's `data` route hook
 * @param {Function} [cb]
 * @param {Function} [cleanup]
 */
function loadData(component, transition, hook, cb, cleanup) {
  component.$loadingRouteData = true;
  transition.callHooks(hook, component, function () {
    component.$loadingRouteData = false;
    component.$emit('route-data-loaded', component);
    cb && cb();
  }, {
    cleanup: cleanup,
    postActivate: true,
    processData: function processData(data) {
      // handle promise sugar syntax
      var promises = [];
      if (isPlainObject(data)) {
        Object.keys(data).forEach(function (key) {
          var val = data[key];
          if (isPromise(val)) {
            // promise values are $set on the vm once resolved
            promises.push(val.then(function (resolvedVal) {
              component.$set(key, resolvedVal);
            }));
          } else {
            component.$set(key, val);
          }
        });
      }
      if (promises.length) {
        // NOTE(review): uses the first promise's constructor for .all —
        // assumes a standard Promise implementation; confirm
        return promises[0].constructor.all(promises);
      }
    }
  });
}

/**
 * Save the child view for a kept-alive view so that
 * we can restore it when it is switched back to.
 *
 * @param {Directive} view
 */
function saveChildView(view) {
  if (view.keepAlive && view.childVM && view.childView) {
    view.childVM._keepAliveRouterView = view.childView;
  }
  view.childView = null;
}

/**
 * Check plain object.
 *
 * @param {*} val
 */
function isPlainObject(val) {
  return Object.prototype.toString.call(val) === '[object Object]';
}

/**
 * A RouteTransition object manages the pipeline of a
 * router-view switching process. This is also the object
 * passed into user route hooks.
 *
 * @param {Router} router
 * @param {Route} to
 * @param {Route} from
 */
var RouteTransition = (function () {
  function RouteTransition(router, to, from) {
    babelHelpers.classCallCheck(this, RouteTransition);
    this.router = router;
    this.to = to;
    this.from = from;
    this.next = null;
    this.aborted = false;
    this.done = false;
  }

  /**
   * Abort current transition and return to previous location.
   */
  RouteTransition.prototype.abort = function abort() {
    if (!this.aborted) {
      this.aborted = true;
      // if the root path throws an error during validation
      // on initial load, it gets caught in an infinite loop.
      var abortingOnLoad = !this.from.path && this.to.path === '/';
      if (!abortingOnLoad) {
        this.router.replace(this.from.path || '/');
      }
    }
  };

  /**
   * Abort current transition and redirect to a new location.
   * String paths get the current transition's params/query mapped in;
   * object paths inherit missing params/query from the target route.
   *
   * @param {String} path
   */
  RouteTransition.prototype.redirect = function redirect(path) {
    if (!this.aborted) {
      this.aborted = true;
      if (typeof path === 'string') {
        path = mapParams(path, this.to.params, this.to.query);
      } else {
        path.params = path.params || this.to.params;
        path.query = path.query || this.to.query;
      }
      this.router.replace(path);
    }
  };

  /**
   * A router view transition's pipeline can be described as
   * follows, assuming we are transitioning from an existing
   * <router-view> chain [Component A, Component B] to a new
   * chain [Component A, Component C]:
   *
   *   A        A
   *   |   =>   |
   *   B        C
   *
   * 1. Reusability phase:
   *    -> canReuse(A, A)
   *    -> canReuse(B, C)
   *    -> determine new queues:
   *       - deactivation: [B]
   *       - activation: [C]
   *
   * 2. Validation phase:
   *    -> canDeactivate(B)
   *    -> canActivate(C)
   *
   * 3. Activation phase:
   *    -> deactivate(B)
   *    -> activate(C)
   *
   * Each of these steps can be asynchronous, and any
   * step can potentially abort the transition.
   *
   * @param {Function} cb
   */
  RouteTransition.prototype.start = function start(cb) {
    var transition = this;
    // determine the queue of views to deactivate
    var deactivateQueue = [];
    var view = this.router._rootView;
    while (view) {
      deactivateQueue.unshift(view);
      view = view.childView;
    }
    var reverseDeactivateQueue = deactivateQueue.slice().reverse();
    // determine the queue of route handlers to activate
    var activateQueue = this.activateQueue = toArray(this.to.matched).map(function (match) {
      return match.handler;
    });
    // 1. Reusability phase
    // find the longest reusable prefix of the view chain
    var i = undefined,
        reuseQueue = undefined;
    for (i = 0; i < reverseDeactivateQueue.length; i++) {
      if (!canReuse(reverseDeactivateQueue[i], activateQueue[i], transition)) {
        break;
      }
    }
    if (i > 0) {
      reuseQueue = reverseDeactivateQueue.slice(0, i);
      deactivateQueue = reverseDeactivateQueue.slice(i).reverse();
      activateQueue = activateQueue.slice(i);
    }
    // 2. Validation phase
    transition.runQueue(deactivateQueue, canDeactivate, function () {
      transition.runQueue(activateQueue, canActivate, function () {
        transition.runQueue(deactivateQueue, deactivate, function () {
          // 3. Activation phase
          // Update router current route
          transition.router._onTransitionValidated(transition);
          // trigger reuse for all reused views
          reuseQueue && reuseQueue.forEach(function (view) {
            return reuse(view, transition);
          });
          // the root of the chain that needs to be replaced
          // is the top-most non-reusable view.
          if (deactivateQueue.length) {
            var _view = deactivateQueue[deactivateQueue.length - 1];
            var depth = reuseQueue ? reuseQueue.length : 0;
            activate(_view, transition, depth, cb);
          } else {
            cb();
          }
        });
      });
    });
  };

  /**
   * Asynchronously and sequentially apply a function to a
   * queue.
   *
   * @param {Array} queue
   * @param {Function} fn - called as fn(item, transition, next)
   * @param {Function} cb - called after the whole queue is processed
   */
  RouteTransition.prototype.runQueue = function runQueue(queue, fn, cb) {
    var transition = this;
    step(0);
    function step(index) {
      if (index >= queue.length) {
        cb();
      } else {
        fn(queue[index], transition, function () {
          step(index + 1);
        });
      }
    }
  };

  /**
   * Call a user provided route transition hook and handle
   * the response (e.g. if the user returns a promise).
   *
   * If the user neither expects an argument nor returns a
   * promise, the hook is assumed to be synchronous.
   *
   * @param {Function} hook
   * @param {*} [context]
   * @param {Function} [cb]
   * @param {Object} [options]
   *                 - {Boolean} expectBoolean
   *                 - {Boolean} postActivate
   *                 - {Function} processData
   *                 - {Function} cleanup
   */
  RouteTransition.prototype.callHook = function callHook(hook, context, cb) {
    // destructure the options argument (ES5 output of default destructuring)
    var _ref = arguments.length <= 3 || arguments[3] === undefined ? {} : arguments[3];
    var _ref$expectBoolean = _ref.expectBoolean;
    var expectBoolean = _ref$expectBoolean === undefined ? false : _ref$expectBoolean;
    var _ref$postActivate = _ref.postActivate;
    var postActivate = _ref$postActivate === undefined ? false : _ref$postActivate;
    var processData = _ref.processData;
    var cleanup = _ref.cleanup;
    var transition = this;
    var nextCalled = false;
    // abort the transition
    var abort = function abort() {
      cleanup && cleanup();
      transition.abort();
    };
    // handle errors
    var onError = function onError(err) {
      // post-activate hooks should not abort an already-applied transition
      postActivate ? next() : abort();
      if (err && !transition.router._suppress) {
        warn$1('Uncaught error during transition: ');
        throw err instanceof Error ? err : new Error(err);
      }
    };
    // since promise swallows errors, we have to
    // throw it in the next tick...
    var onPromiseError = function onPromiseError(err) {
      try {
        onError(err);
      } catch (e) {
        setTimeout(function () {
          throw e;
        }, 0);
      }
    };
    // advance the transition to the next step
    var next = function next() {
      if (nextCalled) {
        warn$1('transition.next() should be called only once.');
        return;
      }
      nextCalled = true;
      if (transition.aborted) {
        cleanup && cleanup();
        return;
      }
      cb && cb();
    };
    // interpret a boolean (or promise-of-boolean) hook result
    var nextWithBoolean = function nextWithBoolean(res) {
      if (typeof res === 'boolean') {
        res ? next() : abort();
      } else if (isPromise(res)) {
        res.then(function (ok) {
          ok ? next() : abort();
        }, onPromiseError);
      } else if (!hook.length) {
        next();
      }
    };
    // run the hook result through processData before advancing
    var nextWithData = function nextWithData(data) {
      var res = undefined;
      try {
        res = processData(data);
      } catch (err) {
        return onError(err);
      }
      if (isPromise(res)) {
        res.then(next, onPromiseError);
      } else {
        next();
      }
    };
    // expose a clone of the transition object, so that each
    // hook gets a clean copy and prevent the user from
    // messing with the internals.
    var exposed = {
      to: transition.to,
      from: transition.from,
      abort: abort,
      next: processData ? nextWithData : next,
      redirect: function redirect() {
        transition.redirect.apply(transition, arguments);
      }
    };
    // actually call the hook
    var res = undefined;
    try {
      res = hook.call(context, exposed);
    } catch (err) {
      return onError(err);
    }
    if (expectBoolean) {
      // boolean hooks
      nextWithBoolean(res);
    } else if (isPromise(res)) {
      // promise
      if (processData) {
        res.then(nextWithData, onPromiseError);
      } else {
        res.then(next, onPromiseError);
      }
    } else if (processData && isPlainOjbect(res)) {
      // data promise sugar
      // NOTE(review): 'isPlainOjbect' is misspelled but matches the
      // (equally misspelled) helper defined after this class; it duplicates
      // isPlainObject above.
      nextWithData(res);
    } else if (!hook.length) {
      next();
    }
  };

  /**
   * Call a single hook or an array of async hooks in series.
   *
   * @param {Array} hooks
   * @param {*} context
   * @param {Function} cb
   * @param {Object} [options]
   */
  RouteTransition.prototype.callHooks = function callHooks(hooks, context, cb, options) {
    var _this = this;
    if (Array.isArray(hooks)) {
      this.runQueue(hooks, function (hook, _, next) {
        // skip remaining hooks once the transition is aborted
        if (!_this.aborted) {
          _this.callHook(hook, context, next, options);
        }
      }, cb);
    } else {
      this.callHook(hooks, context, cb, options);
    }
  };

  return RouteTransition;
})();

// (misspelling of isPlainObject; kept because callHook above references it)
function isPlainOjbect(val) {
  return Object.prototype.toString.call(val) === '[object Object]';
}

// Convert an array-like value to a real Array; [] for falsy input.
function toArray(val) {
  return val ?
  Array.prototype.slice.call(val) : [];
}

// route-config keys that are internal and must not be copied onto a Route
var internalKeysRE = /^(component|subRoutes|fullPath)$/;

/**
 * Route Context Object
 *
 * Immutable snapshot of a matched route: custom config fields, query,
 * params, path and the matched handler frames.
 *
 * @param {String} path
 * @param {Router} router
 */
var Route = function Route(path, router) {
  var _this = this;
  babelHelpers.classCallCheck(this, Route);
  var matched = router._recognizer.recognize(path);
  if (matched) {
    // copy all custom fields from route configs
    [].forEach.call(matched, function (match) {
      for (var key in match.handler) {
        if (!internalKeysRE.test(key)) {
          _this[key] = match.handler[key];
        }
      }
    });
    // set query and params
    this.query = matched.queryParams;
    this.params = [].reduce.call(matched, function (prev, cur) {
      if (cur.params) {
        for (var key in cur.params) {
          prev[key] = cur.params[key];
        }
      }
      return prev;
    }, {});
  }
  // expose path and router
  this.path = path;
  // for internal use
  this.matched = matched || router._notFoundHandler;
  // internal reference to router
  Object.defineProperty(this, 'router', {
    enumerable: false,
    value: router
  });
  // Important: freeze self to prevent observation
  Object.freeze(this);
};

// Patch Vue's init/destroy so every component exposes $router/$route and is
// tracked in router._children.
function applyOverride (Vue) {
  var _Vue$util = Vue.util;
  var extend = _Vue$util.extend;
  var isArray = _Vue$util.isArray;
  var defineReactive = _Vue$util.defineReactive;
  // override Vue's init and destroy process to keep track of router instances
  var init = Vue.prototype._init;
  Vue.prototype._init = function (options) {
    options = options || {};
    var root = options._parent || options.parent || this;
    var router = root.$router;
    var route = root.$route;
    if (router) {
      // expose router
      this.$router = router;
      router._children.push(this);
      /* istanbul ignore if */
      if (this._defineMeta) {
        // 0.12
        this._defineMeta('$route', route);
      } else {
        // 1.0
        defineReactive(this, '$route', route);
      }
    }
    init.call(this, options);
  };
  var destroy = Vue.prototype._destroy;
  Vue.prototype._destroy = function () {
    if (!this._isBeingDestroyed && this.$router) {
      this.$router._children.$remove(this);
    }
    destroy.apply(this, arguments);
  };
  // 1.0 only: enable route mixins
  var strats = Vue.config.optionMergeStrategies;
  var hooksToMergeRE = /^(data|activate|deactivate)$/;
  if (strats) {
    strats.route = function (parentVal, childVal) {
      if (!childVal) return parentVal;
      if (!parentVal) return childVal;
      var ret = {};
      extend(ret, parentVal);
      for (var key in childVal) {
        var a = ret[key];
        var b = childVal[key];
        // for data, activate and deactivate, we need to merge them into
        // arrays similar to lifecycle hooks.
        if (a && hooksToMergeRE.test(key)) {
          ret[key] = (isArray(a) ? a : [a]).concat(b);
        } else {
          ret[key] = b;
        }
      }
      return ret;
    };
  }
}

// Install the <router-view> element directive.
function View (Vue) {
  var _ = Vue.util;
  var componentDef =
  // 0.12
  Vue.directive('_component') ||
  // 1.0
  Vue.internalDirectives.component;
  // <router-view> extends the internal component directive
  var viewDef = _.extend({}, componentDef);
  // with some overrides
  _.extend(viewDef, {
    _isRouterView: true,
    bind: function bind() {
      var route = this.vm.$route;
      /* istanbul ignore if */
      if (!route) {
        warn$1('<router-view> can only be used inside a ' + 'router-enabled app.');
        return;
      }
      // force dynamic directive so v-component doesn't
      // attempt to build right now
      this._isDynamicLiteral = true;
      // finally, init by delegating to v-component
      componentDef.bind.call(this);
      // locate the parent view
      var parentView = undefined;
      var parent = this.vm;
      while (parent) {
        if (parent._routerView) {
          parentView = parent._routerView;
          break;
        }
        parent = parent.$parent;
      }
      if (parentView) {
        // register self as a child of the parent view,
        // instead of activating now. This is so that the
        // child's activate hook is called after the
        // parent's has resolved.
        this.parentView = parentView;
        parentView.childView = this;
      } else {
        // this is the root view!
        var router = route.router;
        router._rootView = this;
      }
      // handle late-rendered view
      // two possibilities:
      // 1. root view rendered after transition has been
      //    validated;
      // 2. child view rendered after parent view has been
      //    activated.
var transition = route.router._currentTransition; if (!parentView && transition.done || parentView && parentView.activated) { var depth = parentView ? parentView.depth + 1 : 0; activate(this, transition, depth); } }, unbind: function unbind() { if (this.parentView) { this.parentView.childView = null; } componentDef.unbind.call(this); } }); Vue.elementDirective('router-view', viewDef); } var trailingSlashRE = /\/$/; var regexEscapeRE = /[-.*+?^${}()|[\]\/\\]/g; var queryStringRE = /\?.*$/; // install v-link, which provides navigation support for // HTML5 history mode function Link (Vue) { var _Vue$util = Vue.util; var _bind = _Vue$util.bind; var isObject = _Vue$util.isObject; var addClass = _Vue$util.addClass; var removeClass = _Vue$util.removeClass; var onPriority = Vue.directive('on').priority; var LINK_UPDATE = '__vue-router-link-update__'; var activeId = 0; Vue.directive('link-active', { priority: 9999, bind: function bind() { var _this = this; var id = String(activeId++); // collect v-links contained within this element. // we need do this here before the parent-child relationship // gets messed up by terminal directives (if, for, components) var childLinks = this.el.querySelectorAll('[v-link]'); for (var i = 0, l = childLinks.length; i < l; i++) { var link = childLinks[i]; var existingId = link.getAttribute(LINK_UPDATE); var value = existingId ? existingId + ',' + id : id; // leave a mark on the link element which can be persisted // through fragment clones. 
link.setAttribute(LINK_UPDATE, value); } this.vm.$on(LINK_UPDATE, this.cb = function (link, path) { if (link.activeIds.indexOf(id) > -1) { link.updateClasses(path, _this.el); } }); }, unbind: function unbind() { this.vm.$off(LINK_UPDATE, this.cb); } }); Vue.directive('link', { priority: onPriority - 2, bind: function bind() { var vm = this.vm; /* istanbul ignore if */ if (!vm.$route) { warn$1('v-link can only be used inside a router-enabled app.'); return; } this.router = vm.$route.router; // update things when the route changes this.unwatch = vm.$watch('$route', _bind(this.onRouteUpdate, this)); // check v-link-active ids var activeIds = this.el.getAttribute(LINK_UPDATE); if (activeIds) { this.el.removeAttribute(LINK_UPDATE); this.activeIds = activeIds.split(','); } // no need to handle click if link expects to be opened // in a new window/tab. /* istanbul ignore if */ if (this.el.tagName === 'A' && this.el.getAttribute('target') === '_blank') { return; } // handle click this.handler = _bind(this.onClick, this); this.el.addEventListener('click', this.handler); }, update: function update(target) { this.target = target; if (isObject(target)) { this.append = target.append; this.exact = target.exact; this.prevActiveClass = this.activeClass; this.activeClass = target.activeClass; } this.onRouteUpdate(this.vm.$route); }, onClick: function onClick(e) { // don't redirect with control keys /* istanbul ignore if */ if (e.metaKey || e.ctrlKey || e.shiftKey) return; // don't redirect when preventDefault called /* istanbul ignore if */ if (e.defaultPrevented) return; // don't redirect on right click /* istanbul ignore if */ if (e.button !== 0) return; var target = this.target; if (target) { // v-link with expression, just go e.preventDefault(); this.router.go(target); } else { // no expression, delegate for an <a> inside var el = e.target; while (el.tagName !== 'A' && el !== this.el) { el = el.parentNode; } if (el.tagName === 'A' && sameOrigin(el)) { e.preventDefault(); var 
path = el.pathname; if (this.router.history.root) { path = path.replace(this.router.history.rootRE, ''); } this.router.go({ path: path, replace: target && target.replace, append: target && target.append }); } } }, onRouteUpdate: function onRouteUpdate(route) { // router.stringifyPath is dependent on current route // and needs to be called again whenver route changes. var newPath = this.router.stringifyPath(this.target); if (this.path !== newPath) { this.path = newPath; this.updateActiveMatch(); this.updateHref(); } if (this.activeIds) { this.vm.$emit(LINK_UPDATE, this, route.path); } else { this.updateClasses(route.path, this.el); } }, updateActiveMatch: function updateActiveMatch() { this.activeRE = this.path && !this.exact ? new RegExp('^' + this.path.replace(/\/$/, '').replace(queryStringRE, '').replace(regexEscapeRE, '\\$&') + '(\\/|$)') : null; }, updateHref: function updateHref() { if (this.el.tagName !== 'A') { return; } var path = this.path; var router = this.router; var isAbsolute = path.charAt(0) === '/'; // do not format non-hash relative paths var href = path && (router.mode === 'hash' || isAbsolute) ? 
router.history.formatPath(path, this.append) : path; if (href) { this.el.href = href; } else { this.el.removeAttribute('href'); } }, updateClasses: function updateClasses(path, el) { var activeClass = this.activeClass || this.router._linkActiveClass; // clear old class if (this.prevActiveClass && this.prevActiveClass !== activeClass) { toggleClasses(el, this.prevActiveClass, removeClass); } // remove query string before matching var dest = this.path.replace(queryStringRE, ''); path = path.replace(queryStringRE, ''); // add new class if (this.exact) { if (dest === path || // also allow additional trailing slash dest.charAt(dest.length - 1) !== '/' && dest === path.replace(trailingSlashRE, '')) { toggleClasses(el, activeClass, addClass); } else { toggleClasses(el, activeClass, removeClass); } } else { if (this.activeRE && this.activeRE.test(path)) { toggleClasses(el, activeClass, addClass); } else { toggleClasses(el, activeClass, removeClass); } } }, unbind: function unbind() { this.el.removeEventListener('click', this.handler); this.unwatch && this.unwatch(); } }); function sameOrigin(link) { return link.protocol === location.protocol && link.hostname === location.hostname && link.port === location.port; } // this function is copied from v-bind:class implementation until // we properly expose it... function toggleClasses(el, key, fn) { key = key.trim(); if (key.indexOf(' ') === -1) { fn(el, key); return; } var keys = key.split(/\s+/); for (var i = 0, l = keys.length; i < l; i++) { fn(el, keys[i]); } } } var historyBackends = { abstract: AbstractHistory, hash: HashHistory, html5: HTML5History }; // late bind during install var Vue = undefined; /** * Router constructor * * @param {Object} [options] */ var Router = (function () { function Router() { var _this = this; var _ref = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0]; var _ref$hashbang = _ref.hashbang; var hashbang = _ref$hashbang === undefined ? 
true : _ref$hashbang; var _ref$abstract = _ref.abstract; var abstract = _ref$abstract === undefined ? false : _ref$abstract; var _ref$history = _ref.history; var history = _ref$history === undefined ? false : _ref$history; var _ref$saveScrollPosition = _ref.saveScrollPosition; var saveScrollPosition = _ref$saveScrollPosition === undefined ? false : _ref$saveScrollPosition; var _ref$transitionOnLoad = _ref.transitionOnLoad; var transitionOnLoad = _ref$transitionOnLoad === undefined ? false : _ref$transitionOnLoad; var _ref$suppressTransitionError = _ref.suppressTransitionError; var suppressTransitionError = _ref$suppressTransitionError === undefined ? false : _ref$suppressTransitionError; var _ref$root = _ref.root; var root = _ref$root === undefined ? null : _ref$root; var _ref$linkActiveClass = _ref.linkActiveClass; var linkActiveClass = _ref$linkActiveClass === undefined ? 'v-link-active' : _ref$linkActiveClass; babelHelpers.classCallCheck(this, Router); /* istanbul ignore if */ if (!Router.installed) { throw new Error('Please install the Router with Vue.use() before ' + 'creating an instance.'); } // Vue instances this.app = null; this._children = []; // route recognizer this._recognizer = new RouteRecognizer(); this._guardRecognizer = new RouteRecognizer(); // state this._started = false; this._startCb = null; this._currentRoute = {}; this._currentTransition = null; this._previousTransition = null; this._notFoundHandler = null; this._notFoundRedirect = null; this._beforeEachHooks = []; this._afterEachHooks = []; // trigger transition on initial render? 
this._rendered = false; this._transitionOnLoad = transitionOnLoad; // history mode this._root = root; this._abstract = abstract; this._hashbang = hashbang; // check if HTML5 history is available var hasPushState = typeof window !== 'undefined' && window.history && window.history.pushState; this._history = history && hasPushState; this._historyFallback = history && !hasPushState; // create history object var inBrowser = Vue.util.inBrowser; this.mode = !inBrowser || this._abstract ? 'abstract' : this._history ? 'html5' : 'hash'; var History = historyBackends[this.mode]; this.history = new History({ root: root, hashbang: this._hashbang, onChange: function onChange(path, state, anchor) { _this._match(path, state, anchor); } }); // other options this._saveScrollPosition = saveScrollPosition; this._linkActiveClass = linkActiveClass; this._suppress = suppressTransitionError; } /** * Allow directly passing components to a route * definition. * * @param {String} path * @param {Object} handler */ // API =================================================== /** * Register a map of top-level paths. * * @param {Object} map */ Router.prototype.map = function map(_map) { for (var route in _map) { this.on(route, _map[route]); } return this; }; /** * Register a single root-level path * * @param {String} rootPath * @param {Object} handler * - {String} component * - {Object} [subRoutes] * - {Boolean} [forceRefresh] * - {Function} [before] * - {Function} [after] */ Router.prototype.on = function on(rootPath, handler) { if (rootPath === '*') { this._notFound(handler); } else { this._addRoute(rootPath, handler, []); } return this; }; /** * Set redirects. * * @param {Object} map */ Router.prototype.redirect = function redirect(map) { for (var path in map) { this._addRedirect(path, map[path]); } return this; }; /** * Set aliases. 
* * @param {Object} map */ Router.prototype.alias = function alias(map) { for (var path in map) { this._addAlias(path, map[path]); } return this; }; /** * Set global before hook. * * @param {Function} fn */ Router.prototype.beforeEach = function beforeEach(fn) { this._beforeEachHooks.push(fn); return this; }; /** * Set global after hook. * * @param {Function} fn */ Router.prototype.afterEach = function afterEach(fn) { this._afterEachHooks.push(fn); return this; }; /** * Navigate to a given path. * The path can be an object describing a named path in * the format of { name: '...', params: {}, query: {}} * The path is assumed to be already decoded, and will * be resolved against root (if provided) * * @param {String|Object} path * @param {Boolean} [replace] */ Router.prototype.go = function go(path) { var replace = false; var append = false; if (Vue.util.isObject(path)) { replace = path.replace; append = path.append; } path = this.stringifyPath(path); if (path) { this.history.go(path, replace, append); } }; /** * Short hand for replacing current path * * @param {String} path */ Router.prototype.replace = function replace(path) { if (typeof path === 'string') { path = { path: path }; } path.replace = true; this.go(path); }; /** * Start the router. * * @param {VueConstructor} App * @param {String|Element} container * @param {Function} [cb] */ Router.prototype.start = function start(App, container, cb) { /* istanbul ignore if */ if (this._started) { warn$1('already started.'); return; } this._started = true; this._startCb = cb; if (!this.app) { /* istanbul ignore if */ if (!App || !container) { throw new Error('Must start vue-router with a component and a ' + 'root container.'); } /* istanbul ignore if */ if (App instanceof Vue) { throw new Error('Must start vue-router with a component, not a ' + 'Vue instance.'); } this._appContainer = container; var Ctor = this._appConstructor = typeof App === 'function' ? 
App : Vue.extend(App); // give it a name for better debugging Ctor.options.name = Ctor.options.name || 'RouterApp'; } // handle history fallback in browsers that do not // support HTML5 history API if (this._historyFallback) { var _location = window.location; var _history = new HTML5History({ root: this._root }); var path = _history.root ? _location.pathname.replace(_history.rootRE, '') : _location.pathname; if (path && path !== '/') { _location.assign((_history.root || '') + '/' + this.history.formatPath(path) + _location.search); return; } } this.history.start(); }; /** * Stop listening to route changes. */ Router.prototype.stop = function stop() { this.history.stop(); this._started = false; }; /** * Normalize named route object / string paths into * a string. * * @param {Object|String|Number} path * @return {String} */ Router.prototype.stringifyPath = function stringifyPath(path) { var generatedPath = ''; if (path && typeof path === 'object') { if (path.name) { var extend = Vue.util.extend; var currentParams = this._currentTransition && this._currentTransition.to.params; var targetParams = path.params || {}; var params = currentParams ? extend(extend({}, currentParams), targetParams) : targetParams; generatedPath = encodeURI(this._recognizer.generate(path.name, params)); } else if (path.path) { generatedPath = encodeURI(path.path); } if (path.query) { // note: the generated query string is pre-URL-encoded by the recognizer var query = this._recognizer.generateQueryString(path.query); if (generatedPath.indexOf('?') > -1) { generatedPath += '&' + query.slice(1); } else { generatedPath += query; } } } else { generatedPath = encodeURI(path ? path + '' : ''); } return generatedPath; }; // Internal methods ====================================== /** * Add a route containing a list of segments to the internal * route recognizer. Will be called recursively to add all * possible sub-routes. 
* * @param {String} path * @param {Object} handler * @param {Array} segments */ Router.prototype._addRoute = function _addRoute(path, handler, segments) { guardComponent(path, handler); handler.path = path; handler.fullPath = (segments.reduce(function (path, segment) { return path + segment.path; }, '') + path).replace('//', '/'); segments.push({ path: path, handler: handler }); this._recognizer.add(segments, { as: handler.name }); // add sub routes if (handler.subRoutes) { for (var subPath in handler.subRoutes) { // recursively walk all sub routes this._addRoute(subPath, handler.subRoutes[subPath], // pass a copy in recursion to avoid mutating // across branches segments.slice()); } } }; /** * Set the notFound route handler. * * @param {Object} handler */ Router.prototype._notFound = function _notFound(handler) { guardComponent('*', handler); this._notFoundHandler = [{ handler: handler }]; }; /** * Add a redirect record. * * @param {String} path * @param {String} redirectPath */ Router.prototype._addRedirect = function _addRedirect(path, redirectPath) { if (path === '*') { this._notFoundRedirect = redirectPath; } else { this._addGuard(path, redirectPath, this.replace); } }; /** * Add an alias record. * * @param {String} path * @param {String} aliasPath */ Router.prototype._addAlias = function _addAlias(path, aliasPath) { this._addGuard(path, aliasPath, this._match); }; /** * Add a path guard. * * @param {String} path * @param {String} mappedPath * @param {Function} handler */ Router.prototype._addGuard = function _addGuard(path, mappedPath, _handler) { var _this2 = this; this._guardRecognizer.add([{ path: path, handler: function handler(match, query) { var realPath = mapParams(mappedPath, match.params, query); _handler.call(_this2, realPath); } }]); }; /** * Check if a path matches any redirect records. * * @param {String} path * @return {Boolean} - if true, will skip normal match. 
*/ Router.prototype._checkGuard = function _checkGuard(path) { var matched = this._guardRecognizer.recognize(path, true); if (matched) { matched[0].handler(matched[0], matched.queryParams); return true; } else if (this._notFoundRedirect) { matched = this._recognizer.recognize(path); if (!matched) { this.replace(this._notFoundRedirect); return true; } } }; /** * Match a URL path and set the route context on vm, * triggering view updates. * * @param {String} path * @param {Object} [state] * @param {String} [anchor] */ Router.prototype._match = function _match(path, state, anchor) { var _this3 = this; if (this._checkGuard(path)) { return; } var currentRoute = this._currentRoute; var currentTransition = this._currentTransition; if (currentTransition) { if (currentTransition.to.path === path) { // do nothing if we have an active transition going to the same path return; } else if (currentRoute.path === path) { // We are going to the same path, but we also have an ongoing but // not-yet-validated transition. Abort that transition and reset to // prev transition. currentTransition.aborted = true; this._currentTransition = this._prevTransition; return; } else { // going to a totally different path. abort ongoing transition. currentTransition.aborted = true; } } // construct new route and transition context var route = new Route(path, this); var transition = new RouteTransition(this, route, currentRoute); // current transition is updated right now. // however, current route will only be updated after the transition has // been validated. 
this._prevTransition = currentTransition; this._currentTransition = transition; if (!this.app) { (function () { // initial render var router = _this3; _this3.app = new _this3._appConstructor({ el: _this3._appContainer, created: function created() { this.$router = router; }, _meta: { $route: route } }); })(); } // check global before hook var beforeHooks = this._beforeEachHooks; var startTransition = function startTransition() { transition.start(function () { _this3._postTransition(route, state, anchor); }); }; if (beforeHooks.length) { transition.runQueue(beforeHooks, function (hook, _, next) { if (transition === _this3._currentTransition) { transition.callHook(hook, null, next, { expectBoolean: true }); } }, startTransition); } else { startTransition(); } if (!this._rendered && this._startCb) { this._startCb.call(null); } // HACK: // set rendered to true after the transition start, so // that components that are acitvated synchronously know // whether it is the initial render. this._rendered = true; }; /** * Set current to the new transition. * This is called by the transition object when the * validation of a route has succeeded. * * @param {Transition} transition */ Router.prototype._onTransitionValidated = function _onTransitionValidated(transition) { // set current route var route = this._currentRoute = transition.to; // update route context for all children if (this.app.$route !== route) { this.app.$route = route; this._children.forEach(function (child) { child.$route = route; }); } // call global after hook if (this._afterEachHooks.length) { this._afterEachHooks.forEach(function (hook) { return hook.call(null, { to: transition.to, from: transition.from }); }); } this._currentTransition.done = true; }; /** * Handle stuff after the transition. 
* * @param {Route} route * @param {Object} [state] * @param {String} [anchor] */ Router.prototype._postTransition = function _postTransition(route, state, anchor) { // handle scroll positions // saved scroll positions take priority // then we check if the path has an anchor var pos = state && state.pos; if (pos && this._saveScrollPosition) { Vue.nextTick(function () { window.scrollTo(pos.x, pos.y); }); } else if (anchor) { Vue.nextTick(function () { var el = document.getElementById(anchor.slice(1)); if (el) { window.scrollTo(window.scrollX, el.offsetTop); } }); } }; return Router; })(); function guardComponent(path, handler) { var comp = handler.component; if (Vue.util.isPlainObject(comp)) { comp = handler.component = Vue.extend(comp); } /* istanbul ignore if */ if (typeof comp !== 'function') { handler.component = null; warn$1('invalid component for route "' + path + '".'); } } /* Installation */ Router.installed = false; /** * Installation interface. * Install the necessary directives. */ Router.install = function (externalVue) { /* istanbul ignore if */ if (Router.installed) { warn$1('already installed.'); return; } Vue = externalVue; applyOverride(Vue); View(Vue); Link(Vue); exports$1.Vue = Vue; Router.installed = true; }; // auto install /* istanbul ignore if */ if (typeof window !== 'undefined' && window.Vue) { window.Vue.use(Router); } return Router; }));
PypiClean
/Homevee_Dev-0.0.0.0-py3-none-any.whl/Homevee/Item/Device/Sensor/ZWaveSensor.py
from Homevee.Exception import *
from Homevee.Item.Device.Sensor import Sensor
from Homevee.Utils.Database import Database
from Homevee.Utils.DeviceTypes import ZWAVE_SENSOR


class ZWaveSensor(Sensor):
    """A Z-Wave sensor device, persisted in the ZWAVE_SENSOREN table."""

    def __init__(self, name, icon, location, save_data, sensor_type, id=None, value=None):
        super(ZWaveSensor, self).__init__(name, icon, location, save_data,
                                          sensor_type, id=id, value=value)

    def get_device_type(self):
        """Return the device type constant identifying Z-Wave sensors."""
        return ZWAVE_SENSOR

    def delete(self, db=None):
        """Delete this sensor's row from the database.

        Returns True on success, False on any database error; callers
        check the boolean result rather than handling exceptions.
        """
        try:
            Database.delete("DELETE FROM ZWAVE_SENSOREN WHERE ID == :id",
                            {'id': self.id}, db)
            return True
        except Exception:
            return False

    def save_to_db(self, db=None):
        """Insert this sensor (when it has no id yet) or update its row.

        Raises:
            DatabaseSaveFailedException: if the insert/update fails.
        """
        try:
            # insert
            if self.id is None or self.id == "":
                # BUGFIX: the VALUE column was listed but its :value
                # placeholder and bound parameter were missing, so the
                # column list and VALUES list did not match.
                Database.insert("INSERT INTO ZWAVE_SENSOREN (RAUM, SHORTFORM, ICON, SAVE_DATA, "
                                "SENSOR_TYPE, VALUE) VALUES (:location, :name, :icon, :save_data, "
                                ":sensor_type, :value)",
                                {'location': self.location, 'name': self.name,
                                 'icon': self.icon, 'save_data': self.save_data,
                                 'sensor_type': self.sensor_type, 'value': self.value}, db)
            # update
            else:
                # BUGFIX: a space was missing before WHERE, producing the
                # invalid SQL fragment "VALUE = :valueWHERE ID = :id".
                Database.update("UPDATE ZWAVE_SENSOREN SET RAUM = :location, SHORTFORM = :name, "
                                "ICON = :icon, SAVE_DATA = :save_data, "
                                "SENSOR_TYPE = :sensor_type, VALUE = :value "
                                "WHERE ID = :id",
                                {'location': self.location, 'name': self.name,
                                 'icon': self.icon, 'save_data': self.save_data,
                                 'sensor_type': self.sensor_type, 'value': self.value,
                                 'id': self.id}, db)
            # TODO: assign the generated row id to self.id after an insert
        except Exception:
            # Corrected message: this saves a ZWaveSensor, not a room.
            raise DatabaseSaveFailedException("Could not save ZWaveSensor to database")

    def build_dict(self):
        """Return a plain-dict representation of this sensor."""
        # local renamed from 'dict' to avoid shadowing the builtin
        data = {
            'name': self.name,
            'icon': self.icon,
            'value': self.value,
            'id': self.id,
            'location': self.location,
            'save_data': self.save_data,
            'sensor_type': self.sensor_type
        }
        return data

    @staticmethod
    def load_all_ids_from_db(ids, db=None):
        """Load all Z-Wave sensors whose ID is contained in *ids*."""
        # one positional '?' placeholder per id
        return ZWaveSensor.load_all_from_db(
            'SELECT * FROM ZWAVE_SENSOREN WHERE ID IN (%s)' % ','.join('?' * len(ids)),
            ids, db)

    @staticmethod
    def load_all_from_db(query, params, db=None):
        """Run *query* with *params* and map each result row to a ZWaveSensor."""
        items = []
        for result in Database.select_all(query, params, db):
            item = ZWaveSensor(result['SHORTFORM'], result['ICON'], result['RAUM'],
                               result['SAVE_DATA'], result['SENSOR_TYPE'],
                               result['ID'], result['VALUE'])
            items.append(item)
        return items

    @staticmethod
    def load_all(db=None):
        """Load every Z-Wave sensor from the database."""
        return ZWaveSensor.load_all_from_db('SELECT * FROM ZWAVE_SENSOREN', {}, db)

    @staticmethod
    def create_from_dict(dict):
        """Build a ZWaveSensor from a dict as produced by build_dict().

        Raises:
            InvalidParametersException: if a required key is missing or
            the argument is not a mapping.
        """
        try:
            name = dict['name']
            id = dict['id']
            location = dict['location']
            value = dict['value']
            icon = dict['icon']
            save_data = dict['save_data']
            sensor_type = dict['sensor_type']
            return ZWaveSensor(name, icon, location, save_data, sensor_type, id, value)
        except (KeyError, TypeError):
            # narrowed from a bare except: only missing/invalid keys are expected
            raise InvalidParametersException(
                "Invalid parameters for ZWaveSensor.create_from_dict()")
PypiClean
/Curp-1.3.1.tar.gz/Curp-1.3.1/curp/volume/calrdf.py
from __future__ import print_function import math import numpy def cal_rdf(crd, num_rs, rmax=5.0, dr=0.1, per_area=True): """Calclate a radial distribution function on each atom.""" natom = len(crd) rdfs = numpy.zeros((natom, num_rs)) # rdf on each atom. for rdf_i, r_i in zip(rdfs, crd): for r_j in crd: r_ij = r_i - r_j l_ij = math.sqrt(numpy.dot(r_ij, r_ij)) if l_ij <= 0.1: continue if l_ij >= rmax: continue rindex = int(round(l_ij/dr)) - 1 rdf_i[rindex] += 1 if per_area: for rindex in range(num_rs): r = dr * (rindex + 1) rdfs[:, rindex] = rdfs[:, rindex] / (r*r) return rdfs # replace the calrdf routine to fortran one import lib_calrdf cal_rdf = lib_calrdf.calrdf def average_rdf(parser, rmax=5.0, dr=0.1 , interval=1, average=True, per_area=True): """Calculate a average rdf on each atom.""" num_rs = int(math.ceil(rmax/dr)) ntraj = 0 # calculate first rdfs i = 0 for istep, (crd, box) in parser: i += 1 istep, (crd, box) = parser.next() if i == interval: ntraj += 1 break natom = len(crd) rdfs_total = cal_rdf(crd, num_rs, rmax=rmax, dr=dr, per_area=False) # calclate rest rdfs i = 0 for istep, (crd, box) in parser: i += 1 if i == interval: ntraj += 1 rdfs = cal_rdf(crd, num_rs, rmax=rmax, dr=dr, per_area=False) rdfs_total += rdfs i = 0 if per_area: for rindex in range(num_rs): r = dr * (rindex + 1) rdfs_total[:, rindex] = rdfs_total[:, rindex] / (r*r) if average: return rdfs_total/ntraj else: return rdfs_total if __name__ == '__main__': import os, sys topdir = os.path.abspath( os.path.join(os.path.dirname(__file__), '..', 'parser', 'amber')) if topdir not in sys.path: sys.path.insert(0, topdir) import trajectory crd_fn = '../parser/amber/test/sam-nwat.mdcrd' parser = trajectory.CoordinateParser(crd_fn, 1799) rmax = 2.5 dr = 0.01 from benchmarker import Benchmarker with Benchmarker(width=30) as bm: # with bm('python code: calculation'): # rdfs_py = cal_rdf(crd, num_rs, rmax=rmax, dr=dr, per_area=True) # with bm('fortran code: calculation'): # rdfs_for = 
lib_calrdf.calrdf( # crd, num_rs, rmax=rmax, dr=dr, per_area=True) # with bm('printing'): # for iatm_1, (rdf_py, rdf_for) in enumerate(zip(rdfs_py, rdfs_for)): # print() # print(iatm_1+1, rdf_py-rdf_for) with bm('calculate average rdf'): rdfs = average_rdf(parser, rmax=rmax, dr=dr, interval=1, average=False, per_area=False) with bm('printing average rdf'): for iatm_1, rdf in enumerate(rdfs): print() print(iatm_1+1) for rd in rdf: print(rd)
PypiClean
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/__init__.py
__revision__ = "src/engine/SCons/Tool/__init__.py 2014/07/05 09:42:21 garyo" import imp import sys import re import os import shutil import SCons.Builder import SCons.Errors import SCons.Node.FS import SCons.Scanner import SCons.Scanner.C import SCons.Scanner.Prog DefaultToolpath=[] CScanner = SCons.Scanner.C.CScanner() ProgramScanner = SCons.Scanner.Prog.ProgramScanner() SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner') CSuffixes = [".c", ".C", ".cxx", ".cpp", ".c++", ".cc", ".h", ".H", ".hxx", ".hpp", ".hh", ".F", ".fpp", ".FPP", ".m", ".mm", ".S", ".spp", ".SPP", ".sx"] DSuffixes = ['.d'] IDLSuffixes = [".idl", ".IDL"] LaTeXSuffixes = [".tex", ".ltx", ".latex"] for suffix in CSuffixes: SourceFileScanner.add_scanner(suffix, CScanner) class Tool(object): def __init__(self, name, toolpath=[], **kw): self.name = name self.toolpath = toolpath + DefaultToolpath # remember these so we can merge them into the call self.init_kw = kw module = self._tool_module() self.generate = module.generate self.exists = module.exists if hasattr(module, 'options'): self.options = module.options def _tool_module(self): # TODO: Interchange zipimport with normal initilization for better error reporting oldpythonpath = sys.path sys.path = self.toolpath + sys.path try: try: file, path, desc = imp.find_module(self.name, self.toolpath) try: return imp.load_module(self.name, file, path, desc) finally: if file: file.close() except ImportError, e: if str(e)!="No module named %s"%self.name: raise SCons.Errors.EnvironmentError(e) try: import zipimport except ImportError: pass else: for aPath in self.toolpath: try: importer = zipimport.zipimporter(aPath) return importer.load_module(self.name) except ImportError, e: pass finally: sys.path = oldpythonpath full_name = 'SCons.Tool.' 
+ self.name try: return sys.modules[full_name] except KeyError: try: smpath = sys.modules['SCons.Tool'].__path__ try: file, path, desc = imp.find_module(self.name, smpath) module = imp.load_module(full_name, file, path, desc) setattr(SCons.Tool, self.name, module) if file: file.close() return module except ImportError, e: if str(e)!="No module named %s"%self.name: raise SCons.Errors.EnvironmentError(e) try: import zipimport importer = zipimport.zipimporter( sys.modules['SCons.Tool'].__path__[0] ) module = importer.load_module(full_name) setattr(SCons.Tool, self.name, module) return module except ImportError, e: m = "No tool named '%s': %s" % (self.name, e) raise SCons.Errors.EnvironmentError(m) except ImportError, e: m = "No tool named '%s': %s" % (self.name, e) raise SCons.Errors.EnvironmentError(m) def __call__(self, env, *args, **kw): if self.init_kw is not None: # Merge call kws into init kws; # but don't bash self.init_kw. if kw is not None: call_kw = kw kw = self.init_kw.copy() kw.update(call_kw) else: kw = self.init_kw env.Append(TOOLS = [ self.name ]) if hasattr(self, 'options'): import SCons.Variables if 'options' not in env: from SCons.Script import ARGUMENTS env['options']=SCons.Variables.Variables(args=ARGUMENTS) opts=env['options'] self.options(opts) opts.Update(env) self.generate(env, *args, **kw) def __str__(self): return self.name ########################################################################## # Create common executable program / library / object builders def createProgBuilder(env): """This is a utility function that creates the Program Builder in an Environment if it is not there already. If it is already there, we return the existing one. 
""" try: program = env['BUILDERS']['Program'] except KeyError: import SCons.Defaults program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction, emitter = '$PROGEMITTER', prefix = '$PROGPREFIX', suffix = '$PROGSUFFIX', src_suffix = '$OBJSUFFIX', src_builder = 'Object', target_scanner = ProgramScanner) env['BUILDERS']['Program'] = program return program def createStaticLibBuilder(env): """This is a utility function that creates the StaticLibrary Builder in an Environment if it is not there already. If it is already there, we return the existing one. """ try: static_lib = env['BUILDERS']['StaticLibrary'] except KeyError: action_list = [ SCons.Action.Action("$ARCOM", "$ARCOMSTR") ] if env.Detect('ranlib'): ranlib_action = SCons.Action.Action("$RANLIBCOM", "$RANLIBCOMSTR") action_list.append(ranlib_action) static_lib = SCons.Builder.Builder(action = action_list, emitter = '$LIBEMITTER', prefix = '$LIBPREFIX', suffix = '$LIBSUFFIX', src_suffix = '$OBJSUFFIX', src_builder = 'StaticObject') env['BUILDERS']['StaticLibrary'] = static_lib env['BUILDERS']['Library'] = static_lib return static_lib def VersionShLibLinkNames(version, libname, env): """Generate names of symlinks to the versioned shared library""" Verbose = False platform = env.subst('$PLATFORM') shlib_suffix = env.subst('$SHLIBSUFFIX') shlink_flags = SCons.Util.CLVar(env.subst('$SHLINKFLAGS')) linknames = [] if version.count(".") != 2: # We need a version string of the form x.y.z to proceed # Several changes need to be made to support versions like x.y raise ValueError if platform == 'darwin': # For libfoo.x.y.z.dylib, linknames libfoo.so suffix_re = re.escape('.' 
+ version + shlib_suffix) linkname = re.sub(suffix_re, shlib_suffix, libname) if Verbose: print "VersionShLibLinkNames: linkname = ",linkname linknames.append(linkname) elif platform == 'posix': if sys.platform.startswith('openbsd'): # OpenBSD uses x.y shared library versioning numbering convention # and doesn't use symlinks to backwards-compatible libraries return [] # For libfoo.so.x.y.z, linknames libfoo.so libfoo.so.x.y libfoo.so.x suffix_re = re.escape(shlib_suffix + '.' + version) # First linkname has no version number linkname = re.sub(suffix_re, shlib_suffix, libname) if Verbose: print "VersionShLibLinkNames: linkname = ",linkname linknames.append(linkname) versionparts = version.split('.') major_name = linkname + "." + versionparts[0] minor_name = major_name + "." + versionparts[1] #Only add link for major_name #for linkname in [major_name, minor_name]: for linkname in [major_name, ]: if Verbose: print "VersionShLibLinkNames: linkname ",linkname, ", target ",libname linknames.append(linkname) # note: no Windows case here (win32 or cygwin); # MSVC doesn't support this type of versioned shared libs. # (could probably do something for MinGW though) return linknames def VersionedSharedLibrary(target = None, source= None, env=None): """Build a shared library. 
If the environment has SHLIBVERSION defined make a versioned shared library and create the appropriate symlinks for the platform we are on""" Verbose = False try: version = env.subst('$SHLIBVERSION') except KeyError: version = None # libname includes the version number if one was given libname = target[0].name platform = env.subst('$PLATFORM') shlib_suffix = env.subst('$SHLIBSUFFIX') shlink_flags = SCons.Util.CLVar(env.subst('$SHLINKFLAGS')) if Verbose: print "VersionShLib: libname = ",libname print "VersionShLib: platform = ",platform print "VersionShLib: shlib_suffix = ",shlib_suffix print "VersionShLib: target = ",str(target[0]) if version: # set the shared library link flags if platform == 'posix': shlink_flags += [ '-Wl,-Bsymbolic' ] # OpenBSD doesn't usually use SONAME for libraries if not sys.platform.startswith('openbsd'): # continue setup of shlink flags for all other POSIX systems suffix_re = re.escape(shlib_suffix + '.' + version) (major, age, revision) = version.split(".") # soname will have only the major version number in it soname = re.sub(suffix_re, shlib_suffix, libname) + '.' 
+ major shlink_flags += [ '-Wl,-soname=%s' % soname ] if Verbose: print " soname ",soname,", shlink_flags ",shlink_flags elif platform == 'cygwin': shlink_flags += [ '-Wl,-Bsymbolic', '-Wl,--out-implib,${TARGET.base}.a' ] elif platform == 'darwin': shlink_flags += [ '-current_version', '%s' % version, '-compatibility_version', '%s' % version, '-undefined', 'dynamic_lookup' ] if Verbose: print "VersionShLib: shlink_flags = ",shlink_flags envlink = env.Clone() envlink['SHLINKFLAGS'] = shlink_flags else: envlink = env result = SCons.Defaults.ShLinkAction(target, source, envlink) if version: # here we need the full pathname so the links end up in the right directory libname = target[0].path linknames = VersionShLibLinkNames(version, libname, env) if Verbose: print "VerShLib: linknames ",linknames # Here we just need the file name w/o path as the target of the link lib_ver = target[0].name # make symlink of adjacent names in linknames for count in range(len(linknames)): linkname = linknames[count] if count > 0: try: os.remove(lastlinkname) except: pass os.symlink(os.path.basename(linkname),lastlinkname) if Verbose: print "VerShLib: made sym link of %s -> %s" % (lastlinkname,linkname) lastlinkname = linkname # finish chain of sym links with link to the actual library if len(linknames)>0: try: os.remove(lastlinkname) except: pass os.symlink(lib_ver,lastlinkname) if Verbose: print "VerShLib: made sym link of %s -> %s" % (linkname, lib_ver) return result # Fix http://scons.tigris.org/issues/show_bug.cgi?id=2903 : # Ensure we still depend on SCons.Defaults.ShLinkAction command line which is $SHLINKCOM. # This was tricky because we don't want changing LIBPATH to cause a rebuild, but # changing other link args should. LIBPATH has $( ... $) around it but until this # fix, when the varlist was added to the build sig those ignored parts weren't getting # ignored. 
# Action used by SharedLibrary below; varlist keeps the build signature
# dependent on $SHLINKCOM (see the bug-2903 note above this line).
ShLibAction = SCons.Action.Action(VersionedSharedLibrary, None, varlist=['SHLINKCOM'])

def createSharedLibBuilder(env):
    """This is a utility function that creates the SharedLibrary
    Builder in an Environment if it is not there already.

    If it is already there, we return the existing one.
    """
    try:
        shared_lib = env['BUILDERS']['SharedLibrary']
    except KeyError:
        import SCons.Defaults
        action_list = [ SCons.Defaults.SharedCheck,
                        ShLibAction ]
        shared_lib = SCons.Builder.Builder(action = action_list,
                                           emitter = "$SHLIBEMITTER",
                                           prefix = '$SHLIBPREFIX',
                                           suffix = '$SHLIBSUFFIX',
                                           target_scanner = ProgramScanner,
                                           src_suffix = '$SHOBJSUFFIX',
                                           src_builder = 'SharedObject')
        env['BUILDERS']['SharedLibrary'] = shared_lib
    return shared_lib

def createLoadableModuleBuilder(env):
    """This is a utility function that creates the LoadableModule
    Builder in an Environment if it is not there already.

    If it is already there, we return the existing one.
    """
    try:
        ld_module = env['BUILDERS']['LoadableModule']
    except KeyError:
        import SCons.Defaults
        action_list = [ SCons.Defaults.SharedCheck,
                        SCons.Defaults.LdModuleLinkAction ]
        ld_module = SCons.Builder.Builder(action = action_list,
                                          emitter = "$LDMODULEEMITTER",
                                          prefix = '$LDMODULEPREFIX',
                                          suffix = '$LDMODULESUFFIX',
                                          target_scanner = ProgramScanner,
                                          src_suffix = '$SHOBJSUFFIX',
                                          src_builder = 'SharedObject')
        env['BUILDERS']['LoadableModule'] = ld_module
    return ld_module

def createObjBuilders(env):
    """This is a utility function that creates the StaticObject
    and SharedObject Builders in an Environment if they
    are not there already.

    If they are there already, we return the existing ones.

    This is a separate function because soooo many Tools
    use this functionality.

    The return is a 2-tuple of (StaticObject, SharedObject)
    """
    try:
        static_obj = env['BUILDERS']['StaticObject']
    except KeyError:
        # action/emitter start as empty dicts; individual tools register
        # their per-suffix entries later.
        static_obj = SCons.Builder.Builder(action = {},
                                           emitter = {},
                                           prefix = '$OBJPREFIX',
                                           suffix = '$OBJSUFFIX',
                                           src_builder = ['CFile', 'CXXFile'],
                                           source_scanner = SourceFileScanner,
                                           single_source = 1)
        # 'Object' is installed as an alias for 'StaticObject'.
        env['BUILDERS']['StaticObject'] = static_obj
        env['BUILDERS']['Object'] = static_obj

    try:
        shared_obj = env['BUILDERS']['SharedObject']
    except KeyError:
        shared_obj = SCons.Builder.Builder(action = {},
                                           emitter = {},
                                           prefix = '$SHOBJPREFIX',
                                           suffix = '$SHOBJSUFFIX',
                                           src_builder = ['CFile', 'CXXFile'],
                                           source_scanner = SourceFileScanner,
                                           single_source = 1)
        env['BUILDERS']['SharedObject'] = shared_obj

    return (static_obj, shared_obj)

def createCFileBuilders(env):
    """This is a utility function that creates the CFile/CXXFile
    Builders in an Environment if they
    are not there already.

    If they are there already, we return the existing ones.

    This is a separate function because soooo many Tools
    use this functionality.

    The return is a 2-tuple of (CFile, CXXFile)
    """
    try:
        c_file = env['BUILDERS']['CFile']
    except KeyError:
        c_file = SCons.Builder.Builder(action = {},
                                       emitter = {},
                                       suffix = {None:'$CFILESUFFIX'})
        env['BUILDERS']['CFile'] = c_file
        env.SetDefault(CFILESUFFIX = '.c')

    try:
        cxx_file = env['BUILDERS']['CXXFile']
    except KeyError:
        cxx_file = SCons.Builder.Builder(action = {},
                                         emitter = {},
                                         suffix = {None:'$CXXFILESUFFIX'})
        env['BUILDERS']['CXXFile'] = cxx_file
        env.SetDefault(CXXFILESUFFIX = '.cc')
    return (c_file, cxx_file)

##########################################################################
#  Create common Java builders

def CreateJarBuilder(env):
    # Create (and cache) the Jar builder on env.
    try:
        java_jar = env['BUILDERS']['Jar']
    except KeyError:
        fs = SCons.Node.FS.get_default_fs()
        jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR')
        # NOTE(review): '$JAVACLASSSUFIX' looks like a typo for
        # '$JAVACLASSSUFFIX' (cf. CreateJavaHBuilder below) -- confirm
        # against the variable actually set by the java tools.
        java_jar = SCons.Builder.Builder(action = jar_com,
                                         suffix = '$JARSUFFIX',
                                         src_suffix = '$JAVACLASSSUFIX',
                                         src_builder = 'JavaClassFile',
                                         source_factory = fs.Entry)
        env['BUILDERS']['Jar'] = java_jar
    return java_jar

def CreateJavaHBuilder(env):
    # Create (and cache) the JavaH builder on env.
    try:
        java_javah = env['BUILDERS']['JavaH']
    except KeyError:
        fs = SCons.Node.FS.get_default_fs()
        java_javah_com = SCons.Action.Action('$JAVAHCOM', '$JAVAHCOMSTR')
        java_javah = SCons.Builder.Builder(action = java_javah_com,
                                           src_suffix = '$JAVACLASSSUFFIX',
                                           target_factory = fs.Entry,
                                           source_factory = fs.File,
                                           src_builder = 'JavaClassFile')
        env['BUILDERS']['JavaH'] = java_javah
    return java_javah

def CreateJavaClassFileBuilder(env):
    # Create (and cache) the JavaClassFile builder on env.
    try:
        java_class_file = env['BUILDERS']['JavaClassFile']
    except KeyError:
        fs = SCons.Node.FS.get_default_fs()
        javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR')
        java_class_file = SCons.Builder.Builder(action = javac_com,
                                                emitter = {},
                                                #suffix = '$JAVACLASSSUFFIX',
                                                src_suffix = '$JAVASUFFIX',
                                                src_builder = ['JavaFile'],
                                                target_factory = fs.Entry,
                                                source_factory = fs.File)
        env['BUILDERS']['JavaClassFile'] = java_class_file
    return java_class_file

def CreateJavaClassDirBuilder(env):
    # Create (and cache) the JavaClassDir builder on env.
    try:
        java_class_dir = env['BUILDERS']['JavaClassDir']
    except KeyError:
        fs = SCons.Node.FS.get_default_fs()
        javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR')
        java_class_dir = SCons.Builder.Builder(action = javac_com,
                                               emitter = {},
                                               target_factory = fs.Dir,
                                               source_factory = fs.Dir)
        env['BUILDERS']['JavaClassDir'] = java_class_dir
    return java_class_dir

def CreateJavaFileBuilder(env):
    # Create (and cache) the JavaFile builder on env.
    try:
        java_file = env['BUILDERS']['JavaFile']
    except KeyError:
        java_file = SCons.Builder.Builder(action = {},
                                          emitter = {},
                                          suffix = {None:'$JAVASUFFIX'})
        env['BUILDERS']['JavaFile'] = java_file
        env['JAVASUFFIX'] = '.java'
    return java_file

class ToolInitializerMethod(object):
    """
    This is added to a construction environment in place of a
    method(s) normally called for a Builder (env.Object,
    env.StaticObject, etc.).  When called, it has its associated
    ToolInitializer object search the specified list of tools and
    apply the first one that exists to the construction environment.
    It then calls whatever builder was (presumably) added to the
    construction environment in place of this particular instance.
    """
    def __init__(self, name, initializer):
        """
        Note:  we store the tool name as __name__ so it can be used by
        the class that attaches this to a construction environment.
        """
        self.__name__ = name
        self.initializer = initializer

    def get_builder(self, env):
        """
        Returns the appropriate real Builder for this method name
        after having the associated ToolInitializer object apply
        the appropriate Tool module.
        """
        builder = getattr(env, self.__name__)

        self.initializer.apply_tools(env)

        # Re-fetch: applying the tools may have replaced this method
        # with a real Builder on the environment.
        builder = getattr(env, self.__name__)
        if builder is self:
            # There was no Builder added, which means no valid Tool
            # for this name was found (or possibly there's a mismatch
            # between the name we were called by and the Builder name
            # added by the Tool module).
            return None

        self.initializer.remove_methods(env)

        return builder

    def __call__(self, env, *args, **kw):
        """
        """
        builder = self.get_builder(env)
        if builder is None:
            return [], []
        return builder(*args, **kw)

class ToolInitializer(object):
    """
    A class for delayed initialization of Tools modules.

    Instances of this class associate a list of Tool modules with
    a list of Builder method names that will be added by those Tool
    modules.  As part of instantiating this object for a particular
    construction environment, we also add the appropriate
    ToolInitializerMethod objects for the various Builder methods
    that we want to use to delay Tool searches until necessary.
    """
    def __init__(self, env, tools, names):
        if not SCons.Util.is_List(tools):
            tools = [tools]
        if not SCons.Util.is_List(names):
            names = [names]
        self.env = env
        self.tools = tools
        self.names = names
        self.methods = {}
        for name in names:
            method = ToolInitializerMethod(name, self)
            self.methods[name] = method
            env.AddMethod(method)

    def remove_methods(self, env):
        """
        Removes the methods that were added by the tool initialization
        so we no longer copy and re-bind them when the construction
        environment gets cloned.
        """
        for method in self.methods.values():
            env.RemoveMethod(method)

    def apply_tools(self, env):
        """
        Searches the list of associated Tool modules for one that
        exists, and applies that to the construction environment.
        """
        for t in self.tools:
            tool = SCons.Tool.Tool(t)
            if tool.exists(env):
                env.Tool(tool)
                return

        # If we fall through here, there was no tool module found.
        # This is where we can put an informative error message
        # about the inability to find the tool.   We'll start doing
        # this as we cut over more pre-defined Builder+Tools to use
        # the ToolInitializer class.
def Initializers(env):
    """Install the delayed-initialization machinery for the Install,
    InstallAs and InstallVersionedLib builders on *env*.

    The real implementations live in the 'install' tool; until that tool
    is applied, calls are routed through ToolInitializer.
    """
    ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs', '_InternalInstallVersionedLib'])
    def Install(self, *args, **kw):
        return self._InternalInstall(*args, **kw)
    def InstallAs(self, *args, **kw):
        return self._InternalInstallAs(*args, **kw)
    def InstallVersionedLib(self, *args, **kw):
        return self._InternalInstallVersionedLib(*args, **kw)
    env.AddMethod(Install)
    env.AddMethod(InstallAs)
    env.AddMethod(InstallVersionedLib)

def FindTool(tools, env):
    """Return the name of the first tool in *tools* that exists for
    *env*, or None if none of them do."""
    for tool in tools:
        t = Tool(tool)
        if t.exists(env):
            return tool
    return None

def FindAllTools(tools, env):
    """Return the subset of *tools* whose Tool modules exist for *env*."""
    def ToolExists(tool, env=env):
        return Tool(tool).exists(env)
    return list(filter(ToolExists, tools))

def tool_list(platform, env):
    """Return the list of default tool names to initialize for
    *platform*, in the order [linker, C compiler, C++ compiler,
    assembler, archiver] (None entries are dropped), plus any
    platform-specific extras in other_plat_tools."""

    other_plat_tools=[]
    # XXX this logic about what tool to prefer on which platform
    #     should be moved into either the platform files or
    #     the tool files themselves.
    # The search orders here are described in the man page.  If you
    # change these search orders, update the man page as well.
    if str(platform) == 'win32':
        "prefer Microsoft tools on Windows"
        linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ]
        c_compilers = ['msvc', 'mingw', 'gcc', 'intelc', 'icl', 'icc', 'cc', 'bcc32' ]
        cxx_compilers = ['msvc', 'intelc', 'icc', 'g++', 'c++', 'bcc32' ]
        assemblers = ['masm', 'nasm', 'gas', '386asm' ]
        fortran_compilers = ['gfortran', 'g77', 'ifl', 'cvf', 'f95', 'f90', 'fortran']
        ars = ['mslib', 'ar', 'tlib']
        other_plat_tools = ['msvs']
    elif str(platform) == 'os2':
        "prefer IBM tools on OS/2"
        linkers = ['ilink', 'gnulink', ]#'mslink']
        c_compilers = ['icc', 'gcc',]# 'msvc', 'cc']
        cxx_compilers = ['icc', 'g++',]# 'msvc', 'c++']
        assemblers = ['nasm',]# 'masm', 'gas']
        fortran_compilers = ['ifl', 'g77']
        ars = ['ar',]# 'mslib']
    elif str(platform) == 'irix':
        "prefer MIPSPro on IRIX"
        linkers = ['sgilink', 'gnulink']
        c_compilers = ['sgicc', 'gcc', 'cc']
        cxx_compilers = ['sgic++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran']
        ars = ['sgiar']
    elif str(platform) == 'sunos':
        "prefer Forte tools on SunOS"
        linkers = ['sunlink', 'gnulink']
        c_compilers = ['suncc', 'gcc', 'cc']
        cxx_compilers = ['sunc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['sunf95', 'sunf90', 'sunf77', 'f95', 'f90', 'f77',
                             'gfortran', 'g77', 'fortran']
        ars = ['sunar']
    elif str(platform) == 'hpux':
        "prefer aCC tools on HP-UX"
        linkers = ['hplink', 'gnulink']
        c_compilers = ['hpcc', 'gcc', 'cc']
        cxx_compilers = ['hpc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran']
        ars = ['ar']
    elif str(platform) == 'aix':
        "prefer AIX Visual Age tools on AIX"
        linkers = ['aixlink', 'gnulink']
        c_compilers = ['aixcc', 'gcc', 'cc']
        cxx_compilers = ['aixc++', 'g++', 'c++']
        assemblers = ['as', 'gas']
        fortran_compilers = ['f95', 'f90', 'aixf77', 'g77', 'fortran']
        # Bug fix: this branch was missing an 'ars' assignment, so
        # "ar = FindTool(ars, env) or ars[0]" below raised NameError
        # whenever tool_list() ran on AIX.
        ars = ['ar']
    elif str(platform) == 'darwin':
        "prefer GNU tools on Mac OS X, except for some linkers and IBM tools"
        linkers = ['applelink', 'gnulink']
        c_compilers = ['gcc', 'cc']
        cxx_compilers = ['g++', 'c++']
        assemblers = ['as']
        fortran_compilers = ['gfortran', 'f95', 'f90', 'g77']
        ars = ['ar']
    elif str(platform) == 'cygwin':
        "prefer GNU tools on Cygwin, except for a platform-specific linker"
        linkers = ['cyglink', 'mslink', 'ilink']
        c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc']
        cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'c++']
        assemblers = ['gas', 'nasm', 'masm']
        ars = ['ar', 'mslib']
    else:
        "prefer GNU tools on all other platforms"
        linkers = ['gnulink', 'mslink', 'ilink']
        c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc']
        cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'c++']
        assemblers = ['gas', 'nasm', 'masm']
        ars = ['ar', 'mslib']

    c_compiler = FindTool(c_compilers, env) or c_compilers[0]

    # XXX this logic about what tool provides what should somehow be
    #     moved into the tool files themselves.
    if c_compiler and c_compiler == 'mingw':
        # MinGW contains a linker, C compiler, C++ compiler,
        # Fortran compiler, archiver and assembler:
        cxx_compiler = None
        linker = None
        assembler = None
        ar = None
    else:
        # Don't use g++ if the C compiler has built-in C++ support:
        if c_compiler in ('msvc', 'intelc', 'icc'):
            cxx_compiler = None
        else:
            cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0]
        linker = FindTool(linkers, env) or linkers[0]
        assembler = FindTool(assemblers, env) or assemblers[0]
        ar = FindTool(ars, env) or ars[0]

    # NOTE: fortran_compilers is assigned per platform but not included
    # in the returned tool list here.
    tools = [linker, c_compiler, cxx_compiler, assembler, ar]

    return [x for x in tools if x]

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
PypiClean
/M_A_1249-0.0.6.tar.gz/M_A_1249-0.0.6/MAnaLib/analysis/visualizations.py
def line(df, x_col, y_col, title='', x_title='', y_title='', color='blue', width=800, height=500, show_legend=True, filename=None):
    """
    Creates a line plot from a pandas DataFrame using Plotly.

    Parameters:
    df (pandas DataFrame): The DataFrame to plot.
    x_col (str): The name of the column to use for the x-axis.
    y_col (str): The name of the column to use for the y-axis.
    title (str): The title of the plot.
    x_title (str): The title of the x-axis.
    y_title (str): The title of the y-axis.
    color (str): The color of the line.
    width (int): The width of the plot in pixels.
    height (int): The height of the plot in pixels.
    show_legend (bool): Whether to show the legend or not.
    filename (str): The name of the file to save the plot to (including file extension).
        If not provided, the plot will not be saved.

    Returns:
    None
    """
    import plotly.graph_objs as go
    import os

    fig = go.Figure()
    fig.add_trace(go.Scatter(x=df[x_col], y=df[y_col], mode='lines', line_color=color, name=y_col))
    fig.update_layout(title=title, xaxis_title=x_title, yaxis_title=y_title,
                      width=width, height=height, showlegend=show_legend)
    fig.show()
    if filename:
        # Always save as HTML next to the current working directory.
        if not filename.endswith('.html'):
            filename += '.html'
        filepath = os.path.join(os.getcwd(), filename)
        fig.write_html(filepath)

def scatter(df, x_col, y_col, color_col=None, size_col=None, title=None, x_title=None,
            y_title=None, width=None, height=None, template='plotly_white', mode='markers',
            symbol='circle', opacity=0.7, marker=None, filename=None, auto_open=True):
    """
    Outputs a scatter plot based on data from a data frame using plotly.

    Parameters:
    df (pandas.DataFrame): The data frame containing the data to plot.
    x_col (str): The name of the column to use for the x-axis.
    y_col (str): The name of the column to use for the y-axis.
    color_col (str, optional): The name of the column to use for coloring the data points.
    size_col (str, optional): The name of the column to use for sizing the data points.
    title (str, optional): The title of the plot.
    x_title (str, optional): The title of the x-axis.
    y_title (str, optional): The title of the y-axis.
    width (int, optional): The width of the plot in pixels.
    height (int, optional): The height of the plot in pixels.
    template (str, optional): The plotly template to use.
    mode (str, optional): The trace mode ('markers', 'lines', 'lines+markers').
    symbol (str, optional): The marker symbol (only used if mode contains 'markers').
    opacity (float, optional): The opacity of the markers.
    marker (dict, optional): A dictionary of extra marker options.
    filename (str, optional): The filename to use for saving the plot.
        If not provided, the plot will not be saved.
    auto_open (bool, optional): Whether to automatically open the saved plot.

    Returns:
    fig: The plotly figure object.
    """
    import os
    import plotly.express as px

    # Bug fix: plotly.express.scatter() accepts neither `mode` nor `marker`
    # keyword arguments, and its `symbol` argument expects a *column name*,
    # not a literal marker symbol -- the previous call raised
    # TypeError/ValueError on every invocation.  Build the figure with the
    # supported px arguments first, then apply marker-level styling via
    # update_traces().
    # NOTE: trendline='ols' requires the `statsmodels` package at runtime.
    fig = px.scatter(df, x=x_col, y=y_col, color=color_col, size=size_col,
                     title=title, labels={x_col: x_title, y_col: y_title},
                     width=width, height=height, template=template,
                     opacity=opacity, trendline='ols')
    # Restyle only the data traces (mode 'markers'), leaving the OLS
    # trendline traces (mode 'lines') untouched.
    fig.update_traces(mode=mode, marker_symbol=symbol, selector=dict(mode='markers'))
    if marker:
        fig.update_traces(marker=marker, selector=dict(mode=mode))

    if filename:
        if not filename.endswith('.html'):
            filename = filename + '.html'
        filepath = os.path.join(os.getcwd(), filename)
        fig.write_html(filepath, auto_open=auto_open)
    return fig

def bar(df, x_col, y_col, color_col=None, title='', x_title='', y_title='', width=800, height=500,
        template='plotly_white', filename=None, auto_open=True):
    """
    Creates a bar chart from a pandas DataFrame using Plotly.

    Parameters:
    df (pandas DataFrame): The DataFrame to plot.
    x_col (str): The name of the column to use for the x-axis.
    y_col (str): The name of the column to use for the y-axis.
    color_col (str, optional): The name of the column to use for coloring the bars.
    title (str): The title of the plot.
    x_title (str): The title of the x-axis.
    y_title (str): The title of the y-axis.
    width (int): The width of the plot in pixels.
    height (int): The height of the plot in pixels.
    template (str): The plotly template to use.
    filename (str, optional): The filename to use for saving the plot.
        If not provided, the plot will not be saved.
    auto_open (bool): Whether to automatically open the plot in a new browser tab.

    Returns:
    None
    """
    import plotly.express as px
    import os

    fig = px.bar(df, x=x_col, y=y_col, color=color_col, title=title,
                 labels={x_col: x_title, y_col: y_title},
                 width=width, height=height, template=template)
    if filename:
        if not filename.endswith('.html'):
            filename = filename + '.html'
        filepath = os.path.join(os.getcwd(), filename)
        fig.write_html(filepath, auto_open=auto_open)
    fig.show()

def dist(df, col, lenght=4, filename=None, auto_open=False):
    """
    Function to plot the distribution of a column in a given pandas DataFrame.

    A histogram of the column is drawn together with three vertical marker
    lines at the column's mean, median and mode.

    Args:
        df (pandas DataFrame): The DataFrame to use for plotting.
        col (str): The name of the column to plot the distribution for.
        lenght (float, optional): The length of the vertical axis in multiples
            of the DataFrame length.  Defaults to 4.  (Parameter name kept
            as-is -- misspelled -- for backward compatibility with callers.)
        filename (str, optional): The filename to use for saving the plot.
            If not provided, the plot will not be saved.
        auto_open (bool): Whether to automatically open the saved plot.

    Returns:
        None
    """
    from plotly.subplots import make_subplots
    import plotly.graph_objs as go
    import os

    # Height of the vertical marker lines, in "count" units.
    l = len(df)*lenght/10
    fig = make_subplots(rows=1, cols=1)
    fig.add_trace(go.Histogram(x=df[col], name=col))
    # Bug fix: go.Line is a deprecated alias (it is not a trace type in
    # current plotly); use go.Scatter with mode='lines' instead.
    fig.add_trace(go.Scatter(x=[df[col].mean() for i in range(round(l))],
                             y=list(range(round(l))), mode='lines',
                             name=f'{col}\'s mean'))
    fig.add_trace(go.Scatter(x=[df[col].median() for i in range(round(l))],
                             y=list(range(round(l))), mode='lines',
                             name=f'{col}\'s median'))
    fig.add_trace(go.Scatter(x=[df[col].mode()[0] for i in range(round(l))],
                             y=list(range(round(l))), mode='lines',
                             name=f'{col}\'s mode'))
    if filename:
        if not filename.endswith('.html'):
            filename = filename + '.html'
        filepath = os.path.join(os.getcwd(), filename)
        fig.write_html(filepath, auto_open=auto_open)
    fig.show()

def plot_box(df, x_col, y_col, title='', x_title='', y_title='', color='blue', width=800, height=500, show_legend=True):
    """
    Creates a box plot from a pandas DataFrame using Plotly.

    Parameters:
    df (pandas DataFrame): The DataFrame to plot.
    x_col (str): The name of the column to use for the x-axis.
    y_col (str): The name of the column to use for the y-axis.
    title (str): The title of the plot.
    x_title (str): The title of the x-axis.
    y_title (str): The title of the y-axis.
    color (str): The color of the box.
    width (int): The width of the plot in pixels.
    height (int): The height of the plot in pixels.
    show_legend (bool): Whether to show the legend or not.

    The plot is always saved as a PNG in the current working directory.

    Returns:
    None
    """
    import os
    import plotly.graph_objs as go

    download_path = os.getcwd()
    fig = go.Figure()
    fig.add_trace(go.Box(x=df[x_col], y=df[y_col], marker_color=color, name=y_col))
    fig.update_layout(title=title, xaxis_title=x_title, yaxis_title=y_title,
                      width=width, height=height, showlegend=show_legend)
    fig.show()
    # Save the plot as a PNG file in the specified path.
    # NOTE: write_image() requires the `kaleido` package at runtime.
    filename = f"{y_col}_box_plot.png"
    filepath = os.path.join(download_path, filename)
    fig.write_image(filepath)

def heatmap(df, figsize=(15, 15), cmap="Greens", linewidths=0.1, annot_kws={"fontsize":10}):
    """
    Generates a heatmap based on the correlation matrix of the provided DataFrame.

    Parameters:
    -----------
    df : pandas DataFrame
        The input DataFrame to generate the heatmap from.
    figsize : tuple
        The matplotlib figure size.
    cmap : str
        The seaborn/matplotlib colormap name.
    linewidths : float
        Width of the lines dividing heatmap cells.
    annot_kws : dict
        Keyword arguments for the cell annotations.

    Returns:
    --------
    seaborn matrix plot
        The resulting heatmap plot showing the correlation matrix between the columns.
    """
    import matplotlib.pyplot as plt
    import seaborn as sns

    # Set the size of the heatmap
    plt.figure(figsize=figsize)
    # Generate the heatmap with seaborn
    return sns.heatmap(df.corr(), annot=True, cmap=cmap,
                       linewidths=linewidths, annot_kws=annot_kws);

def pairplot(df, color=None, size=None):
    """
    A function to plot a pair plot for a given dataframe.

    Parameters:
    -----------
    df : pandas.DataFrame
        The input dataframe to plot the pair plot for.
    color : str or None, optional (default=None)
        A column name of df to use for color encoding the scatter plot matrix.
    size : str or None, optional (default=None)
        A column name of df to use for size encoding the scatter plot matrix.

    Returns:
    --------
    None

    Example:
    --------
    >>> import pandas as pd
    >>> from sklearn.datasets import load_iris
    >>> iris = load_iris()
    >>> df = pd.DataFrame(data=iris.data, columns=iris.feature_names)
    >>> pairplot(df, color=iris.target, size=iris.target)
    """
    import plotly.express as px

    fig = px.scatter_matrix(df, color=color, size=size)
    # Hide the redundant diagonal (column vs itself) panels.
    fig.update_traces(diagonal_visible=False)
    fig.show()

def area_plot(df, x_col, y_col, title=None, x_title=None, y_title=None, color=None, filename=None):
    """
    A function to create an Area plot based on data from a DataFrame using Plotly.

    Parameters:
    df (pandas.DataFrame): The DataFrame containing the data to be plotted.
    x_col (str): The name of the column in the DataFrame to use as the x-axis data.
    y_col (str): The name of the column in the DataFrame to use as the y-axis data.
    title (str): The title of the plot.
    x_title (str): The title of the x-axis.
    y_title (str): The title of the y-axis.
    color (str): The name of the column in the DataFrame to use as the color data.
    filename (str): The filename to save the plot as.  If not specified,
        the plot will not be saved.

    Returns:
    None
    """
    import plotly.express as px

    fig = px.area(df, x=x_col, y=y_col, color=color, title=title)
    fig.update_layout(xaxis_title=x_title, yaxis_title=y_title)
    if filename:
        fig.write_html(filename)
    fig.show()

def sunburst(df, hierarchy_cols, size_col, color_col=None, title=None, width=800, height=800,
             font_size=14, colorscale='YlOrRd', download_path=None):
    """
    Create a sunburst graph using Plotly based on data from a data frame.

    Args:
    - df: pandas DataFrame containing the data for the sunburst graph.
    - hierarchy_cols: list of column names to use for the hierarchy of the
      sunburst graph; the last column supplies the labels and the
      second-to-last column supplies the parents.
    - size_col: name of the column to use for the size of the segments.
    - color_col: name of the column to use for the color of the segments.
    - title: title of the sunburst graph.
    - width: width of the sunburst graph in pixels.
    - height: height of the sunburst graph in pixels.
    - font_size: font size for the text in the sunburst graph.
    - colorscale: name of the Plotly colorscale to use for the segments.
    - download_path: file path for downloading the sunburst graph as an
      HTML file.  If None, the graph will not be downloaded.

    Returns:
    - None (the figure is shown and optionally written to download_path).
    """
    import plotly.graph_objs as go

    # (Removed a dead loop that collected the unique values of every
    # hierarchy level into a list that was never used.)
    fig = go.Figure(go.Sunburst(
        labels=df[hierarchy_cols[-1]],
        parents=df[hierarchy_cols[-2]],
        values=df[size_col],
        branchvalues='total',
        marker=dict(
            colors=df[color_col] if color_col is not None else None,
            colorscale=colorscale
        ),
        textfont=dict(
            size=font_size
        ),
        insidetextorientation='radial',
        maxdepth=len(hierarchy_cols)-1
    ))

    # Set the colorbar title
    if color_col is not None:
        fig.update_layout(coloraxis_colorbar=dict(
            title=color_col.capitalize(),
            title_font=dict(
                size=font_size
            ),
            ticksuffix=' '
        ))

    # Set the sunburst graph title
    if title is not None:
        fig.update_layout(title={
            'text': title,
            'font': {
                'size': font_size
            }
        })

    # Set the sunburst graph dimensions
    fig.update_layout(width=width, height=height)

    # Download the sunburst graph as an HTML file
    if download_path is not None:
        fig.write_html(download_path)

    # Show the sunburst graph
    fig.show()

def pie_plot(df, values_column, names_column, title='Pie Chart', width=800, height=600,
             filename='pie_chart.html', auto_open=False):
    """
    Creates a pie chart based on data from a pandas DataFrame using Plotly.

    Parameters:
    df: A pandas DataFrame containing the data to be plotted.
    values_column: The name of the DataFrame column containing the values
        for the pie chart slices.
    names_column: The name of the DataFrame column containing the names
        for the pie chart slices.
    title: The title of the chart.  Default is 'Pie Chart'.
    width: The width of the chart in pixels.  Default is 800.
    height: The height of the chart in pixels.  Default is 600.
    filename: The name of the file to save the chart to.  Default is
        'pie_chart.html'.
    auto_open: Whether to automatically open the saved chart.

    Returns:
    A Plotly figure object.
    """
    import plotly.express as px
    import plotly.io as pio

    fig = px.pie(df, values=values_column, names=names_column, title=title)
    fig.update_traces(textposition='inside', textinfo='percent+label')
    fig.update_layout(width=width, height=height)
    pio.write_html(fig, file=filename, auto_open=auto_open)
    return fig
PypiClean
/DACBench-0.2.0.tar.gz/DACBench-0.2.0/dacbench/benchmarks/modcma_benchmark.py
import itertools
import os

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH
import numpy as np
from modcma import Parameters

from dacbench.abstract_benchmark import AbstractBenchmark, objdict
from dacbench.envs import CMAStepSizeEnv, ModCMAEnv

# Configuration space of the tunable modular-CMA-ES components.  Every entry
# is categorical; the numeric name prefixes ("0_", "1_", ...) fix the ordering
# of the dimensions.
DEFAULT_CFG_SPACE = CS.ConfigurationSpace()
ACTIVE = CSH.CategoricalHyperparameter(choices=[True, False], name="0_active")
ELITIST = CSH.CategoricalHyperparameter(choices=[True, False], name="1_elitist")
ORTHOGONAL = CSH.CategoricalHyperparameter(choices=[True, False], name="2_orthogonal")
SEQUENTIAL = CSH.CategoricalHyperparameter(choices=[True, False], name="3_sequential")
THRESHOLD_CONVERGENCE = CSH.CategoricalHyperparameter(
    choices=[True, False], name="4_threshold_convergence"
)
STEP_SIZE_ADAPTION = CSH.CategoricalHyperparameter(
    choices=["csa", "tpa", "msr", "xnes", "m-xnes", "lp-xnes", "psr"],
    name="5_step_size_adaption",
)
MIRRORED = CSH.CategoricalHyperparameter(
    choices=["None", "mirrored", "mirrored pairwise"], name="6_mirrored"
)
BASE_SAMPLER = CSH.CategoricalHyperparameter(
    choices=["gaussian", "sobol", "halton"], name="7_base_sampler"
)
WEIGHTS_OPTION = CSH.CategoricalHyperparameter(
    choices=["default", "equal", "1/2^lambda"], name="8_weights_option"
)
LOCAL_RESTART = CSH.CategoricalHyperparameter(
    choices=["None", "IPOP", "BIPOP"], name="90_local_restart"
)
BOUND_CORRECTION = CSH.CategoricalHyperparameter(
    choices=["None", "saturate", "unif_resample", "COTN", "toroidal", "mirror"],
    name="91_bound_correction",
)
# Register the first batch of hyperparameters in declaration order.
DEFAULT_CFG_SPACE.add_hyperparameters(
    [
        ACTIVE,
        ELITIST,
        ORTHOGONAL,
        SEQUENTIAL,
        THRESHOLD_CONVERGENCE,
        STEP_SIZE_ADAPTION,
        MIRRORED,
        BASE_SAMPLER,
        WEIGHTS_OPTION,
    ]
)
DEFAULT_CFG_SPACE.add_hyperparameter(LOCAL_RESTART)
DEFAULT_CFG_SPACE.add_hyperparameter(BOUND_CORRECTION)

# Static metadata describing the benchmark; stored in the config under
# "benchmark_info" and surfaced to DACBench tooling.
INFO = {
    "identifier": "ModCMA",
    "name": "Online Selection of CMA-ES Variants",
    "reward": "Negative best function value",
    "state_description": [
        "Generation Size",
        "Sigma",
        "Remaining Budget",
        "Function ID",
        "Instance ID",
    ],
}

# Default benchmark configuration: one MultiDiscrete action dimension per
# modcma module and a 5-dimensional unbounded Box observation matching
# "state_description" above.
MODCMA_DEFAULTS = objdict(
    {
        "config_space": DEFAULT_CFG_SPACE,
        "action_space_class": "MultiDiscrete",
        "action_space_args": [
            list(
                map(
                    # Number of choices per module; modules without an
                    # "options" attribute are booleans and contribute 2.
                    lambda m: len(
                        getattr(getattr(Parameters, m), "options", [False, True])
                    ),
                    Parameters.__modules__,
                )
            )
        ],
        "observation_space_class": "Box",
        "observation_space_args": [-np.inf * np.ones(5), np.inf * np.ones(5)],
        "observation_space_type": np.float32,
        "reward_range": (-(10**12), 0),
        "budget": 100,
        "cutoff": 1e6,
        "seed": 0,
        "multi_agent": False,
        "instance_set_path": os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../instance_sets/modea/modea_train.csv",
        ),
        # NOTE(review): the test set points at the same CSV as the training
        # set (modea_train.csv) — confirm this is intentional.
        "test_set_path": os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../instance_sets/modea/modea_train.csv",
        ),
        "benchmark_info": INFO,
    }
)


class ModCMABenchmark(AbstractBenchmark):
    """Benchmark for online selection of modular CMA-ES variants.

    Builds either a ``ModCMAEnv`` (full modular-CMA configuration) or, when
    ``step_size=True``, a ``CMAStepSizeEnv`` with a continuous step-size
    action space.
    """

    def __init__(self, config_path: str = None, step_size=False, config=None):
        """Create the benchmark.

        Parameters
        ----------
        config_path : str, optional
            Path to a config file, forwarded to ``AbstractBenchmark``.
        step_size : bool
            If True, ``get_environment`` builds a step-size-control
            environment with a Box action space instead of the full
            modular-CMA environment.
        config : dict, optional
            Explicit config overrides, forwarded to ``AbstractBenchmark``.
        """
        super().__init__(config_path, config)
        # Values already loaded into self.config by the parent class take
        # precedence over MODCMA_DEFAULTS (keyword args win over the mapping).
        self.config = objdict(MODCMA_DEFAULTS.copy(), **(self.config or dict()))
        self.step_size = step_size

    def get_environment(self):
        """Build and return the configured environment, applying wrappers."""
        # Lazily load the instance set(s) on first use.
        if "instance_set" not in self.config:
            self.read_instance_set()
        # Read test set if path is specified
        if (
            "test_set" not in self.config.keys()
            and "test_set_path" in self.config.keys()
        ):
            self.read_instance_set(test=True)
        if self.step_size:
            # Step-size control only: a single continuous action in [0, 10].
            self.config.action_space_class = "Box"
            self.config.action_space_args = [np.array([0]), np.array([10])]
            env = CMAStepSizeEnv(self.config)
        else:
            env = ModCMAEnv(self.config)
        # Apply any wrappers registered on this benchmark.
        for func in self.wrap_funcs:
            env = func(env)
        return env

    def read_instance_set(self, test=False):
        """Read an instance-set CSV into the config.

        Each non-header row has the form
        ``id,dimension,function_id,instance_id,<representation...>`` and is
        stored as ``[dim, fid, iid, representation]`` keyed by ``int(id)``.

        Parameters
        ----------
        test : bool
            If True, read ``test_set_path`` into ``config["test_set"]``
            instead of the training set.
        """
        if test:
            path = self.config.test_set_path
            keyword = "test_set"
        else:
            path = self.config.instance_set_path
            keyword = "instance_set"
        self.config[keyword] = dict()
        with open(path, "r") as fh:
            # islice(fh, 1, None) skips the CSV header row.
            for line in itertools.islice(fh, 1, None):
                _id, dim, fid, iid, *representation = line.strip().split(",")
                self.config[keyword][int(_id)] = [
                    int(dim),
                    int(fid),
                    int(iid),
                    list(map(int, representation)),
                ]

    def get_benchmark(self, seed: int = 0):
        """Return a ``ModCMAEnv`` built from the default configuration.

        Parameters
        ----------
        seed : int
            Seed written into the configuration before environment creation.
        """
        # NOTE(review): dict.copy() on an objdict may return a plain dict,
        # while the attribute assignment below relies on objdict attribute
        # access — confirm objdict overrides copy().
        self.config = MODCMA_DEFAULTS.copy()
        self.config.seed = seed
        self.read_instance_set()
        self.read_instance_set(test=True)
        return ModCMAEnv(self.config)
PypiClean
/AyiinXd-0.0.8-cp311-cp311-macosx_10_9_universal2.whl/fipper/node_modules/@types/node/ts4.8/stream.d.ts
// Ambient type declarations for the Node.js 'stream' module (vendored
// @types/node, TS 4.8 variant).  Declarations only — no runtime code.
declare module 'stream' {
    import { EventEmitter, Abortable } from 'node:events';
    import { Blob as NodeBlob } from "node:buffer";
    import * as streamPromises from 'node:stream/promises';
    import * as streamConsumers from 'node:stream/consumers';
    import * as streamWeb from 'node:stream/web';
    // Callback form accepted by compose(): receives the source stream.
    type ComposeFnParam = (source: any) => void;
    // Base stream class: an EventEmitter extended with pipe() and compose().
    class internal extends EventEmitter {
        pipe<T extends NodeJS.WritableStream>(
            destination: T,
            options?: {
                end?: boolean | undefined;
            }
        ): T;
        compose<T extends NodeJS.ReadableStream>(stream: T | ComposeFnParam | Iterable<T> | AsyncIterable<T>, options?: { signal: AbortSignal }): T;
    }
    namespace internal {
        class Stream extends internal {
            constructor(opts?: ReadableOptions);
        }
        // Constructor options shared by all stream kinds.
        interface StreamOptions<T extends Stream> extends Abortable {
            emitClose?: boolean | undefined;
            highWaterMark?: number | undefined;
            objectMode?: boolean | undefined;
            construct?(this: T, callback: (error?: Error | null) => void): void;
            destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void;
            autoDestroy?: boolean | undefined;
        }
        // Options specific to Readable construction.
        interface ReadableOptions extends StreamOptions<Readable> {
            encoding?: BufferEncoding | undefined;
            read?(this: Readable, size: number): void;
        }
        /**
         * @since v0.9.4
         */
        class Readable extends Stream implements NodeJS.ReadableStream {
            /**
             * A utility method for creating Readable Streams out of iterators.
             */
            static from(iterable: Iterable<any> | AsyncIterable<any>, options?: ReadableOptions): Readable;
            /**
             * A utility method for creating a `Readable` from a web `ReadableStream`.
             * @since v17.0.0
             * @experimental
             */
            static fromWeb(readableStream: streamWeb.ReadableStream, options?: Pick<ReadableOptions, 'encoding' | 'highWaterMark' | 'objectMode' | 'signal'>): Readable;
            /**
             * Returns whether the stream has been read from or cancelled.
             * @since v16.8.0
             */
            static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean;
            /**
             * A utility method for creating a web `ReadableStream` from a `Readable`.
* @since v17.0.0 * @experimental */ static toWeb(streamReadable: Readable): streamWeb.ReadableStream; /** * Returns whether the stream was destroyed or errored before emitting `'end'`. * @since v16.8.0 * @experimental */ readonly readableAborted: boolean; /** * Is `true` if it is safe to call `readable.read()`, which means * the stream has not been destroyed or emitted `'error'` or `'end'`. * @since v11.4.0 */ readable: boolean; /** * Returns whether `'data'` has been emitted. * @since v16.7.0, v14.18.0 * @experimental */ readonly readableDidRead: boolean; /** * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method. * @since v12.7.0 */ readonly readableEncoding: BufferEncoding | null; /** * Becomes `true` when `'end'` event is emitted. * @since v12.9.0 */ readonly readableEnded: boolean; /** * This property reflects the current state of a `Readable` stream as described * in the `Three states` section. * @since v9.4.0 */ readonly readableFlowing: boolean | null; /** * Returns the value of `highWaterMark` passed when creating this `Readable`. * @since v9.3.0 */ readonly readableHighWaterMark: number; /** * This property contains the number of bytes (or objects) in the queue * ready to be read. The value provides introspection data regarding * the status of the `highWaterMark`. * @since v9.4.0 */ readonly readableLength: number; /** * Getter for the property `objectMode` of a given `Readable` stream. * @since v12.3.0 */ readonly readableObjectMode: boolean; /** * Is `true` after `readable.destroy()` has been called. * @since v8.0.0 */ destroyed: boolean; /** * Is true after 'close' has been emitted. * @since v18.0.0 */ readonly closed: boolean; /** * Returns error if the stream has been destroyed with an error. 
* @since v18.0.0 */ readonly errored: Error | null; constructor(opts?: ReadableOptions); _construct?(callback: (error?: Error | null) => void): void; _read(size: number): void; /** * The `readable.read()` method reads data out of the internal buffer and * returns it. If no data is available to be read, `null` is returned. By default, * the data is returned as a `Buffer` object unless an encoding has been * specified using the `readable.setEncoding()` method or the stream is operating * in object mode. * * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which * case all of the data remaining in the internal * buffer will be returned. * * If the `size` argument is not specified, all of the data contained in the * internal buffer will be returned. * * The `size` argument must be less than or equal to 1 GiB. * * The `readable.read()` method should only be called on `Readable` streams * operating in paused mode. In flowing mode, `readable.read()` is called * automatically until the internal buffer is fully drained. * * ```js * const readable = getReadableStreamSomehow(); * * // 'readable' may be triggered multiple times as data is buffered in * readable.on('readable', () => { * let chunk; * console.log('Stream is readable (new data received in buffer)'); * // Use a loop to make sure we read all currently available data * while (null !== (chunk = readable.read())) { * console.log(`Read ${chunk.length} bytes of data...`); * } * }); * * // 'end' will be triggered once when there is no more data available * readable.on('end', () => { * console.log('Reached end of stream.'); * }); * ``` * * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks * are not concatenated. A `while` loop is necessary to consume all data * currently in the buffer. 
When reading a large file `.read()` may return `null`, * having consumed all buffered content so far, but there is still more data to * come not yet buffered. In this case a new `'readable'` event will be emitted * when there is more data in the buffer. Finally the `'end'` event will be * emitted when there is no more data to come. * * Therefore to read a file's whole contents from a `readable`, it is necessary * to collect chunks across multiple `'readable'` events: * * ```js * const chunks = []; * * readable.on('readable', () => { * let chunk; * while (null !== (chunk = readable.read())) { * chunks.push(chunk); * } * }); * * readable.on('end', () => { * const content = chunks.join(''); * }); * ``` * * A `Readable` stream in object mode will always return a single item from * a call to `readable.read(size)`, regardless of the value of the`size` argument. * * If the `readable.read()` method returns a chunk of data, a `'data'` event will * also be emitted. * * Calling {@link read} after the `'end'` event has * been emitted will return `null`. No runtime error will be raised. * @since v0.9.4 * @param size Optional argument to specify how much data to read. */ read(size?: number): any; /** * The `readable.setEncoding()` method sets the character encoding for * data read from the `Readable` stream. * * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal * string format. * * The `Readable` stream will properly handle multi-byte characters delivered * through the stream that would otherwise become improperly decoded if simply * pulled from the stream as `Buffer` objects. 
* * ```js * const readable = getReadableStreamSomehow(); * readable.setEncoding('utf8'); * readable.on('data', (chunk) => { * assert.equal(typeof chunk, 'string'); * console.log('Got %d characters of string data:', chunk.length); * }); * ``` * @since v0.9.4 * @param encoding The encoding to use. */ setEncoding(encoding: BufferEncoding): this; /** * The `readable.pause()` method will cause a stream in flowing mode to stop * emitting `'data'` events, switching out of flowing mode. Any data that * becomes available will remain in the internal buffer. * * ```js * const readable = getReadableStreamSomehow(); * readable.on('data', (chunk) => { * console.log(`Received ${chunk.length} bytes of data.`); * readable.pause(); * console.log('There will be no additional data for 1 second.'); * setTimeout(() => { * console.log('Now data will start flowing again.'); * readable.resume(); * }, 1000); * }); * ``` * * The `readable.pause()` method has no effect if there is a `'readable'`event listener. * @since v0.9.4 */ pause(): this; /** * The `readable.resume()` method causes an explicitly paused `Readable` stream to * resume emitting `'data'` events, switching the stream into flowing mode. * * The `readable.resume()` method can be used to fully consume the data from a * stream without actually processing any of that data: * * ```js * getReadableStreamSomehow() * .resume() * .on('end', () => { * console.log('Reached the end, but did not read anything.'); * }); * ``` * * The `readable.resume()` method has no effect if there is a `'readable'`event listener. * @since v0.9.4 */ resume(): this; /** * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most * typical cases, there will be no reason to * use this method directly. 
* * ```js * const readable = new stream.Readable(); * * readable.isPaused(); // === false * readable.pause(); * readable.isPaused(); // === true * readable.resume(); * readable.isPaused(); // === false * ``` * @since v0.11.14 */ isPaused(): boolean; /** * The `readable.unpipe()` method detaches a `Writable` stream previously attached * using the {@link pipe} method. * * If the `destination` is not specified, then _all_ pipes are detached. * * If the `destination` is specified, but no pipe is set up for it, then * the method does nothing. * * ```js * const fs = require('fs'); * const readable = getReadableStreamSomehow(); * const writable = fs.createWriteStream('file.txt'); * // All the data from readable goes into 'file.txt', * // but only for the first second. * readable.pipe(writable); * setTimeout(() => { * console.log('Stop writing to file.txt.'); * readable.unpipe(writable); * console.log('Manually close the file stream.'); * writable.end(); * }, 1000); * ``` * @since v0.9.4 * @param destination Optional specific stream to unpipe */ unpipe(destination?: NodeJS.WritableStream): this; /** * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the * same as `readable.push(null)`, after which no more data can be written. The EOF * signal is put at the end of the buffer and any buffered data will still be * flushed. * * The `readable.unshift()` method pushes a chunk of data back into the internal * buffer. This is useful in certain situations where a stream is being consumed by * code that needs to "un-consume" some amount of data that it has optimistically * pulled out of the source, so that the data can be passed on to some other party. * * The `stream.unshift(chunk)` method cannot be called after the `'end'` event * has been emitted or a runtime error will be thrown. * * Developers using `stream.unshift()` often should consider switching to * use of a `Transform` stream instead. 
See the `API for stream implementers` section for more information. * * ```js * // Pull off a header delimited by \n\n. * // Use unshift() if we get too much. * // Call the callback with (error, header, stream). * const { StringDecoder } = require('string_decoder'); * function parseHeader(stream, callback) { * stream.on('error', callback); * stream.on('readable', onReadable); * const decoder = new StringDecoder('utf8'); * let header = ''; * function onReadable() { * let chunk; * while (null !== (chunk = stream.read())) { * const str = decoder.write(chunk); * if (str.includes('\n\n')) { * // Found the header boundary. * const split = str.split(/\n\n/); * header += split.shift(); * const remaining = split.join('\n\n'); * const buf = Buffer.from(remaining, 'utf8'); * stream.removeListener('error', callback); * // Remove the 'readable' listener before unshifting. * stream.removeListener('readable', onReadable); * if (buf.length) * stream.unshift(buf); * // Now the body of the message can be read from the stream. * callback(null, header, stream); * return; * } * // Still reading the header. * header += str; * } * } * } * ``` * * Unlike {@link push}, `stream.unshift(chunk)` will not * end the reading process by resetting the internal reading state of the stream. * This can cause unexpected results if `readable.unshift()` is called during a * read (i.e. from within a {@link _read} implementation on a * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately, * however it is best to simply avoid calling `readable.unshift()` while in the * process of performing a read. * @since v0.9.11 * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode * streams, `chunk` may be any JavaScript value. * @param encoding Encoding of string chunks. 
Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`. */ unshift(chunk: any, encoding?: BufferEncoding): void; /** * Prior to Node.js 0.10, streams did not implement the entire `stream` module API * as it is currently defined. (See `Compatibility` for more information.) * * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable` * stream that uses * the old stream as its data source. * * It will rarely be necessary to use `readable.wrap()` but the method has been * provided as a convenience for interacting with older Node.js applications and * libraries. * * ```js * const { OldReader } = require('./old-api-module.js'); * const { Readable } = require('stream'); * const oreader = new OldReader(); * const myReader = new Readable().wrap(oreader); * * myReader.on('readable', () => { * myReader.read(); // etc. * }); * ``` * @since v0.9.4 * @param stream An "old style" readable stream */ wrap(stream: NodeJS.ReadableStream): this; push(chunk: any, encoding?: BufferEncoding): boolean; _destroy(error: Error | null, callback: (error?: Error | null) => void): void; /** * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable * stream will release any internal resources and subsequent calls to `push()`will be ignored. * * Once `destroy()` has been called any further calls will be a no-op and no * further errors except from `_destroy()` may be emitted as `'error'`. * * Implementors should not override this method, but instead implement `readable._destroy()`. * @since v8.0.0 * @param error Error which will be passed as payload in `'error'` event */ destroy(error?: Error): this; /** * Event emitter * The defined events on documents including: * 1. close * 2. data * 3. end * 4. error * 5. pause * 6. readable * 7. 
resume */ addListener(event: 'close', listener: () => void): this; addListener(event: 'data', listener: (chunk: any) => void): this; addListener(event: 'end', listener: () => void): this; addListener(event: 'error', listener: (err: Error) => void): this; addListener(event: 'pause', listener: () => void): this; addListener(event: 'readable', listener: () => void): this; addListener(event: 'resume', listener: () => void): this; addListener(event: string | symbol, listener: (...args: any[]) => void): this; emit(event: 'close'): boolean; emit(event: 'data', chunk: any): boolean; emit(event: 'end'): boolean; emit(event: 'error', err: Error): boolean; emit(event: 'pause'): boolean; emit(event: 'readable'): boolean; emit(event: 'resume'): boolean; emit(event: string | symbol, ...args: any[]): boolean; on(event: 'close', listener: () => void): this; on(event: 'data', listener: (chunk: any) => void): this; on(event: 'end', listener: () => void): this; on(event: 'error', listener: (err: Error) => void): this; on(event: 'pause', listener: () => void): this; on(event: 'readable', listener: () => void): this; on(event: 'resume', listener: () => void): this; on(event: string | symbol, listener: (...args: any[]) => void): this; once(event: 'close', listener: () => void): this; once(event: 'data', listener: (chunk: any) => void): this; once(event: 'end', listener: () => void): this; once(event: 'error', listener: (err: Error) => void): this; once(event: 'pause', listener: () => void): this; once(event: 'readable', listener: () => void): this; once(event: 'resume', listener: () => void): this; once(event: string | symbol, listener: (...args: any[]) => void): this; prependListener(event: 'close', listener: () => void): this; prependListener(event: 'data', listener: (chunk: any) => void): this; prependListener(event: 'end', listener: () => void): this; prependListener(event: 'error', listener: (err: Error) => void): this; prependListener(event: 'pause', listener: () => void): this; 
prependListener(event: 'readable', listener: () => void): this; prependListener(event: 'resume', listener: () => void): this; prependListener(event: string | symbol, listener: (...args: any[]) => void): this; prependOnceListener(event: 'close', listener: () => void): this; prependOnceListener(event: 'data', listener: (chunk: any) => void): this; prependOnceListener(event: 'end', listener: () => void): this; prependOnceListener(event: 'error', listener: (err: Error) => void): this; prependOnceListener(event: 'pause', listener: () => void): this; prependOnceListener(event: 'readable', listener: () => void): this; prependOnceListener(event: 'resume', listener: () => void): this; prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; removeListener(event: 'close', listener: () => void): this; removeListener(event: 'data', listener: (chunk: any) => void): this; removeListener(event: 'end', listener: () => void): this; removeListener(event: 'error', listener: (err: Error) => void): this; removeListener(event: 'pause', listener: () => void): this; removeListener(event: 'readable', listener: () => void): this; removeListener(event: 'resume', listener: () => void): this; removeListener(event: string | symbol, listener: (...args: any[]) => void): this; [Symbol.asyncIterator](): AsyncIterableIterator<any>; } interface WritableOptions extends StreamOptions<Writable> { decodeStrings?: boolean | undefined; defaultEncoding?: BufferEncoding | undefined; write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; writev?( this: Writable, chunks: Array<{ chunk: any; encoding: BufferEncoding; }>, callback: (error?: Error | null) => void ): void; final?(this: Writable, callback: (error?: Error | null) => void): void; } /** * @since v0.9.4 */ class Writable extends Stream implements NodeJS.WritableStream { /** * A utility method for creating a `Writable` from a web `WritableStream`. 
* @since v17.0.0 * @experimental */ static fromWeb(writableStream: streamWeb.WritableStream, options?: Pick<WritableOptions, 'decodeStrings' | 'highWaterMark' | 'objectMode' | 'signal'>): Writable; /** * A utility method for creating a web `WritableStream` from a `Writable`. * @since v17.0.0 * @experimental */ static toWeb(streamWritable: Writable): streamWeb.WritableStream; /** * Is `true` if it is safe to call `writable.write()`, which means * the stream has not been destroyed, errored or ended. * @since v11.4.0 */ readonly writable: boolean; /** * Is `true` after `writable.end()` has been called. This property * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead. * @since v12.9.0 */ readonly writableEnded: boolean; /** * Is set to `true` immediately before the `'finish'` event is emitted. * @since v12.6.0 */ readonly writableFinished: boolean; /** * Return the value of `highWaterMark` passed when creating this `Writable`. * @since v9.3.0 */ readonly writableHighWaterMark: number; /** * This property contains the number of bytes (or objects) in the queue * ready to be written. The value provides introspection data regarding * the status of the `highWaterMark`. * @since v9.4.0 */ readonly writableLength: number; /** * Getter for the property `objectMode` of a given `Writable` stream. * @since v12.3.0 */ readonly writableObjectMode: boolean; /** * Number of times `writable.uncork()` needs to be * called in order to fully uncork the stream. * @since v13.2.0, v12.16.0 */ readonly writableCorked: number; /** * Is `true` after `writable.destroy()` has been called. * @since v8.0.0 */ destroyed: boolean; /** * Is true after 'close' has been emitted. * @since v18.0.0 */ readonly closed: boolean; /** * Returns error if the stream has been destroyed with an error. * @since v18.0.0 */ readonly errored: Error | null; /** * Is `true` if the stream's buffer has been full and stream will emit 'drain'. 
* @since v15.2.0, v14.17.0 */ readonly writableNeedDrain: boolean; constructor(opts?: WritableOptions); _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; _writev?( chunks: Array<{ chunk: any; encoding: BufferEncoding; }>, callback: (error?: Error | null) => void ): void; _construct?(callback: (error?: Error | null) => void): void; _destroy(error: Error | null, callback: (error?: Error | null) => void): void; _final(callback: (error?: Error | null) => void): void; /** * The `writable.write()` method writes some data to the stream, and calls the * supplied `callback` once the data has been fully handled. If an error * occurs, the `callback` will be called with the error as its * first argument. The `callback` is called asynchronously and before `'error'` is * emitted. * * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`. * If `false` is returned, further attempts to write data to the stream should * stop until the `'drain'` event is emitted. * * While a stream is not draining, calls to `write()` will buffer `chunk`, and * return false. Once all currently buffered chunks are drained (accepted for * delivery by the operating system), the `'drain'` event will be emitted. * Once `write()` returns false, do not write more chunks * until the `'drain'` event is emitted. While calling `write()` on a stream that * is not draining is allowed, Node.js will buffer all written chunks until * maximum memory usage occurs, at which point it will abort unconditionally. * Even before it aborts, high memory usage will cause poor garbage collector * performance and high RSS (which is not typically released back to the system, * even after the memory is no longer required). Since TCP sockets may never * drain if the remote peer does not read the data, writing a socket that is * not draining may lead to a remotely exploitable vulnerability. 
* * Writing data while the stream is not draining is particularly * problematic for a `Transform`, because the `Transform` streams are paused * by default until they are piped or a `'data'` or `'readable'` event handler * is added. * * If the data to be written can be generated or fetched on demand, it is * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is * possible to respect backpressure and avoid memory issues using the `'drain'` event: * * ```js * function write(data, cb) { * if (!stream.write(data)) { * stream.once('drain', cb); * } else { * process.nextTick(cb); * } * } * * // Wait for cb to be called before doing any other write. * write('hello', () => { * console.log('Write completed, do more writes now.'); * }); * ``` * * A `Writable` stream in object mode will always ignore the `encoding` argument. * @since v0.9.4 * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any * JavaScript value other than `null`. * @param [encoding='utf8'] The encoding, if `chunk` is a string. * @param callback Callback for when this chunk of data is flushed. * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`. */ write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean; write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean; /** * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream. * @since v0.11.15 * @param encoding The new default encoding */ setDefaultEncoding(encoding: BufferEncoding): this; /** * Calling the `writable.end()` method signals that no more data will be written * to the `Writable`. 
The optional `chunk` and `encoding` arguments allow one * final additional chunk of data to be written immediately before closing the * stream. * * Calling the {@link write} method after calling {@link end} will raise an error. * * ```js * // Write 'hello, ' and then end with 'world!'. * const fs = require('fs'); * const file = fs.createWriteStream('example.txt'); * file.write('hello, '); * file.end('world!'); * // Writing more now is not allowed! * ``` * @since v0.9.4 * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any * JavaScript value other than `null`. * @param encoding The encoding if `chunk` is a string * @param callback Callback for when the stream is finished. */ end(cb?: () => void): this; end(chunk: any, cb?: () => void): this; end(chunk: any, encoding: BufferEncoding, cb?: () => void): this; /** * The `writable.cork()` method forces all written data to be buffered in memory. * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called. * * The primary intent of `writable.cork()` is to accommodate a situation in which * several small chunks are written to the stream in rapid succession. Instead of * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them * all to `writable._writev()`, if present. This prevents a head-of-line blocking * situation where data is being buffered while waiting for the first small chunk * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput. * * See also: `writable.uncork()`, `writable._writev()`. * @since v0.11.2 */ cork(): void; /** * The `writable.uncork()` method flushes all data buffered since {@link cork} was called. 
* * When using `writable.cork()` and `writable.uncork()` to manage the buffering * of writes to a stream, defer calls to `writable.uncork()` using`process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event * loop phase. * * ```js * stream.cork(); * stream.write('some '); * stream.write('data '); * process.nextTick(() => stream.uncork()); * ``` * * If the `writable.cork()` method is called multiple times on a stream, the * same number of calls to `writable.uncork()` must be called to flush the buffered * data. * * ```js * stream.cork(); * stream.write('some '); * stream.cork(); * stream.write('data '); * process.nextTick(() => { * stream.uncork(); * // The data will not be flushed until uncork() is called a second time. * stream.uncork(); * }); * ``` * * See also: `writable.cork()`. * @since v0.11.2 */ uncork(): void; /** * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable * stream has ended and subsequent calls to `write()` or `end()` will result in * an `ERR_STREAM_DESTROYED` error. * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error. * Use `end()` instead of destroy if data should flush before close, or wait for * the `'drain'` event before destroying the stream. * * Once `destroy()` has been called any further calls will be a no-op and no * further errors except from `_destroy()` may be emitted as `'error'`. * * Implementors should not override this method, * but instead implement `writable._destroy()`. * @since v8.0.0 * @param error Optional, an error to emit with `'error'` event. */ destroy(error?: Error): this; /** * Event emitter * The defined events on documents including: * 1. close * 2. drain * 3. error * 4. finish * 5. pipe * 6. 
unpipe */ addListener(event: 'close', listener: () => void): this; addListener(event: 'drain', listener: () => void): this; addListener(event: 'error', listener: (err: Error) => void): this; addListener(event: 'finish', listener: () => void): this; addListener(event: 'pipe', listener: (src: Readable) => void): this; addListener(event: 'unpipe', listener: (src: Readable) => void): this; addListener(event: string | symbol, listener: (...args: any[]) => void): this; emit(event: 'close'): boolean; emit(event: 'drain'): boolean; emit(event: 'error', err: Error): boolean; emit(event: 'finish'): boolean; emit(event: 'pipe', src: Readable): boolean; emit(event: 'unpipe', src: Readable): boolean; emit(event: string | symbol, ...args: any[]): boolean; on(event: 'close', listener: () => void): this; on(event: 'drain', listener: () => void): this; on(event: 'error', listener: (err: Error) => void): this; on(event: 'finish', listener: () => void): this; on(event: 'pipe', listener: (src: Readable) => void): this; on(event: 'unpipe', listener: (src: Readable) => void): this; on(event: string | symbol, listener: (...args: any[]) => void): this; once(event: 'close', listener: () => void): this; once(event: 'drain', listener: () => void): this; once(event: 'error', listener: (err: Error) => void): this; once(event: 'finish', listener: () => void): this; once(event: 'pipe', listener: (src: Readable) => void): this; once(event: 'unpipe', listener: (src: Readable) => void): this; once(event: string | symbol, listener: (...args: any[]) => void): this; prependListener(event: 'close', listener: () => void): this; prependListener(event: 'drain', listener: () => void): this; prependListener(event: 'error', listener: (err: Error) => void): this; prependListener(event: 'finish', listener: () => void): this; prependListener(event: 'pipe', listener: (src: Readable) => void): this; prependListener(event: 'unpipe', listener: (src: Readable) => void): this; prependListener(event: string | symbol, 
listener: (...args: any[]) => void): this; prependOnceListener(event: 'close', listener: () => void): this; prependOnceListener(event: 'drain', listener: () => void): this; prependOnceListener(event: 'error', listener: (err: Error) => void): this; prependOnceListener(event: 'finish', listener: () => void): this; prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this; prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this; prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; removeListener(event: 'close', listener: () => void): this; removeListener(event: 'drain', listener: () => void): this; removeListener(event: 'error', listener: (err: Error) => void): this; removeListener(event: 'finish', listener: () => void): this; removeListener(event: 'pipe', listener: (src: Readable) => void): this; removeListener(event: 'unpipe', listener: (src: Readable) => void): this; removeListener(event: string | symbol, listener: (...args: any[]) => void): this; } interface DuplexOptions extends ReadableOptions, WritableOptions { allowHalfOpen?: boolean | undefined; readableObjectMode?: boolean | undefined; writableObjectMode?: boolean | undefined; readableHighWaterMark?: number | undefined; writableHighWaterMark?: number | undefined; writableCorked?: number | undefined; construct?(this: Duplex, callback: (error?: Error | null) => void): void; read?(this: Duplex, size: number): void; write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; writev?( this: Duplex, chunks: Array<{ chunk: any; encoding: BufferEncoding; }>, callback: (error?: Error | null) => void ): void; final?(this: Duplex, callback: (error?: Error | null) => void): void; destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void; } /** * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces. 
* * Examples of `Duplex` streams include: * * * `TCP sockets` * * `zlib streams` * * `crypto streams` * @since v0.9.4 */ class Duplex extends Readable implements Writable { readonly writable: boolean; readonly writableEnded: boolean; readonly writableFinished: boolean; readonly writableHighWaterMark: number; readonly writableLength: number; readonly writableObjectMode: boolean; readonly writableCorked: number; readonly writableNeedDrain: boolean; readonly closed: boolean; readonly errored: Error | null; /** * If `false` then the stream will automatically end the writable side when the * readable side ends. Set initially by the `allowHalfOpen` constructor option, * which defaults to `false`. * * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is * emitted. * @since v0.9.4 */ allowHalfOpen: boolean; constructor(opts?: DuplexOptions); /** * A utility method for creating duplex streams. * * - `Stream` converts writable stream into writable `Duplex` and readable stream * to `Duplex`. * - `Blob` converts into readable `Duplex`. * - `string` converts into readable `Duplex`. * - `ArrayBuffer` converts into readable `Duplex`. * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`. * - `AsyncGeneratorFunction` converts into a readable/writable transform * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield * `null`. * - `AsyncFunction` converts into a writable `Duplex`. Must return * either `null` or `undefined` * - `Object ({ writable, readable })` converts `readable` and * `writable` into `Stream` and then combines them into `Duplex` where the * `Duplex` will write to the `writable` and read from the `readable`. * - `Promise` converts into readable `Duplex`. Value `null` is ignored. 
* * @since v16.8.0 */ static from(src: Stream | NodeBlob | ArrayBuffer | string | Iterable<any> | AsyncIterable<any> | AsyncGeneratorFunction | Promise<any> | Object): Duplex; _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; _writev?( chunks: Array<{ chunk: any; encoding: BufferEncoding; }>, callback: (error?: Error | null) => void ): void; _destroy(error: Error | null, callback: (error: Error | null) => void): void; _final(callback: (error?: Error | null) => void): void; write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean; write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean; setDefaultEncoding(encoding: BufferEncoding): this; end(cb?: () => void): this; end(chunk: any, cb?: () => void): this; end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this; cork(): void; uncork(): void; /** * Event emitter * The defined events on documents including: * 1. close * 2. data * 3. drain * 4. end * 5. error * 6. finish * 7. pause * 8. pipe * 9. readable * 10. resume * 11. 
unpipe */ addListener(event: 'close', listener: () => void): this; addListener(event: 'data', listener: (chunk: any) => void): this; addListener(event: 'drain', listener: () => void): this; addListener(event: 'end', listener: () => void): this; addListener(event: 'error', listener: (err: Error) => void): this; addListener(event: 'finish', listener: () => void): this; addListener(event: 'pause', listener: () => void): this; addListener(event: 'pipe', listener: (src: Readable) => void): this; addListener(event: 'readable', listener: () => void): this; addListener(event: 'resume', listener: () => void): this; addListener(event: 'unpipe', listener: (src: Readable) => void): this; addListener(event: string | symbol, listener: (...args: any[]) => void): this; emit(event: 'close'): boolean; emit(event: 'data', chunk: any): boolean; emit(event: 'drain'): boolean; emit(event: 'end'): boolean; emit(event: 'error', err: Error): boolean; emit(event: 'finish'): boolean; emit(event: 'pause'): boolean; emit(event: 'pipe', src: Readable): boolean; emit(event: 'readable'): boolean; emit(event: 'resume'): boolean; emit(event: 'unpipe', src: Readable): boolean; emit(event: string | symbol, ...args: any[]): boolean; on(event: 'close', listener: () => void): this; on(event: 'data', listener: (chunk: any) => void): this; on(event: 'drain', listener: () => void): this; on(event: 'end', listener: () => void): this; on(event: 'error', listener: (err: Error) => void): this; on(event: 'finish', listener: () => void): this; on(event: 'pause', listener: () => void): this; on(event: 'pipe', listener: (src: Readable) => void): this; on(event: 'readable', listener: () => void): this; on(event: 'resume', listener: () => void): this; on(event: 'unpipe', listener: (src: Readable) => void): this; on(event: string | symbol, listener: (...args: any[]) => void): this; once(event: 'close', listener: () => void): this; once(event: 'data', listener: (chunk: any) => void): this; once(event: 'drain', 
listener: () => void): this; once(event: 'end', listener: () => void): this; once(event: 'error', listener: (err: Error) => void): this; once(event: 'finish', listener: () => void): this; once(event: 'pause', listener: () => void): this; once(event: 'pipe', listener: (src: Readable) => void): this; once(event: 'readable', listener: () => void): this; once(event: 'resume', listener: () => void): this; once(event: 'unpipe', listener: (src: Readable) => void): this; once(event: string | symbol, listener: (...args: any[]) => void): this; prependListener(event: 'close', listener: () => void): this; prependListener(event: 'data', listener: (chunk: any) => void): this; prependListener(event: 'drain', listener: () => void): this; prependListener(event: 'end', listener: () => void): this; prependListener(event: 'error', listener: (err: Error) => void): this; prependListener(event: 'finish', listener: () => void): this; prependListener(event: 'pause', listener: () => void): this; prependListener(event: 'pipe', listener: (src: Readable) => void): this; prependListener(event: 'readable', listener: () => void): this; prependListener(event: 'resume', listener: () => void): this; prependListener(event: 'unpipe', listener: (src: Readable) => void): this; prependListener(event: string | symbol, listener: (...args: any[]) => void): this; prependOnceListener(event: 'close', listener: () => void): this; prependOnceListener(event: 'data', listener: (chunk: any) => void): this; prependOnceListener(event: 'drain', listener: () => void): this; prependOnceListener(event: 'end', listener: () => void): this; prependOnceListener(event: 'error', listener: (err: Error) => void): this; prependOnceListener(event: 'finish', listener: () => void): this; prependOnceListener(event: 'pause', listener: () => void): this; prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this; prependOnceListener(event: 'readable', listener: () => void): this; prependOnceListener(event: 'resume', 
listener: () => void): this; prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this; prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; removeListener(event: 'close', listener: () => void): this; removeListener(event: 'data', listener: (chunk: any) => void): this; removeListener(event: 'drain', listener: () => void): this; removeListener(event: 'end', listener: () => void): this; removeListener(event: 'error', listener: (err: Error) => void): this; removeListener(event: 'finish', listener: () => void): this; removeListener(event: 'pause', listener: () => void): this; removeListener(event: 'pipe', listener: (src: Readable) => void): this; removeListener(event: 'readable', listener: () => void): this; removeListener(event: 'resume', listener: () => void): this; removeListener(event: 'unpipe', listener: (src: Readable) => void): this; removeListener(event: string | symbol, listener: (...args: any[]) => void): this; } type TransformCallback = (error?: Error | null, data?: any) => void; interface TransformOptions extends DuplexOptions { construct?(this: Transform, callback: (error?: Error | null) => void): void; read?(this: Transform, size: number): void; write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void; writev?( this: Transform, chunks: Array<{ chunk: any; encoding: BufferEncoding; }>, callback: (error?: Error | null) => void ): void; final?(this: Transform, callback: (error?: Error | null) => void): void; destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void; transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; flush?(this: Transform, callback: TransformCallback): void; } /** * Transform streams are `Duplex` streams where the output is in some way * related to the input. 
Like all `Duplex` streams, `Transform` streams * implement both the `Readable` and `Writable` interfaces. * * Examples of `Transform` streams include: * * * `zlib streams` * * `crypto streams` * @since v0.9.4 */ class Transform extends Duplex { constructor(opts?: TransformOptions); _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void; _flush(callback: TransformCallback): void; } /** * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is * primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams. */ class PassThrough extends Transform {} /** * Attaches an AbortSignal to a readable or writeable stream. This lets code * control stream destruction using an `AbortController`. * * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream. 
* * ```js * const fs = require('fs'); * * const controller = new AbortController(); * const read = addAbortSignal( * controller.signal, * fs.createReadStream(('object.json')) * ); * // Later, abort the operation closing the stream * controller.abort(); * ``` * * Or using an `AbortSignal` with a readable stream as an async iterable: * * ```js * const controller = new AbortController(); * setTimeout(() => controller.abort(), 10_000); // set a timeout * const stream = addAbortSignal( * controller.signal, * fs.createReadStream(('object.json')) * ); * (async () => { * try { * for await (const chunk of stream) { * await process(chunk); * } * } catch (e) { * if (e.name === 'AbortError') { * // The operation was cancelled * } else { * throw e; * } * } * })(); * ``` * @since v15.4.0 * @param signal A signal representing possible cancellation * @param stream a stream to attach a signal to */ function addAbortSignal<T extends Stream>(signal: AbortSignal, stream: T): T; /** * Returns the default highWaterMark used by streams. * Defaults to `16384` (16 KiB), or `16` for `objectMode`. * @since v18.17.0 * @param objectMode */ function getDefaultHighWaterMark(objectMode: boolean): number; /** * Sets the default highWaterMark used by streams. * @since v18.17.0 * @param objectMode * @param value highWaterMark value */ function setDefaultHighWaterMark(objectMode: boolean, value: number): void; interface FinishedOptions extends Abortable { error?: boolean | undefined; readable?: boolean | undefined; writable?: boolean | undefined; } /** * A function to get notified when a stream is no longer readable, writable * or has experienced an error or a premature close event. * * ```js * const { finished } = require('stream'); * * const rs = fs.createReadStream('archive.tar'); * * finished(rs, (err) => { * if (err) { * console.error('Stream failed.', err); * } else { * console.log('Stream is done reading.'); * } * }); * * rs.resume(); // Drain the stream. 
* ``` * * Especially useful in error handling scenarios where a stream is destroyed * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`. * * The `finished` API provides promise version: * * ```js * const { finished } = require('stream/promises'); * * const rs = fs.createReadStream('archive.tar'); * * async function run() { * await finished(rs); * console.log('Stream is done reading.'); * } * * run().catch(console.error); * rs.resume(); // Drain the stream. * ``` * * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been * invoked. The reason for this is so that unexpected `'error'` events (due to * incorrect stream implementations) do not cause unexpected crashes. * If this is unwanted behavior then the returned cleanup function needs to be * invoked in the callback: * * ```js * const cleanup = finished(rs, (err) => { * cleanup(); * // ... * }); * ``` * @since v10.0.0 * @param stream A readable and/or writable stream. * @param callback A callback function that takes an optional error argument. * @return A cleanup function which removes all registered listeners. 
*/ function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void; function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void; namespace finished { function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>; } type PipelineSourceFunction<T> = () => Iterable<T> | AsyncIterable<T>; type PipelineSource<T> = Iterable<T> | AsyncIterable<T> | NodeJS.ReadableStream | PipelineSourceFunction<T>; type PipelineTransform<S extends PipelineTransformSource<any>, U> = | NodeJS.ReadWriteStream | ((source: S extends (...args: any[]) => Iterable<infer ST> | AsyncIterable<infer ST> ? AsyncIterable<ST> : S) => AsyncIterable<U>); type PipelineTransformSource<T> = PipelineSource<T> | PipelineTransform<any, T>; type PipelineDestinationIterableFunction<T> = (source: AsyncIterable<T>) => AsyncIterable<any>; type PipelineDestinationPromiseFunction<T, P> = (source: AsyncIterable<T>) => Promise<P>; type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST> ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P> : never; type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? (err: NodeJS.ErrnoException | null, value: P) => void : (err: NodeJS.ErrnoException | null) => void; type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? 
Promise<P> : Promise<void>; interface PipelineOptions { signal?: AbortSignal | undefined; end?: boolean | undefined; } /** * A module method to pipe between streams and generators forwarding errors and * properly cleaning up and provide a callback when the pipeline is complete. * * ```js * const { pipeline } = require('stream'); * const fs = require('fs'); * const zlib = require('zlib'); * * // Use the pipeline API to easily pipe a series of streams * // together and get notified when the pipeline is fully done. * * // A pipeline to gzip a potentially huge tar file efficiently: * * pipeline( * fs.createReadStream('archive.tar'), * zlib.createGzip(), * fs.createWriteStream('archive.tar.gz'), * (err) => { * if (err) { * console.error('Pipeline failed.', err); * } else { * console.log('Pipeline succeeded.'); * } * } * ); * ``` * * The `pipeline` API provides a promise version, which can also * receive an options argument as the last parameter with a`signal` `AbortSignal` property. When the signal is aborted,`destroy` will be called on the underlying pipeline, with * an`AbortError`. 
* * ```js * const { pipeline } = require('stream/promises'); * * async function run() { * await pipeline( * fs.createReadStream('archive.tar'), * zlib.createGzip(), * fs.createWriteStream('archive.tar.gz') * ); * console.log('Pipeline succeeded.'); * } * * run().catch(console.error); * ``` * * To use an `AbortSignal`, pass it inside an options object, * as the last argument: * * ```js * const { pipeline } = require('stream/promises'); * * async function run() { * const ac = new AbortController(); * const signal = ac.signal; * * setTimeout(() => ac.abort(), 1); * await pipeline( * fs.createReadStream('archive.tar'), * zlib.createGzip(), * fs.createWriteStream('archive.tar.gz'), * { signal }, * ); * } * * run().catch(console.error); // AbortError * ``` * * The `pipeline` API also supports async generators: * * ```js * const { pipeline } = require('stream/promises'); * const fs = require('fs'); * * async function run() { * await pipeline( * fs.createReadStream('lowercase.txt'), * async function* (source, { signal }) { * source.setEncoding('utf8'); // Work with strings rather than `Buffer`s. * for await (const chunk of source) { * yield await processChunk(chunk, { signal }); * } * }, * fs.createWriteStream('uppercase.txt') * ); * console.log('Pipeline succeeded.'); * } * * run().catch(console.error); * ``` * * Remember to handle the `signal` argument passed into the async generator. * Especially in the case where the async generator is the source for the * pipeline (i.e. first argument) or the pipeline will never complete. 
* * ```js * const { pipeline } = require('stream/promises'); * const fs = require('fs'); * * async function run() { * await pipeline( * async function* ({ signal }) { * await someLongRunningfn({ signal }); * yield 'asd'; * }, * fs.createWriteStream('uppercase.txt') * ); * console.log('Pipeline succeeded.'); * } * * run().catch(console.error); * ``` * * `stream.pipeline()` will call `stream.destroy(err)` on all streams except: * * * `Readable` streams which have emitted `'end'` or `'close'`. * * `Writable` streams which have emitted `'finish'` or `'close'`. * * `stream.pipeline()` leaves dangling event listeners on the streams * after the `callback` has been invoked. In the case of reuse of streams after * failure, this can cause event listener leaks and swallowed errors. If the last * stream is readable, dangling event listeners will be removed so that the last * stream can be consumed later. * * `stream.pipeline()` closes all the streams when an error is raised. * The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior * once it would destroy the socket without sending the expected response. * See the example below: * * ```js * const fs = require('fs'); * const http = require('http'); * const { pipeline } = require('stream'); * * const server = http.createServer((req, res) => { * const fileStream = fs.createReadStream('./fileNotExist.txt'); * pipeline(fileStream, res, (err) => { * if (err) { * console.log(err); // No such file * // this message can't be sent once `pipeline` already destroyed the socket * return res.end('error!!!'); * } * }); * }); * ``` * @since v10.0.0 * @param callback Called when the pipeline is fully done. */ function pipeline<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>( source: A, destination: B, callback?: PipelineCallback<B> ): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>( source: A, transform1: T1, destination: B, callback?: PipelineCallback<B> ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>( source: A, transform1: T1, transform2: T2, destination: B, callback?: PipelineCallback<B> ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; function pipeline< A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, T3 extends PipelineTransform<T2, any>, B extends PipelineDestination<T3, any> >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream; function pipeline< A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, T3 extends PipelineTransform<T2, any>, T4 extends PipelineTransform<T3, any>, B extends PipelineDestination<T4, any> >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? 
B : NodeJS.WritableStream; function pipeline( streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>, callback?: (err: NodeJS.ErrnoException | null) => void ): NodeJS.WritableStream; function pipeline( stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void)> ): NodeJS.WritableStream; namespace pipeline { function __promisify__<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(source: A, destination: B, options?: PipelineOptions): PipelinePromise<B>; function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>( source: A, transform1: T1, destination: B, options?: PipelineOptions ): PipelinePromise<B>; function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>( source: A, transform1: T1, transform2: T2, destination: B, options?: PipelineOptions ): PipelinePromise<B>; function __promisify__< A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, T3 extends PipelineTransform<T2, any>, B extends PipelineDestination<T3, any> >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise<B>; function __promisify__< A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, T3 extends PipelineTransform<T2, any>, T4 extends PipelineTransform<T3, any>, B extends PipelineDestination<T4, any> >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise<B>; function __promisify__(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | 
NodeJS.ReadWriteStream>, options?: PipelineOptions): Promise<void>; function __promisify__( stream1: NodeJS.ReadableStream, stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream, ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | PipelineOptions> ): Promise<void>; } interface Pipe { close(): void; hasRef(): boolean; ref(): void; unref(): void; } /** * Returns whether the stream has encountered an error. * @since v17.3.0 */ function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean; /** * Returns whether the stream is readable. * @since v17.4.0 */ function isReadable(stream: Readable | NodeJS.ReadableStream): boolean; const promises: typeof streamPromises; const consumers: typeof streamConsumers; } export = internal; } declare module 'node:stream' { import stream = require('stream'); export = stream; }
PypiClean
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/plugins/moment/locale/nn.js
// Moment.js locale configuration for Norwegian Nynorsk (nn).
// UMD wrapper: attach via CommonJS, AMD, or the global `moment` object.
;(function (root, attach) {
    typeof exports === 'object' &&
    typeof module !== 'undefined' &&
    typeof require === 'function'
        ? attach(require('../moment'))
        : typeof define === 'function' && define.amd
        ? define(['../moment'], attach)
        : attach(root.moment);
})(this, function (moment) {
    'use strict';

    // Full locale specification for Nynorsk.  The *ParseExact flags let
    // moment match both the long and abbreviated name forms when parsing.
    var spec = {
        months: 'januar_februar_mars_april_mai_juni_juli_august_september_oktober_november_desember'.split(
            '_'
        ),
        monthsShort: 'jan._feb._mars_apr._mai_juni_juli_aug._sep._okt._nov._des.'.split(
            '_'
        ),
        monthsParseExact: true,
        weekdays: 'sundag_måndag_tysdag_onsdag_torsdag_fredag_laurdag'.split('_'),
        weekdaysShort: 'su._må._ty._on._to._fr._lau.'.split('_'),
        weekdaysMin: 'su_må_ty_on_to_fr_la'.split('_'),
        weekdaysParseExact: true,
        longDateFormat: {
            LT: 'HH:mm',
            LTS: 'HH:mm:ss',
            L: 'DD.MM.YYYY',
            LL: 'D. MMMM YYYY',
            LLL: 'D. MMMM YYYY [kl.] H:mm',
            LLLL: 'dddd D. MMMM YYYY [kl.] HH:mm',
        },
        calendar: {
            sameDay: '[I dag klokka] LT',
            nextDay: '[I morgon klokka] LT',
            nextWeek: 'dddd [klokka] LT',
            lastDay: '[I går klokka] LT',
            lastWeek: '[Føregåande] dddd [klokka] LT',
            sameElse: 'L',
        },
        relativeTime: {
            future: 'om %s',
            past: '%s sidan',
            s: 'nokre sekund',
            ss: '%d sekund',
            m: 'eit minutt',
            mm: '%d minutt',
            h: 'ein time',
            hh: '%d timar',
            d: 'ein dag',
            dd: '%d dagar',
            w: 'ei veke',
            ww: '%d veker',
            M: 'ein månad',
            MM: '%d månader',
            y: 'eit år',
            yy: '%d år',
        },
        dayOfMonthOrdinalParse: /\d{1,2}\./,
        ordinal: '%d.',
        week: {
            dow: 1, // Monday is the first day of the week.
            doy: 4, // The week that contains Jan 4th is the first week of the year.
        },
    };

    var nn = moment.defineLocale('nn', spec);

    return nn;
});
PypiClean
/Adafruit-PlatformDetect-3.49.0.tar.gz/Adafruit-PlatformDetect-3.49.0/docs/index.rst
.. include:: ../README.rst Table of Contents ================= .. toctree:: :maxdepth: 4 :hidden: self .. toctree:: :caption: API Reference :maxdepth: 3 api .. toctree:: :caption: Tutorials Adding a Single Board Computer to PlatformDetect for Blinka <https://learn.adafruit.com/adding-a-single-board-computer-to-platformdetect-for-blinka> .. toctree:: :caption: Related Products .. toctree:: :caption: Other Links Download <https://github.com/adafruit/Adafruit_Python_PlatformDetect/releases/latest> CircuitPython Reference Documentation <https://circuitpython.readthedocs.io> CircuitPython Support Forum <https://forums.adafruit.com/viewforum.php?f=60> Discord Chat <https://adafru.it/discord> Adafruit Learning System <https://learn.adafruit.com> Adafruit Blog <https://blog.adafruit.com> Adafruit Store <https://www.adafruit.com> Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search`
PypiClean
/DisplayFx-1.1.3.tar.gz/DisplayFx-1.1.3/README.rst
Insert headline here

Insert description here

=======
Testing
=======

This project uses ``pytest`` to run tests and also to test docstring examples.

Install the test dependencies.

.. code-block:: bash

    $ pip install -r requirements_test.txt

Run the tests.

==========
Developing
==========

This project uses ``black`` to format code and ``flake8`` for linting. We
also support ``pre-commit`` to ensure these have been run. To configure your
local environment please install these development dependencies and set up
the commit hooks.

.. code-block:: bash

    $ pip install black flake8 pre-commit
    $ pre-commit install

=========
Releasing
=========

Releases are published automatically when a tag is pushed to GitHub.

.. code-block:: bash

    # Set next version number
    export RELEASE=x.x.x

    # Create tags
    git commit --allow-empty -m "Release $RELEASE"
    git tag -a $RELEASE -m "Version $RELEASE"

    # Push
    git push upstream --tags
PypiClean
/HTSQL-2.3.3.tar.gz/HTSQL-2.3.3/src/htsql/tweak/shell/default/act.py
from ....core.context import context from ....core.adapter import adapt from ....core.cmd.act import Act, RenderAction, act from ....core.cmd.command import UniversalCmd, DefaultCmd from ....core.cmd.summon import Recognize from ..command import ShellCmd from ....core.syn.parse import parse from ....core.syn.syntax import SkipSyntax from ....core.error import Error import re escape_pattern = r"""%(?:(?P<code>[0-9A-Fa-f]{2})|..)""" escape_regexp = re.compile(escape_pattern) def unquote(query): def replace(match): code = match.group('code') if not code: return match.group() code = int(code, 16) if code == 0x00: return match.group() return chr(code) return escape_regexp.sub(replace, query) class ShellRenderUniversal(Act): adapt(UniversalCmd, RenderAction) def __call__(self): addon = context.app.tweak.shell.default command = None content_type = "" if 'HTTP_ACCEPT' in self.action.environ: content_types = self.action.environ['HTTP_ACCEPT'].split(',') if len(content_types) == 1: [content_type] = content_types if ';' in content_type: content_type = content_type.split(';', 1)[0] content_type = content_type.strip() else: content_type = "*/*" if content_type != "*/*": return super(ShellRenderUniversal, self).__call__() try: syntax = parse(self.command.query) if addon.on_root and isinstance(syntax, SkipSyntax): command = ShellCmd(is_implicit=True) else: command = Recognize.__invoke__(syntax) if command is None: if (addon.on_default and not isinstance(syntax, SkipSyntax)): query = unquote(self.command.query) query = query.decode('utf-8', 'replace') command = ShellCmd(query, is_implicit=True) else: command = DefaultCmd(syntax) return act(command, self.action) except Error: if not addon.on_error: raise query = unquote(self.command.query) query = query.decode('utf-8', 'replace') command = ShellCmd(query, is_implicit=True) return act(command, self.action)
PypiClean
/Django-4.2.4.tar.gz/Django-4.2.4/django/contrib/gis/db/backends/spatialite/base.py
from ctypes.util import find_library from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db.backends.sqlite3.base import DatabaseWrapper as SQLiteDatabaseWrapper from .client import SpatiaLiteClient from .features import DatabaseFeatures from .introspection import SpatiaLiteIntrospection from .operations import SpatiaLiteOperations from .schema import SpatialiteSchemaEditor class DatabaseWrapper(SQLiteDatabaseWrapper): SchemaEditorClass = SpatialiteSchemaEditor # Classes instantiated in __init__(). client_class = SpatiaLiteClient features_class = DatabaseFeatures introspection_class = SpatiaLiteIntrospection ops_class = SpatiaLiteOperations def __init__(self, *args, **kwargs): # Trying to find the location of the SpatiaLite library. # Here we are figuring out the path to the SpatiaLite library # (`libspatialite`). If it's not in the system library path (e.g., it # cannot be found by `ctypes.util.find_library`), then it may be set # manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting. self.lib_spatialite_paths = [ name for name in [ getattr(settings, "SPATIALITE_LIBRARY_PATH", None), "mod_spatialite.so", "mod_spatialite", find_library("spatialite"), ] if name is not None ] super().__init__(*args, **kwargs) def get_new_connection(self, conn_params): conn = super().get_new_connection(conn_params) # Enabling extension loading on the SQLite connection. try: conn.enable_load_extension(True) except AttributeError: raise ImproperlyConfigured( "SpatiaLite requires SQLite to be configured to allow " "extension loading." ) # Load the SpatiaLite library extension on the connection. for path in self.lib_spatialite_paths: try: conn.load_extension(path) except Exception: if getattr(settings, "SPATIALITE_LIBRARY_PATH", None): raise ImproperlyConfigured( "Unable to load the SpatiaLite library extension " "as specified in your SPATIALITE_LIBRARY_PATH setting." 
) continue else: break else: raise ImproperlyConfigured( "Unable to load the SpatiaLite library extension. " "Library names tried: %s" % ", ".join(self.lib_spatialite_paths) ) return conn def prepare_database(self): super().prepare_database() # Check if spatial metadata have been initialized in the database with self.cursor() as cursor: cursor.execute("PRAGMA table_info(geometry_columns);") if cursor.fetchall() == []: if self.ops.spatial_version < (5,): cursor.execute("SELECT InitSpatialMetaData(1)") else: cursor.execute("SELECT InitSpatialMetaDataFull(1)")
PypiClean
/Flask-Weixin-0.5.0.tar.gz/Flask-Weixin-0.5.0/flask_weixin.py
import time import hashlib from datetime import datetime from collections import namedtuple try: from lxml import etree except ImportError: from xml.etree import cElementTree as etree except ImportError: from xml.etree import ElementTree as etree try: from flask import current_app, request, Response except ImportError: current_app = None request = None Response = None __all__ = ('Weixin',) __version__ = '0.5.0' __author__ = 'Hsiaoming Yang <me@lepture.com>' StandaloneApplication = namedtuple('StandaloneApplication', ['config']) class Weixin(object): """Interface for mp.weixin.qq.com http://mp.weixin.qq.com/wiki/index.php """ def __init__(self, app=None): self._registry = {} self._registry_without_key = [] if isinstance(app, dict): # flask-weixin can be used without flask app = StandaloneApplication(config=app) if app is None: self.app = current_app else: self.init_app(app) self.app = app def init_app(self, app): app.config.setdefault('WEIXIN_TOKEN', None) app.config.setdefault('WEIXIN_SENDER', None) app.config.setdefault('WEIXIN_EXPIRES_IN', 0) @property def token(self): return self.app.config['WEIXIN_TOKEN'] @property def sender(self): return self.app.config['WEIXIN_SENDER'] @property def expires_in(self): return self.app.config['WEIXIN_EXPIRES_IN'] def validate(self, signature, timestamp, nonce): """Validate request signature. :param signature: A string signature parameter sent by weixin. :param timestamp: A int timestamp parameter sent by weixin. :param nonce: A int nonce parameter sent by weixin. 
""" if not self.token: raise RuntimeError('WEIXIN_TOKEN is missing') if self.expires_in: try: timestamp = int(timestamp) except (ValueError, TypeError): # fake timestamp return False delta = time.time() - timestamp if delta < 0: # this is a fake timestamp return False if delta > self.expires_in: # expired timestamp return False values = [self.token, str(timestamp), str(nonce)] s = ''.join(sorted(values)) hsh = hashlib.sha1(s.encode('utf-8')).hexdigest() return signature == hsh def parse(self, content): """Parse xml body sent by weixin. :param content: A text of xml body. """ raw = {} try: root = etree.fromstring(content) except SyntaxError as e: raise ValueError(*e.args) for child in root: raw[child.tag] = child.text formatted = self.format(raw) msg_type = formatted['type'] msg_parser = getattr(self, 'parse_%s' % msg_type, None) if callable(msg_parser): parsed = msg_parser(raw) else: parsed = self.parse_invalid_type(raw) formatted.update(parsed) return formatted def format(self, kwargs): timestamp = int(kwargs.get('CreateTime', 0)) return { 'id': kwargs.get('MsgId'), 'timestamp': timestamp, 'receiver': kwargs.get('ToUserName'), 'sender': kwargs.get('FromUserName'), 'type': kwargs.get('MsgType'), 'time': datetime.fromtimestamp(timestamp), } def parse_text(self, raw): return {'content': raw.get('Content')} def parse_image(self, raw): return {'picurl': raw.get('PicUrl')} def parse_location(self, raw): return { 'location_x': raw.get('Location_X'), 'location_y': raw.get('Location_Y'), 'scale': int(raw.get('Scale', 0)), 'label': raw.get('Label'), } def parse_link(self, raw): return { 'title': raw.get('Title'), 'description': raw.get('Description'), 'url': raw.get('url'), } def parse_event(self, raw): return { 'event': raw.get('Event'), 'event_key': raw.get('EventKey'), 'ticket': raw.get('Ticket'), 'latitude': raw.get('Latitude'), 'longitude': raw.get('Longitude'), 'precision': raw.get('Precision'), } def parse_voice(self, raw): return { 'media_id': raw.get('MediaID'), 
'format': raw.get('Format'), 'recognition': raw.get('Recognition'), } def parse_invalid_type(self, raw): return {} def reply(self, username, type='text', sender=None, **kwargs): """Create the reply text for weixin. The reply varies per reply type. The acceptable types are `text`, `music` and `news`. Each type accepts different parameters, but they share some common parameters: * username: the receiver's username * type: the reply type, aka text, music and news * sender: sender is optional if you have a default value Text reply requires an additional parameter of `content`. Music reply requires 4 more parameters: * title: A string for music title * description: A string for music description * music_url: A link of the music * hq_music_url: A link of the high quality music News reply requires an additional parameter of `articles`, which is a list/tuple of articles, each one is a dict: * title: A string for article title * description: A string for article description * picurl: A link for article cover image * url: A link for article url """ sender = sender or self.sender if not sender: raise RuntimeError('WEIXIN_SENDER or sender argument is missing') if type == 'text': content = kwargs.get('content', '') return text_reply(username, sender, content) if type == 'music': values = {} for k in ('title', 'description', 'music_url', 'hq_music_url'): values[k] = kwargs.get(k) return music_reply(username, sender, **values) if type == 'news': items = kwargs.get('articles', []) return news_reply(username, sender, *items) if type == 'customer_service': service_account = kwargs.get('service_account', None) return transfer_customer_service_reply(username, sender, service_account) def register(self, key=None, func=None, **kwargs): """Register a command helper function. 
You can register the function:: def print_help(**kwargs): username = kwargs.get('sender') sender = kwargs.get('receiver') return weixin.reply( username, sender=sender, content='text reply' ) weixin.register('help', print_help) It is also accessible as a decorator:: @weixin.register('help') def print_help(*args, **kwargs): username = kwargs.get('sender') sender = kwargs.get('receiver') return weixin.reply( username, sender=sender, content='text reply' ) """ if func: if key is None: limitation = frozenset(kwargs.items()) self._registry_without_key.append((func, limitation)) else: self._registry[key] = func return func return self.__call__(key, **kwargs) def __call__(self, key, **kwargs): """Register a reply function. Only available as a decorator:: @weixin('help') def print_help(*args, **kwargs): username = kwargs.get('sender') sender = kwargs.get('receiver') return weixin.reply( username, sender=sender, content='text reply' ) """ def wrapper(func): self.register(key, func=func, **kwargs) return func return wrapper def view_func(self): """Default view function for Flask app. 
This is a simple implementation for view func, you can add it to your Flask app:: weixin = Weixin(app) app.add_url_rule('/', view_func=weixin.view_func) """ if request is None: raise RuntimeError('view_func need Flask be installed') signature = request.args.get('signature') timestamp = request.args.get('timestamp') nonce = request.args.get('nonce') if not self.validate(signature, timestamp, nonce): return 'signature failed', 400 if request.method == 'GET': echostr = request.args.get('echostr', '') return echostr try: ret = self.parse(request.data) except ValueError: return 'invalid', 400 if 'type' not in ret: # not a valid message return 'invalid', 400 if ret['type'] == 'text' and ret['content'] in self._registry: func = self._registry[ret['content']] else: ret_set = frozenset(ret.items()) matched_rules = ( _func for _func, _limitation in self._registry_without_key if _limitation.issubset(ret_set)) func = next(matched_rules, None) # first matched rule if func is None: if '*' in self._registry: func = self._registry['*'] else: func = 'failed' if callable(func): text = func(**ret) else: # plain text text = self.reply( username=ret['sender'], sender=ret['receiver'], content=func, ) return Response(text, content_type='text/xml; charset=utf-8') view_func.methods = ['GET', 'POST'] def text_reply(username, sender, content): shared = _shared_reply(username, sender, 'text') template = '<xml>%s<Content><![CDATA[%s]]></Content></xml>' return template % (shared, content) def music_reply(username, sender, **kwargs): kwargs['shared'] = _shared_reply(username, sender, 'music') template = ( '<xml>' '%(shared)s' '<Music>' '<Title><![CDATA[%(title)s]]></Title>' '<Description><![CDATA[%(description)s]]></Description>' '<MusicUrl><![CDATA[%(music_url)s]]></MusicUrl>' '<HQMusicUrl><![CDATA[%(hq_music_url)s]]></HQMusicUrl>' '</Music>' '</xml>' ) return template % kwargs def news_reply(username, sender, *items): item_template = ( '<item>' '<Title><![CDATA[%(title)s]]></Title>' 
'<Description><![CDATA[%(description)s]]></Description>' '<PicUrl><![CDATA[%(picurl)s]]></PicUrl>' '<Url><![CDATA[%(url)s]]></Url>' '</item>' ) articles = [item_template % o for o in items] template = ( '<xml>' '%(shared)s' '<ArticleCount>%(count)d</ArticleCount>' '<Articles>%(articles)s</Articles>' '</xml>' ) dct = { 'shared': _shared_reply(username, sender, 'news'), 'count': len(items), 'articles': ''.join(articles) } return template % dct def transfer_customer_service_reply(username, sender, service_account): template = ( '<xml>%(shared)s' '%(transfer_info)s</xml>') transfer_info = '' if service_account: transfer_info = ( '<TransInfo>' '<KfAccount>![CDATA[%s]]</KfAccount>' '</TransInfo>') % service_account dct = { 'shared': _shared_reply(username, sender, type='transfer_customer_service'), 'transfer_info': transfer_info } return template % dct def _shared_reply(username, sender, type): dct = { 'username': username, 'sender': sender, 'type': type, 'timestamp': int(time.time()), } template = ( '<ToUserName><![CDATA[%(username)s]]></ToUserName>' '<FromUserName><![CDATA[%(sender)s]]></FromUserName>' '<CreateTime>%(timestamp)d</CreateTime>' '<MsgType><![CDATA[%(type)s]]></MsgType>' ) return template % dct
PypiClean
/Buildpan_CLI-1.0-py3-none-any.whl/buildpan/platform_installer.py
import subprocess import sys from buildpan import setting import datetime, requests info = setting.info fetch_log = info["FETCH_LOG_URL"] def node_installer(node_ver, project_id, repo_name, username): ''' node installer this function to be called for Linux machine ''' try: client_os=sys.platform curtime = datetime.datetime.now() if client_os == "linux": subprocess.run("curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.38.0/install.sh | bash", shell= True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) subprocess.run("source ~/.nvm/nvm.sh", shell= True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,executable='/bin/bash') popen_arg = "nvm install "+ node_ver subprocess.call(['/bin/bash', '-i', '-c', popen_arg]) requests.post(fetch_log + "?" +'project_id='+project_id+'&repo_name='+repo_name+'&Time='+str(curtime)+'&user_name='+username+'&message=Installer installed'+'&status=success&operation=platform installer') if node_ver != "latest": popen_arg = "nvm use "+ node_ver subprocess.call(['/bin/bash', '-i', '-c', popen_arg]) elif client_os == "win32" or client_os == "cygwin": popen_arg = "nvm install "+ node_ver result = subprocess.run(popen_arg ,shell= True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) requests.post(fetch_log + "?" +'project_id='+project_id+'&repo_name='+repo_name+'&Time='+str(curtime)+'&user_name='+username+'&message=Installer : '+str(result.stdout.decode())+'&status=success&operation=platform installer') if node_ver != "latest": popen_arg = "nvm use "+ node_ver subprocess.run(popen_arg ,shell= True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except Exception as e: requests.post(fetch_log + "?" +'project_id='+project_id+'&repo_name='+repo_name+'&Time='+str(curtime)+'&user_name='+username+'&message=Installer : '+str(e)+'&status=failed&operation=platform installer')
PypiClean
/Evmlab-0.3.0.0.1-py3-none-any.whl/evmlab/genesis.py
import json def mktemp(prefix = "", suffix=""): import random, string, tempfile rand = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(8)]) temp_path = "%s/%s%s%s" % (tempfile.gettempdir(), prefix, rand, suffix) return temp_path class Genesis(object): """ Utility to create genesis files""" def __init__(self): self.alloc = {} self.coinbase = "0x0000000000000000000000000000000000000000" self.timestamp = "0x00" self.gasLimit = "0x3D0900" self.difficulty = "0x01" self.blockNumber = 0 self.config = { "eip150Block": 0, "eip158Block": 0, "eip155Block": 0, "homesteadBlock": 0, "daoForkBlock": 0, "byzantiumBlock" : 2000, } def geth(self): g = { "nonce": "0x0000000000000000", "difficulty": self.difficulty, "mixhash": "0x0000000000000000000000000000000000000000000000000000000000000000", "coinbase": self.coinbase, "timestamp": self.timestamp, "number": "0x{:02x}".format(self.blockNumber), "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "extraData": "0x0000000000000000000000000000000000000000000000000000000000000000", "gasLimit": self.gasLimit, "alloc": self.alloc, "config": self.config, } return g def parity(self): builtins = { "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } }, "0000000000000000000000000000000000000002": { "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } }, "0000000000000000000000000000000000000003": { "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } }, "0000000000000000000000000000000000000004": { "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } }, "0000000000000000000000000000000000000005": { "builtin": {"activate_at": self.config['byzantiumBlock'], "name": "modexp", "pricing": { "modexp": { "divisor": 20 }}}}, "0000000000000000000000000000000000000006": { "builtin": { "activate_at": 
self.config['byzantiumBlock'], "name": "alt_bn128_add", "pricing": { "linear": { "base": 500, "word": 0 }}}}, "0000000000000000000000000000000000000007": { "builtin": { "activate_at": self.config['byzantiumBlock'], "name": "alt_bn128_mul", "pricing": { "linear": { "base": 40000, "word": 0 }}}}, "0000000000000000000000000000000000000008": { "builtin": { "activate_at": self.config['byzantiumBlock'], "name": "alt_bn128_pairing", "pricing": { "alt_bn128_pairing": { "base": 100000, "pair": 80000 }}}}, } builtins.update(self.alloc) g = { "name": "lab", "engine": { "Ethash": { "params": { "minimumDifficulty": "0x020000", "difficultyBoundDivisor": "0x0800", "durationLimit": "0x0d", "blockReward": "0x4563918244F40000", "registrar": "", "frontierCompatibilityModeLimit": "0x0", "eip150Transition" : 0, "eip155Transition" : 0, "eip160Transition" : 0, "eip161abcTransition": 0, "eip161dTransition" : 0, "eip649Reward":"0x29A2241AF62C0000", "eip649Transition" : self.config['byzantiumBlock'], "eip100bTransition": self.config['byzantiumBlock'], } } }, "genesis": { "seal": { "ethereum": { "nonce": "0x0000000000000042", "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000" } }, "difficulty": "0x400", "author": "0x3333333333333333333333333333333333333333", "timestamp": "0x0", "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", "extraData": "0x0", "gasLimit": "0x8000000", }, "params": { "maxCodeSize": 24576, "gasLimitBoundDivisor": "0x0400", "accountStartNonce": "0x0", "maximumExtraDataSize": "0x20", "minGasLimit": "0x1388", "networkID" : "0x0", "eip98Transition": "0x0", "eip86Transition": "0x0", "eip140Transition" : self.config['byzantiumBlock'], "eip211Transition" : self.config['byzantiumBlock'], "eip214Transition" : self.config['byzantiumBlock'], "eip658Transition" : self.config['byzantiumBlock'], # Also new pre }, "accounts": builtins, } return g def has(self, account): return account.lower() in self.alloc.keys() def 
setCoinbase(self, coinbase): self.coinbase = coinbase def setGasLimit(self, gasLimit): self.gasLimit = gasLimit def setTimestamp(self, timestamp): self.timestamp = timestamp def setDifficulty(self, difficulty): self.difficulty = difficulty def setBlockNumber(self, blockNumber): self.blockNumber = int(blockNumber, 16) def setConfigHomestead(self): self.config['byzantiumBlock'] = 2000 self.config['eip158Block'] = 2000 self.config['eip155Block'] = 2000 self.config['eip150Block'] = 2000 self.config['homesteadBlock'] = 0 def setConfigTangerineWhistle(self): self.config['byzantiumBlock'] = 2000 self.config['eip158Block'] = 2000 self.config['eip155Block'] = 2000 self.config['eip150Block'] = 0 self.config['homesteadBlock'] = 0 def setConfigSpuriousDragon(self): self.config['byzantiumBlock'] = 2000 self.config['eip158Block'] = 0 self.config['eip155Block'] = 0 self.config['eip150Block'] = 0 self.config['homesteadBlock'] = 0 def setConfigMetropolis(self): self.config['byzantiumBlock'] = 0 self.config['eip158Block'] = 0 self.config['eip155Block'] = 0 self.config['eip150Block'] = 0 self.config['homesteadBlock'] = 0 def addPrestateAccount(self, account): self.alloc[account['address'].lower()] = { "balance" : account['balance'], "code" : account['code'], "nonce" : account['nonce'], } if 'storage' in account: self.alloc[account['address'].lower()]['storage'] = {} for key in account['storage']: self.alloc[account['address'].lower()]['storage'][key] = account['storage'][key] def add(self, account): """ Data format from EtherChain: { "address": "0x6090a6e47849629b7245dfa1ca21d94cd15878ef", "balance": 0, "nonce": null, "code": "0x60...", "name": null, "storage": null, "firstSeen": "2017-04-26T19:12:56.000Z" } """ n = account['nonce'] if n is None: n = 0 b ="0x%x" % (account['balance']) code = account['code'].hex() # code is a HexBytes object self.alloc[account['address'].lower()] = { "balance" : b, "code" : code, "nonce" : hex(n), } def codeAt(self,addr): addr = addr.lower() if addr 
in self.alloc.keys(): acc = self.alloc[addr] if 'code' in acc: return acc['code'] return "" def addStorage(self, account, key, value): ac = self.alloc[account.lower()] key = "0x{:064x}".format(int(key,16)) if 'storage' not in ac.keys(): ac['storage'] = {} if 'hex' in dir(value): value = value.hex() else: value = "0x{:064x}".format(int(value,16)) ac['storage'][key]=value def export(self,prefix="genesis"): geth_genesis = self.export_geth(prefix="%s-genesis-geth_" % prefix) parity_genesis = self.export_parity(prefix="%s-genesis-parity_" % prefix) return (geth_genesis, parity_genesis) def export_geth(self, prefix = None): temp_path = mktemp(prefix = prefix, suffix=".json") with open(temp_path, 'w') as f : json.dump(self.geth(),f) return temp_path def export_parity(self, prefix = None): temp_path = mktemp(prefix = prefix, suffix=".json") with open(temp_path, 'w') as f : json.dump(self.parity(),f) return temp_path def prettyprint(self): import pprint pp = pprint.PrettyPrinter(indent=2) pp.pprint(self.geth())
PypiClean
/FairNLP-5.1.0.tar.gz/FairNLP-5.1.0/FNLP/LanguageEngines/__init__.py
from datetime import datetime from F import LIST from F.CLASS import FairClass from FNLP import Merge class BaseModel(FairClass): input_models = [] webpage_models = [] def add_input_models(self, input_models:list): self.input_models = input_models def get_content(self, model): return self.get_dict("content", model, None) def get_date(self, model): return self.get_dict("date", model, None) def absorb_model(self, model): cm = model cm_vars = cm.get_list_of_variables() for var in cm_vars: cm_value = cm.get_attribute(var) self_vars = self.get_list_of_variables() if var in self_vars: self_value = self.get_attribute(var) result = None # create ignore list, like pid. if not cm_value or var in ['pid']: continue if str(var).startswith("original_"): continue if str(var).startswith("_"): continue if str(var).startswith("input"): continue if str(var).startswith("input"): continue if type(cm_value) in [int]: result = int(cm_value) + int(self_value) elif type(cm_value) in [list]: result = LIST.flatten(cm_value, self_value) elif type(cm_value) in [dict]: cm_value: dict key = next(iter(cm_value)) if type(key) in [datetime]: continue result = Merge.add_word_counts(self_value, cm_value) self.set_variable(var, result) else: self.set_variable(var, cm_value) """ Import/Export """ def import_model(self, obj:dict): """ Load JSON Model """ self.fromJson(obj) def export_model(self): """ Export Model as JSON""" return self.toJson(removeNone=True) def print_model(self): print(self.toJson())
PypiClean
/Gbtestapi0.4-0.1a10.tar.gz/Gbtestapi0.4-0.1a10/src/gailbot/services/organizer/settings/interface/googleInterface.py
import os from pydantic import BaseModel, ValidationError from typing import Dict, Union from .engineSettingInterface import EngineSettingInterface from gailbot.core.utils.logger import makelogger from gailbot.core.engines.google import Google from gailbot.core.utils.general import ( copy, is_file, is_directory, make_dir, get_name, get_extension, ) from gailbot.configs import workspace_config_loader API_KEY_DIR = workspace_config_loader().engine_ws.google_api logger = makelogger("google_interface") class ValidateGoogle(BaseModel): engine: str google_api_key: str class Transcribe(BaseModel): """ NOTE: google does not support additional kwargs in transcription """ pass class Init(BaseModel): # the path to a file that stores the google api key google_api_key: str class GoogleInterface(EngineSettingInterface): """ Interface for the Google speech to text engine """ engine: str init: Init = None transcribe: Transcribe = None def load_google_setting(setting: Dict[str, str]) -> Union[bool, EngineSettingInterface]: """given a dictionary, load the dictionary as a google setting Args: setting (Dict[str, str]): the dictionary that contains the setting data Returns: Union[bool , SettingInterface]: if the setting dictionary is validated by the google setting interface, return the google setting interface as an instance of SettingInterface, else return false """ logger.info(setting) if not "engine" in setting.keys() or setting["engine"] != "google": return False try: setting = setting.copy() validate = ValidateGoogle(**setting) if not is_directory(API_KEY_DIR): make_dir(API_KEY_DIR) # check that the api key is valid assert Google.is_valid_google_api(setting["google_api_key"]) # save a copied version of the api key file to the workspace copied_api = os.path.join( API_KEY_DIR, get_name(setting["google_api_key"]) + ".json" ) setting["google_api_key"] = copy(setting["google_api_key"], copied_api) google_set = dict() google_set["engine"] = setting.pop("engine") google_set["init"] = 
dict() google_set["transcribe"] = dict() google_set["init"].update(setting) google_setting = GoogleInterface(**google_set) return google_setting except ValidationError as e: logger.error(e, exc_info=e) return False
PypiClean
/123_object_detection-0.1.tar.gz/123_object_detection-0.1/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import copy
import functools
from six.moves import range
import tensorflow.compat.v1 as tf
import tf_slim as slim

from object_detection.meta_architectures import ssd_meta_arch
from object_detection.models import feature_map_generators
from object_detection.utils import context_manager
from object_detection.utils import ops
from object_detection.utils import shape_utils
from nets import mobilenet_v1


# A modified config of mobilenet v1 that makes it more detection friendly.
def _create_modified_mobilenet_config():
  # Replace the last two depthwise-separable blocks of the stock MobileNet v1:
  # the second-to-last keeps its stride-2 downsampling but drops to depth 512,
  # and the final block becomes a stride-1, depth-256 layer — presumably to
  # shrink the channels feeding the FPN head below; TODO confirm rationale.
  conv_defs = copy.deepcopy(mobilenet_v1.MOBILENETV1_CONV_DEFS)
  conv_defs[-2] = mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=2, depth=512)
  conv_defs[-1] = mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=1, depth=256)
  return conv_defs


class SSDMobileNetV1FpnFeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
  """SSD Feature Extractor using MobilenetV1 FPN features."""

  def __init__(self,
               is_training,
               depth_multiplier,
               min_depth,
               pad_to_multiple,
               conv_hyperparams_fn,
               fpn_min_level=3,
               fpn_max_level=7,
               additional_layer_depth=256,
               reuse_weights=None,
               use_explicit_padding=False,
               use_depthwise=False,
               use_native_resize_op=False,
               override_base_feature_extractor_hyperparams=False):
    """SSD FPN feature extractor based on Mobilenet v1 architecture.

    Args:
      is_training: whether the network is in training mode.
      depth_multiplier: float depth multiplier for feature extractor.
      min_depth: minimum feature extractor depth.
      pad_to_multiple: the nearest multiple to zero pad the input height and
        width dimensions to.
      conv_hyperparams_fn: A function to construct tf slim arg_scope for conv2d
        and separable_conv2d ops in the layers that are added on top of the
        base feature extractor.
      fpn_min_level: the highest resolution feature map to use in FPN. The
        valid values are {2, 3, 4, 5} which map to MobileNet v1 layers
        {Conv2d_3_pointwise, Conv2d_5_pointwise, Conv2d_11_pointwise,
        Conv2d_13_pointwise}, respectively.
      fpn_max_level: the smallest resolution feature map to construct or use in
        FPN. FPN constructions uses features maps starting from fpn_min_level
        upto the fpn_max_level. In the case that there are not enough feature
        maps in the backbone network, additional feature maps are created by
        applying stride 2 convolutions until we get the desired number of fpn
        levels.
      additional_layer_depth: additional feature map layer channel depth.
      reuse_weights: whether to reuse variables. Default is None.
      use_explicit_padding: Whether to use explicit padding when extracting
        features. Default is False.
      use_depthwise: Whether to use depthwise convolutions. Default is False.
      use_native_resize_op: Whether to use tf.image.nearest_neighbor_resize
        to do upsampling in FPN. Default is false.
      override_base_feature_extractor_hyperparams: Whether to override
        hyperparameters of the base feature extractor with the one from
        `conv_hyperparams_fn`.
    """
    super(SSDMobileNetV1FpnFeatureExtractor, self).__init__(
        is_training=is_training,
        depth_multiplier=depth_multiplier,
        min_depth=min_depth,
        pad_to_multiple=pad_to_multiple,
        conv_hyperparams_fn=conv_hyperparams_fn,
        reuse_weights=reuse_weights,
        use_explicit_padding=use_explicit_padding,
        use_depthwise=use_depthwise,
        override_base_feature_extractor_hyperparams=
        override_base_feature_extractor_hyperparams)
    self._fpn_min_level = fpn_min_level
    self._fpn_max_level = fpn_max_level
    self._additional_layer_depth = additional_layer_depth
    # A custom conv-def list is only installed for the depthwise variant;
    # conv_defs=None below makes mobilenet_v1_base use its stock config.
    self._conv_defs = None
    if self._use_depthwise:
      self._conv_defs = _create_modified_mobilenet_config()
    self._use_native_resize_op = use_native_resize_op

  def preprocess(self, resized_inputs):
    """SSD preprocessing.

    Maps pixel values to the range [-1, 1].

    Args:
      resized_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.

    Returns:
      preprocessed_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.
    """
    # Linear rescale of [0, 255] pixel values to [-1, 1].
    return (2.0 / 255.0) * resized_inputs - 1.0

  def extract_features(self, preprocessed_inputs):
    """Extract features from preprocessed inputs.

    Args:
      preprocessed_inputs: a [batch, height, width, channels] float tensor
        representing a batch of images.

    Returns:
      feature_maps: a list of tensors where the ith tensor has shape
        [batch, height_i, width_i, depth_i]
    """
    # Fail fast on inputs too small for the backbone; 33 is presumably tied
    # to the network's total downsampling stride — TODO confirm.
    preprocessed_inputs = shape_utils.check_min_image_dim(
        33, preprocessed_inputs)

    with tf.variable_scope('MobilenetV1',
                           reuse=self._reuse_weights) as scope:
      with slim.arg_scope(
          mobilenet_v1.mobilenet_v1_arg_scope(
              is_training=None, regularize_depthwise=True)):
        # Optionally let the detection-head hyperparams override the base
        # extractor's arg_scope; otherwise use a no-op context manager.
        with (slim.arg_scope(self._conv_hyperparams_fn())
              if self._override_base_feature_extractor_hyperparams
              else context_manager.IdentityContextManager()):
          # Run the backbone; image_features maps endpoint names to tensors.
          _, image_features = mobilenet_v1.mobilenet_v1_base(
              ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple),
              final_endpoint='Conv2d_13_pointwise',
              min_depth=self._min_depth,
              depth_multiplier=self._depth_multiplier,
              conv_defs=self._conv_defs,
              use_explicit_padding=self._use_explicit_padding,
              scope=scope)
      # Scale a nominal channel depth by the multiplier, clamped to min_depth.
      depth_fn = lambda d: max(int(d * self._depth_multiplier), self._min_depth)
      with slim.arg_scope(self._conv_hyperparams_fn()):
        with tf.variable_scope('fpn', reuse=self._reuse_weights):
          # Backbone endpoints for FPN levels 2..5; index with (level - 2).
          feature_blocks = [
              'Conv2d_3_pointwise', 'Conv2d_5_pointwise', 'Conv2d_11_pointwise',
              'Conv2d_13_pointwise'
          ]
          # The backbone only provides levels up to 5; coarser levels are
          # synthesized with extra stride-2 convolutions below.
          base_fpn_max_level = min(self._fpn_max_level, 5)
          feature_block_list = []
          for level in range(self._fpn_min_level, base_fpn_max_level + 1):
            feature_block_list.append(feature_blocks[level - 2])
          fpn_features = feature_map_generators.fpn_top_down_feature_maps(
              [(key, image_features[key]) for key in feature_block_list],
              depth=depth_fn(self._additional_layer_depth),
              use_depthwise=self._use_depthwise,
              use_explicit_padding=self._use_explicit_padding,
              use_native_resize_op=self._use_native_resize_op)
          # Collect the top-down FPN outputs in fine-to-coarse order.
          feature_maps = []
          for level in range(self._fpn_min_level, base_fpn_max_level + 1):
            feature_maps.append(fpn_features['top_down_{}'.format(
                feature_blocks[level - 2])])
          last_feature_map = fpn_features['top_down_{}'.format(
              feature_blocks[base_fpn_max_level - 2])]
          # Construct coarse features for levels beyond the backbone's
          # deepest endpoint by stacking stride-2 convolutions.
          padding = 'VALID' if self._use_explicit_padding else 'SAME'
          kernel_size = 3
          for i in range(base_fpn_max_level + 1, self._fpn_max_level + 1):
            if self._use_depthwise:
              conv_op = functools.partial(
                  slim.separable_conv2d, depth_multiplier=1)
            else:
              conv_op = slim.conv2d
            if self._use_explicit_padding:
              last_feature_map = ops.fixed_padding(
                  last_feature_map, kernel_size)
            last_feature_map = conv_op(
                last_feature_map,
                num_outputs=depth_fn(self._additional_layer_depth),
                kernel_size=[kernel_size, kernel_size],
                stride=2,
                padding=padding,
                # Continue MobileNet's layer numbering after Conv2d_13.
                scope='bottom_up_Conv2d_{}'.format(i - base_fpn_max_level + 13))
            feature_maps.append(last_feature_map)
    return feature_maps
PypiClean
/DobbyStock-0.1.tar.gz/DobbyStock-0.1/step2- test files/Test_Stock.ipynb
``` import unittest from main_package.Stock_main import * class TestStock(unittest.TestCase): @classmethod def setUpClass(cls): print('setupClass') def setUp(self): print('Set up') def test_get_high_price(self): stock = Stock() high_prices = stock.get_high_price() self.assertIsInstance(high_prices, list) self.assertGreaterEqual(max(high_prices), 201) self.assertLessEqual(min(high_prices), 500) def test_get_low_price(self): stock = Stock() low_prices = stock.get_low_price() self.assertIsInstance(low_prices, list) self.assertGreaterEqual(max(low_prices), 50) self.assertLessEqual(min(low_prices), 200) def test_get_volume(self): stock = Stock() volumes = stock.get_volume() self.assertIsInstance(volumes, list) self.assertGreaterEqual(max(volumes), 1) self.assertLessEqual(min(volumes), 200) def test_get_size(self): stock = Stock() size = stock.get_size() self.assertIsInstance(size, int) self.assertGreaterEqual(size, 0) def test_str(self): stock = Stock() stock_str = str(stock) self.assertIsInstance(stock_str, str) self.assertIn("high price list:", stock_str) self.assertIn("low price list:", stock_str) self.assertIn("volume list:", stock_str) def tearDown(self): print('Tear Down') @classmethod def tearDownClass(cls): print('teardownClass') if __name__ == '__main__': unittest.main() ```
PypiClean
/CubeLang-0.1.4-py3-none-any.whl/libcube/parser.py
from typing import Iterator, Dict
from string import whitespace

from .actions import Turn, Action, Rotate
from .orientation import Side

# Face letters of standard cube notation mapped to cube sides.
SIDE_LETTERS: Dict[str, Side] = {
    "L": Side.LEFT, "R": Side.RIGHT, "F": Side.FRONT,
    "B": Side.BACK, "U": Side.TOP, "D": Side.BOTTOM
}

# Whole-cube rotation letters mapped to their rotation axes.
ROTATE_LETTERS: Dict[str, Side] = {
    "X": Side.RIGHT, "Y": Side.TOP, "Z": Side.FRONT
}

# Reverse mapping: any side back to the rotation-axis letter of its axis.
ROTATE_REVERSE: Dict[Side, str] = {
    Side.RIGHT: "X", Side.LEFT: "X",
    Side.TOP: "Y", Side.BOTTOM: "Y",
    Side.FRONT: "Z", Side.BACK: "Z"
}


class ParsingError(Exception):
    """Raised on malformed algorithm text; ``column`` is the 0-based
    offset of the offending character in the whitespace-stripped input."""

    def __init__(self, message: str, column: int) -> None:
        super(ParsingError, self).__init__(message)
        self.column = column


class ParsingStateMachine:
    """Character-driven state machine that parses cube notation (e.g.
    ``R U2 F'`` with optional ``[..]`` layer-range suffixes) into actions.

    Each ``state_*`` method consumes one character and returns a pair
    ``(advance, next_state)``: when ``advance`` is False the same character
    is re-fed to ``next_state``; ``next_state`` of None ends parsing.
    """

    def __init__(self):
        self.actions = []            # completed Action objects, in order
        self.column = 0              # current position in the input
        self.action_type = None     # pending face/rotation letter, if any
        self.amount = 1              # 1 = quarter turn, 2 = double, 3 = inverse
        self.current_number = 0     # digits accumulated inside [...]
        self.number_present = False  # whether current_number holds digits
        self.numbers = []            # parsed layer numbers (Ellipsis = range)

    def yield_action(self):
        """Flush the pending letter (if any) into an Action and reset the
        per-action parsing state."""
        if self.action_type is None:
            return
        if self.action_type in ROTATE_LETTERS:
            side = ROTATE_LETTERS[self.action_type]
            # amount == 3 encodes a prime (') rotation: same axis, opposite way.
            if self.amount == 3:
                side = side.opposite()
            # amount == 2 encodes a double rotation.
            self.actions.append(Rotate(side, self.amount == 2))
        else:
            # Face turn; pass the layer list when one was given, else 1.
            action = Turn(SIDE_LETTERS[self.action_type],
                          self.numbers if len(self.numbers) > 0 else 1,
                          self.amount)
            self.actions.append(action)
        self.action_type = None
        self.numbers = []
        self.amount = 1
        self.number_present = False

    def _unexpected(self, char: str):
        # "\n" is the sentinel appended by parse(), so it means premature end.
        if char == "\n":
            raise ParsingError(f"Unexpected end at {self.column + 1}",
                               self.column)
        else:
            raise ParsingError(
                f"Unexpected character: '{char}' at {self.column + 1}",
                self.column)

    def state_action_type(self, char: str):
        """Expect the letter that starts an action (or end of input)."""
        # Entering this state completes whatever action came before.
        self.yield_action()
        if char == "\n":
            return True, None
        elif char not in SIDE_LETTERS and char not in ROTATE_LETTERS:
            self._unexpected(char)
        else:
            self.action_type = char
            return True, self.state_action_spec

    def state_action_spec(self, char: str):
        """Expect an optional modifier after a letter: ' or 2 or [range]."""
        # A new letter immediately: re-feed it to state_action_type.
        if char in SIDE_LETTERS or char in ROTATE_LETTERS:
            return False, self.state_action_type
        if char == "'":
            self.amount = 3
            return True, self.state_range_start
        elif char == "2":
            self.amount = 2
            return True, self.state_range_start
        elif char == "[":
            # Re-feed the bracket so state_range_start sees it.
            return False, self.state_range_start
        elif char == "\n":
            return True, None
        else:
            self._unexpected(char)

    def state_range_start(self, char: str):
        """Expect an optional ``[`` opening a layer range."""
        if char == "[":
            # Layer ranges only make sense for face turns, not rotations.
            if self.action_type in ROTATE_LETTERS:
                self._unexpected(char)
            return True, self.state_range_number
        else:
            return False, self.state_action_type

    def state_range_number(self, char: str):
        """Consume digits, ``,`` separators, ``:`` range markers, and the
        closing ``]`` inside a layer-range suffix."""

        def next_number(force_number: bool = False):
            # Push the accumulated digits (if any) onto the numbers list.
            if not self.number_present and force_number:
                self._unexpected(char)
            elif self.number_present:
                self.numbers.append(self.current_number)
                self.current_number = 0
                self.number_present = False

        if char.isdigit():
            self.current_number = self.current_number * 10 + int(char)
            self.number_present = True
        elif char == ":":
            # A range marker needs a left endpoint unless it opens the list.
            if not self.number_present and len(self.numbers) > 0:
                self._unexpected(char)
            next_number()
            # Ellipsis stands in for the ":" between two layer numbers.
            self.numbers.append(...)
        elif char == ",":
            next_number(True)
        elif char == "]":
            # Closing bracket requires a trailing number or an open-ended ":".
            if not(self.number_present or
                   (len(self.numbers) > 0 and self.numbers[-1] == Ellipsis)):
                self._unexpected(char)
            next_number()
            return True, self.state_action_type
        else:
            self._unexpected(char)
        return True, self.state_range_number

    def parse(self, algorithm: str):
        """Run the machine over ``algorithm``; results land in self.actions."""
        # Strip ALL whitespace, then append "\n" as an end-of-input sentinel.
        algorithm = algorithm.translate(
            {ord(x): None for x in whitespace}) + "\n"
        state = self.state_action_type
        self.column = 0
        while self.column < len(algorithm):
            goto_next, state = state(algorithm[self.column])
            if goto_next:
                self.column += 1
        # Flush the final action, if the input ended right after a letter.
        self.yield_action()


def parse_actions(algorithm: str) -> Iterator[Action]:
    """Parse a cube-notation string into a sequence of Action objects."""
    sm = ParsingStateMachine()
    sm.parse(algorithm)
    return sm.actions
PypiClean
/GandB-distributions-chauhan.shi-0.0.1.tar.gz/GandB-distributions-chauhan.shi-0.0.1/distributions/Binomialdistribution.py
import math
import matplotlib.pyplot as plt

from .Generaldistribution import Distribution


class Binomial(Distribution):
    """Binomial distribution class for calculating and visualizing a
    Binomial distribution.

    Attributes:
        mean (float): the mean value of the distribution
        stdev (float): the standard deviation of the distribution
        data_list (list of floats): data extracted from a data file
        p (float): the probability of an event occurring
        n (int): number of trials
    """

    def __init__(self, prob=.5, size=20):
        self.n = size
        self.p = prob
        # The base class stores mean and stdev computed from p and n.
        Distribution.__init__(self, self.calculate_mean(),
                              self.calculate_stdev())

    def calculate_mean(self):
        """Calculate the mean (n * p) and store it on the instance.

        Returns:
            float: mean of the distribution
        """
        self.mean = self.p * self.n
        return self.mean

    def calculate_stdev(self):
        """Calculate the standard deviation sqrt(n * p * (1 - p)) and
        store it on the instance.

        Returns:
            float: standard deviation of the distribution
        """
        self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
        return self.stdev

    def replace_stats_with_data(self):
        """Recompute p and n from ``self.data`` (loaded by the base class)
        and refresh mean and stdev accordingly.

        Returns:
            float: the p value
            int: the n value
        """
        self.n = len(self.data)
        # Data is assumed to be 0/1 outcomes, so the sample mean equals p.
        self.p = 1.0 * sum(self.data) / len(self.data)
        self.mean = self.calculate_mean()
        self.stdev = self.calculate_stdev()
        # BUG FIX: the docstring promised p and n, but nothing was returned.
        return self.p, self.n

    def plot_bar(self):
        """Draw a bar chart of the expected outcome counts with matplotlib.

        Returns:
            None
        """
        plt.bar(x=['0', '1'],
                height=[(1 - self.p) * self.n, self.p * self.n])
        plt.title('Bar Chart of Data')
        plt.xlabel('outcome')
        plt.ylabel('count')

    def pdf(self, k):
        """Probability mass function of the binomial distribution.

        (The original docstring wrongly described a gaussian pdf with
        parameter ``x``.)

        Args:
            k (int): number of successes to evaluate the pmf at

        Returns:
            float: probability of exactly k successes in n trials
        """
        # C(n, k) * p^k * (1 - p)^(n - k)
        a = math.factorial(self.n) / (math.factorial(k) *
                                      (math.factorial(self.n - k)))
        b = (self.p ** k) * (1 - self.p) ** (self.n - k)
        return a * b

    def plot_bar_pdf(self):
        """Plot the pmf of the binomial distribution over k = 0..n.

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        x = []
        y = []
        # Evaluate the pmf at every possible outcome count.
        for i in range(self.n + 1):
            x.append(i)
            y.append(self.pdf(i))
        plt.bar(x, y)
        plt.title('Distribution of Outcomes')
        plt.ylabel('Probability')
        plt.xlabel('Outcome')
        plt.show()
        return x, y

    def __add__(self, other):
        """Add together two Binomial distributions with equal p.

        Args:
            other (Binomial): Binomial instance

        Raises:
            AssertionError: if the two distributions have different p values

        Returns:
            Binomial: Binomial distribution with n = n1 + n2
        """
        # BUG FIX: this check was an ``assert`` wrapped in a pointless
        # try/except that bare-re-raised; raise explicitly instead (and the
        # check no longer disappears under ``python -O``). The exception
        # type is kept as AssertionError for caller compatibility.
        if self.p != other.p:
            raise AssertionError('p values are not equal')
        result = Binomial()
        result.n = self.n + other.n
        result.p = self.p
        result.calculate_mean()
        result.calculate_stdev()
        return result

    def __repr__(self):
        """Return the characteristics of this Binomial instance."""
        return "mean {}, standard deviation {}, p {}, n {}".\
            format(self.mean, self.stdev, self.p, self.n)
PypiClean
/MatchZoo-test-1.0.tar.gz/MatchZoo-test-1.0/matchzoo/modules/attention.py
import typing

import torch
import torch.nn as nn
import torch.nn.functional as F


class Attention(nn.Module):
    """
    Attention module.

    :param input_size: Size of input.
    :param mask: An integer to mask the invalid values. Defaults to 0.

    Examples:
        >>> import torch
        >>> attention = Attention(input_size=10)
        >>> x = torch.randn(4, 5, 10)
        >>> x.shape
        torch.Size([4, 5, 10])
        >>> attention(x).shape
        torch.Size([4, 5])

    """

    def __init__(self, input_size: int = 100, mask: int = 0):
        """Attention constructor."""
        super().__init__()
        # Project each feature vector down to a single attention score.
        self.linear = nn.Linear(input_size, 1, bias=False)
        self.mask = mask

    def forward(self, x):
        """Perform attention on the input.

        Scores equal to ``self.mask`` are filled with -inf before the
        softmax so they receive zero attention weight.
        """
        x = self.linear(x).squeeze(dim=-1)
        # BUG FIX: the original built a bool tensor ``mask = (x != self.mask)``
        # and then compared it to ``self.mask`` again — for any nonzero mask
        # value that comparison is False everywhere, so nothing was masked.
        # Comparing the scores to the mask value directly is both correct for
        # nonzero masks and equivalent to the original when mask == 0.
        x = x.masked_fill(x == self.mask, -float('inf'))
        return F.softmax(x, dim=-1)


class BidirectionalAttention(nn.Module):
    """Computing the soft attention between two sequences."""

    def __init__(self):
        """Init."""
        super().__init__()

    def forward(self, v1, v1_mask, v2, v2_mask):
        """Forward.

        ``v1_mask`` / ``v2_mask`` are truthy at PADDED positions; those
        positions receive ~zero attention weight and are zeroed in the
        attended outputs.
        """
        similarity_matrix = v1.bmm(v2.transpose(2, 1).contiguous())

        # BUG FIX: the fill value was ``-1e-7`` (essentially zero), which
        # left masked positions with near-full softmax weight — masking did
        # nothing. A large negative fill drives their weight to ~0 while
        # avoiding the NaNs that -inf produces when a whole row is masked.
        # (.bool() silences the deprecated uint8-mask path.)
        v2_v1_attn = F.softmax(
            similarity_matrix.masked_fill(
                v1_mask.unsqueeze(2).bool(), -1e7), dim=1)
        v1_v2_attn = F.softmax(
            similarity_matrix.masked_fill(
                v2_mask.unsqueeze(1).bool(), -1e7), dim=2)

        # Attention-weighted summaries of each sequence w.r.t. the other.
        attended_v1 = v1_v2_attn.bmm(v2)
        attended_v2 = v2_v1_attn.transpose(1, 2).bmm(v1)

        # Zero out the rows that correspond to padded positions.
        attended_v1.masked_fill_(v1_mask.unsqueeze(2).bool(), 0)
        attended_v2.masked_fill_(v2_mask.unsqueeze(2).bool(), 0)

        return attended_v1, attended_v2


class MatchModule(nn.Module):
    """
    Computing the match representation for Match LSTM.

    :param hidden_size: Size of hidden vectors.
    :param dropout_rate: Dropout rate of the projection layer. Defaults to 0.

    Examples:
        >>> import torch
        >>> attention = MatchModule(hidden_size=10)
        >>> v1 = torch.randn(4, 5, 10)
        >>> v1.shape
        torch.Size([4, 5, 10])
        >>> v2 = torch.randn(4, 5, 10)
        >>> v2_mask = torch.ones(4, 5).to(dtype=torch.uint8)
        >>> attention(v1, v2, v2_mask).shape
        torch.Size([4, 5, 20])

    """

    def __init__(self, hidden_size, dropout_rate=0):
        """Init."""
        super().__init__()
        self.v2_proj = nn.Linear(hidden_size, hidden_size)
        # Fusion of [v1, v2_wsum, v1 - v2_wsum, v1 * v2_wsum] is 4x wide.
        self.proj = nn.Linear(hidden_size * 4, hidden_size * 2)
        self.dropout = nn.Dropout(p=dropout_rate)

    def forward(self, v1, v2, v2_mask):
        """Computing attention vectors and projection vectors.

        ``v2_mask`` is truthy at PADDED positions of ``v2``.
        """
        proj_v2 = self.v2_proj(v2)
        similarity_matrix = v1.bmm(proj_v2.transpose(2, 1).contiguous())
        # BUG FIX: same ``-1e-7`` typo as in BidirectionalAttention — masked
        # positions kept near-full weight; use a large negative fill instead.
        v1_v2_attn = F.softmax(
            similarity_matrix.masked_fill(
                v2_mask.unsqueeze(1).bool(), -1e7), dim=2)
        v2_wsum = v1_v2_attn.bmm(v2)
        # Standard matching fusion: raw, attended, difference, and product.
        fusion = torch.cat([v1, v2_wsum, v1 - v2_wsum, v1 * v2_wsum], dim=2)
        match = self.dropout(F.relu(self.proj(fusion)))
        return match
PypiClean