code
stringlengths 1
199k
|
|---|
from run import run_base
class run_names(run_base):
    # Verify behavior when multiple --name options passed

    def init_subargs(self):
        """Queue several --name options and record which one must win."""
        base = self.sub_stuff["cont"].get_unique_name()
        names = ['%s_%d' % (base, idx)
                 for idx in xrange(self.config['names_count'])]
        # Register every candidate name for cleanup, just in case
        self.sub_stuff['containers'].extend(names)
        self.sub_stuff['subargs'].extend("--name %s" % name for name in names)
        # Depending on the docker version under test, either the last or the
        # first --name option is expected to take effect.
        if self.config['last_name_sticks']:
            winner = names[-1]
        else:
            winner = names[0]
        self.sub_stuff['expected_name'] = winner
        super(run_names, self).init_subargs()

    def run_once(self):
        """Start the container and wait for it to finish."""
        super(run_names, self).run_once()
        cid = self.sub_stuff['cid'] = self.sub_stuff['dkrcmd'].stdout.strip()
        self.sub_stuff['containers'].append(cid)
        try:
            self.sub_stuff["cont"].wait_by_long_id(cid)
        except ValueError:
            pass    # container already finished and exited

    def postprocess(self):
        """Check the container's actual name against the expected one."""
        super(run_names, self).postprocess()
        inspect = self.sub_stuff["cont"].json_by_long_id(self.sub_stuff['cid'])
        self.failif(len(inspect) == 0)
        # docker sticks a "/" prefix on name (documented?)
        actual_name = str(inspect[0]['Name'][1:])
        self.failif(actual_name != self.sub_stuff['expected_name'],
                    "Actual name %s != expected name %s"
                    % (actual_name, self.sub_stuff['expected_name']))
|
import codecs
import collections
import json
import os
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import Qt # , SIGNAL
from PyQt4.QtGui import QDialog
from PyQt4.QtGui import QMessageBox
from brickv import config
from brickv.data_logger.event_logger import EventLogger, GUILogger
from brickv.data_logger.gui_config_handler import GuiConfigHandler
from brickv.data_logger.job import GuiDataJob
from brickv.data_logger.loggable_devices import Identifier
from brickv.data_logger.utils import Utilities
from brickv.device_dialog import LoggerDeviceDialog
from brickv.ui_logger_setup import Ui_Logger
class LoggerWindow(QDialog, Ui_Logger):
    """
    Function and Event handling class for the Ui_Logger.

    Hosts the data-logger setup dialog: connection settings, the device
    tree, the live data table and the GUI console/event log.
    """

    def __init__(self, parent):
        # parent: the Qt parent widget of this dialog.
        QDialog.__init__(self, parent)
        self.setWindowFlags(Qt.Window | Qt.WindowCloseButtonHint)
        self._gui_logger = GUILogger("GUILogger", EventLogger.EVENT_LOG_LEVEL)
        self._gui_job = None
        EventLogger.add_logger(self._gui_logger)

        # FIXME better way to find interval and uids in tree_widget?!
        # Tooltips double as type markers for tree cells (see tree_on_change).
        self.__tree_interval_tooltip = "Interval in milliseconds"
        self.__tree_uid_tooltip = "UID must be at least 3 Character long"
        self.data_logger_thread = None
        self.tab_console_warning = False
        self.logger_device_dialog = None

        # Code Inspector
        self.host_infos = None
        self.last_host = None
        self.host_index_changing = None

        # if self._table_widget is not None:#FIXME rework this like the console_tab <-- what does that mean?!
        # self.jobs.append()

        self.setupUi(self)
        self.widget_initialization()

    def widget_initialization(self):
        """
        Sets default values for some widgets
        """
        # Login data
        self.host_info_initialization()

        # GUI LOG LEVEL
        self.combo_log_level_init(self.combo_console_level)
        self.combo_console_level.setCurrentIndex(1)  # INFO LEVEL
        # set loglevel
        self.combo_console_level_changed()

        # LOGLEVEL FROM CONFIG
        self.combo_log_level_init(self.combo_loglevel)
        self.combo_loglevel.setCurrentIndex(0)  # DEBUG LEVEL

        self.signal_initialization()

    def signal_initialization(self):
        """
        Init of all important Signals and connections.
        """
        # Buttons
        self.btn_start_logging.clicked.connect(self.btn_start_logging_clicked)
        self.btn_save_config.clicked.connect(self.btn_save_config_clicked)
        self.btn_load_config.clicked.connect(self.btn_load_config_clicked)
        self.btn_set_logfile.clicked.connect(self.btn_set_logfile_clicked)
        self.btn_set_eventfile.clicked.connect(self.btn_set_eventfile_clicked)
        self.btn_console_clear.clicked.connect(self.btn_console_clear_clicked)
        self.combo_console_level.currentIndexChanged.connect(self.combo_console_level_changed)
        self.btn_add_device.clicked.connect(self.btn_add_device_clicked)
        self.btn_remove_device.clicked.connect(self.btn_remove_device_clicked)
        self.btn_remove_all_devices.clicked.connect(self.btn_remove_all_devices_clicked)
        self.tab_widget.currentChanged.connect(self.tab_reset_warning)
        self.btn_clear_tabel.clicked.connect(self.btn_clear_table_clicked)
        # Old-style connections: the GUI logger emits from a worker context
        self.connect(self._gui_logger, QtCore.SIGNAL(GUILogger.SIGNAL_NEW_MESSAGE), self.txt_console_output)
        self.connect(self._gui_logger, QtCore.SIGNAL(GUILogger.SIGNAL_NEW_MESSAGE_TAB_HIGHLIGHT),
                     self.txt_console_highlight_tab)

        # login information
        self.combo_host.currentIndexChanged.connect(self._host_index_changed)
        self.spinbox_port.valueChanged.connect(self._port_changed)

        self.checkbox_xively.stateChanged.connect(self.cb_xively_changed)

        self.tree_devices.itemDoubleClicked.connect(self.tree_on_double_click)
        self.tree_devices.itemChanged.connect(self.tree_on_change)

    def combo_log_level_init(self, combo_widget):
        # Fill the given combo box with the known log level names, sorted by
        # numeric level so the combo index order is stable.
        combo_widget.clear()
        od = collections.OrderedDict(sorted(GUILogger._convert_level.items()))
        for k in od.keys():
            combo_widget.addItem(od[k])

        # TODO dynamic way to set GUI LogLevel - not used at the moment!
        # set index
        # ll = GUILogger._convert_level[EventLogger.EVENT_LOG_LEVEL]
        # combo_widget_count = combo_widget.count()
        # for i in range(0, combo_widget_count):
        #     if ll == combo_widget.itemText(i):
        #         combo_widget.setCurrentIndex(i)
        #         break

    def host_info_initialization(self):
        """
        initialize host by getting information out of brickv.config
        """
        self.host_infos = config.get_host_infos(config.HOST_INFO_COUNT)
        # Guard so _host_index_changed/_port_changed ignore these edits
        self.host_index_changing = True

        for host_info in self.host_infos:
            self.combo_host.addItem(host_info.host)

        self.last_host = None
        self.combo_host.setCurrentIndex(0)
        self.spinbox_port.setValue(self.host_infos[0].port)
        self.host_index_changing = False

    def btn_start_logging_clicked(self):
        """
        Start/Stop of the logging process
        """
        if (self.data_logger_thread is not None) and (not self.data_logger_thread.stopped):
            # Running -> stop. Disconnect the button until _reset_stop()
            # re-wires it, so a double click can't trigger a second stop.
            self.btn_start_logging.clicked.disconnect()

            self.data_logger_thread.stop()
            self._reset_stop()

        elif self.data_logger_thread is None:
            # Stopped -> start a new logger thread from the current GUI config
            from data_logger import main
            arguments_map = {}
            arguments_map[main.GUI_CONFIG] = GuiConfigHandler.create_config_file(self)

            self._gui_job = GuiDataJob(name="GuiData-Writer")
            self.connect(self._gui_job, QtCore.SIGNAL(GuiDataJob.SIGNAL_NEW_DATA), self.table_add_row)
            arguments_map[main.GUI_ELEMENT] = self._gui_job

            self.data_logger_thread = main.main(arguments_map)

            if self.data_logger_thread is not None:
                # Lock the config tabs while logging is active
                self.btn_start_logging.setText("Stop Logging")
                self.tab_devices.setEnabled(False)
                self.tab_setup.setEnabled(False)
                # self.tab_xively.setEnabled(False)#nyi
                self.tab_widget.setCurrentIndex(self.tab_widget.indexOf(self.tab_console))
                self.tab_reset_warning()

    def _reset_stop(self):
        # Undo everything btn_start_logging_clicked() changed for a run
        self.tab_devices.setEnabled(True)
        self.tab_setup.setEnabled(True)
        # self.tab_xively.setEnabled(True)#nyi
        self.btn_start_logging.setText("Start Logging")

        self.disconnect(self._gui_job, QtCore.SIGNAL(GuiDataJob.SIGNAL_NEW_DATA), self.table_add_row)
        self.data_logger_thread = None
        self._gui_job = None

        self.btn_start_logging.clicked.connect(self.btn_start_logging_clicked)

    def btn_save_config_clicked(self):
        """
        Opens a FileSelectionDialog and saves the current config.
        """
        conf = GuiConfigHandler.create_config_file(self)
        fn = QtGui.QFileDialog.getSaveFileName(self, 'Save Config-File', os.getcwd(), filter='*.json')

        if fn == "":
            # cancel
            # NOTE(review): message says "load" but this is the save handler
            EventLogger.debug("Cancelled load Config.")
            return

        try:
            with open(fn, 'w') as outfile:
                json.dump(conf, outfile, sort_keys=True, indent=2)
        except Exception as e1:
            # NOTE(review): "Load Config" here also looks copy-pasted from
            # the load handler
            EventLogger.warning("Load Config - Exception: " + str(e1))
            QMessageBox.warning(self, 'Error',
                                'Could not save the Config-File! Look at the Log-File for further information.',
                                QMessageBox.Ok)
            return

        QMessageBox.information(self, 'Success', 'Config-File saved!', QMessageBox.Ok)
        EventLogger.info("Config-File saved to: " + str(fn))

    def btn_load_config_clicked(self):
        """
        Opens a FileSelectionDialog and loads the selected config.
        """
        fn = QtGui.QFileDialog.getOpenFileName(self, "Open Config-File...", os.getcwd(), "JSON-Files (*.json)")

        if fn == "":
            # cancel
            # NOTE(review): message says "save" but this is the load handler
            EventLogger.debug("Cancelled save Config.")
            return

        config_json = None
        try:
            with codecs.open(fn, 'r', 'UTF-8') as content_file:
                try:
                    config_json = json.load(content_file)
                except ValueError as e:
                    EventLogger.warning("Load Config - Cant parse the configuration file: " + str(e))
        except Exception as e1:
            EventLogger.warning("Load Config - Exception: " + str(e1))
            return

        EventLogger.info("Loaded Config-File from: " + str(fn))

        # devices
        config_blueprint = GuiConfigHandler.load_devices(config_json)
        if config_blueprint is None:
            return
        self.create_tree_items(config_blueprint)

        # general_section
        from brickv.data_logger.configuration_validator import ConfigurationReader
        self.update_setup_tab(config_json[ConfigurationReader.GENERAL_SECTION])

        # TODO add other information
        # xively

    def btn_set_logfile_clicked(self):
        """
        Opens a FileSelectionDialog and sets the selected path for the data output file.
        """
        fn = self.__choose_file_dialog('Choose Config Destination', "CSV-Files (*.csv);;Text-Files (*.txt)")
        if fn == "":
            # cancel
            self.line_path_to_file.setText("")
            EventLogger.debug("Cancelled select Config-File-Path.")
            return

        self.line_path_to_file.setText(fn)

    def btn_set_eventfile_clicked(self):
        """
        Opens a FileSelectionDialog and sets the selected path for the event output file.
        """
        fn = self.__choose_file_dialog('Choose Eventfile destination', "Log-Files (*.log)")
        if fn == "":
            # cancel
            EventLogger.debug("Cancelled select Eventfile-Path.")
            return

        self.line_path_to_eventfile.setText(fn)

    def __choose_file_dialog(self, msg, filter_string):
        # Shared save-file dialog; returns "" when the user cancels
        return QtGui.QFileDialog.getSaveFileName(self, msg, os.getcwd(), filter_string)

    def btn_add_device_clicked(self):
        """
        Opens the DeviceDialog in Add-Mode.
        """
        if self.logger_device_dialog is None:
            self.logger_device_dialog = LoggerDeviceDialog(self)

        # blueprint = Identifier.DEVICE_DEFINITIONS
        self.logger_device_dialog.init_dialog(self)
        self.logger_device_dialog.show()

    def btn_remove_device_clicked(self):
        """
        Removes selected Device
        """
        selected_item = self.tree_devices.selectedItems()
        for index in range(0, len(selected_item)):
            try:
                if selected_item[index] is None:
                    continue

                device_name = selected_item[index].text(0)
                device_id = selected_item[index].text(1)

                if selected_item[index].text(0) not in Identifier.DEVICE_DEFINITIONS:
                    # have to find the parent: a child row was selected, walk
                    # up until a known top-level device entry is reached
                    current_item = selected_item[0]
                    while True:
                        if current_item.parent() is None:
                            if current_item.text(0) not in Identifier.DEVICE_DEFINITIONS:
                                EventLogger.error("Cant remove device: " + selected_item[index].text(0))
                                device_name = ""
                                device_id = ""
                                break
                            else:
                                device_name = current_item.text(0)
                                device_id = current_item.text(1)
                                break
                        else:
                            current_item = current_item.parent()

                self.remove_item_from_tree(device_name, device_id)
            except Exception as e:
                # Items already deleted by Qt raise "wrapped C/C++ object ..."
                if not str(e).startswith("wrapped C/C++ object"):
                    EventLogger.error("Cant remove device: " + str(e))  # was already removed

    def btn_remove_all_devices_clicked(self):
        # Drop every device entry from the tree
        self.tree_devices.clear()

    def btn_clear_table_clicked(self):
        """
        Clears the Data table.
        """
        self.table_widget.setRowCount(0)

    def tab_reset_warning(self):
        """
        Resets the Warning @ the console tab.
        """
        # Only clear the highlight when the console tab is actually visible
        if not self.tab_console_warning or self.tab_widget.currentWidget().objectName() != self.tab_console.objectName():
            return

        self.tab_console_warning = False
        from PyQt4.QtGui import QColor
        self.tab_set(self.tab_widget.indexOf(self.tab_console), QColor(0, 0, 0), None)

    def combo_console_level_changed(self):
        """
        Changes the log level dynamically.
        """
        ll = self.combo_console_level.currentText()

        od = collections.OrderedDict(sorted(self._gui_logger._convert_level.items()))
        for k in od.keys():
            if ll == od[k]:
                self._gui_logger.level = k
                break

    def tab_set(self, tab_index, color, icon=None):
        """
        Sets the font Color and an icon, if given, at a specific tab.
        """
        from PyQt4.QtGui import QIcon
        self.tab_widget.tabBar().setTabTextColor(tab_index, color)
        if icon is not None:
            self.tab_widget.setTabIcon(tab_index, QIcon(icon))
        else:
            self.tab_widget.setTabIcon(tab_index, QIcon())

    def btn_console_clear_clicked(self):
        """
        Clears the gui console tab.
        """
        self.txt_console.clear()

    def _host_index_changed(self, i):
        """
        Persists host information changes like in brickv.mainwindow
        Changes port if the host was changed
        """
        if i < 0:
            return

        # Guard so _port_changed ignores the programmatic port update below
        self.host_index_changing = True
        self.spinbox_port.setValue(self.host_infos[i].port)
        self.host_index_changing = False

    def _port_changed(self, value):
        """
        Persists host information changes like in brickv.mainwindow
        """
        if self.host_index_changing:
            return

        i = self.combo_host.currentIndex()
        if i < 0:
            return

        self.host_infos[i].port = self.spinbox_port.value()

    def cb_xively_changed(self):
        """
        Enables/Disables widgets for xively configuration
        """
        if self.checkbox_xively.isChecked():
            self.groupBox_xively.setEnabled(True)
        else:
            self.groupBox_xively.setEnabled(False)

    def update_setup_tab(self, general_section):
        """
        Update the information of the setup tab with the given general_section.
        """
        from brickv.data_logger.configuration_validator import ConfigurationReader

        try:
            # host combo_host setEditText(String)
            self.combo_host.setEditText(general_section[ConfigurationReader.GENERAL_HOST])
            # port spinbox_port setValue(int)
            self.spinbox_port.setValue(general_section[ConfigurationReader.GENERAL_PORT])
            # file_count spin_file_count setValue(int)
            self.spin_file_count.setValue(general_section[ConfigurationReader.GENERAL_LOG_COUNT])
            # file_size spin_file_size setValue(int/1024/1024) (Byte -> MB)
            self.spin_file_size.setValue((general_section[ConfigurationReader.GENERAL_LOG_FILE_SIZE] / 1024.0 / 1024.0))
            # path_to_file line_path_to_file setText(string)
            self.line_path_to_file.setText(general_section[ConfigurationReader.GENERAL_PATH_TO_FILE])
            # logfile path
            self.line_path_to_eventfile.setText(general_section[ConfigurationReader.GENERAL_EVENTLOG_PATH])
            # loglevel: find the combo index of the configured level
            ll = general_section[ConfigurationReader.GENERAL_EVENTLOG_LEVEL]
            od = collections.OrderedDict(sorted(GUILogger._convert_level.items()))
            counter = 0  # TODO better way to set the combo box index?
            for k in od.keys():
                if ll == k:
                    break
                counter += 1
            self.combo_loglevel.setCurrentIndex(counter)

            # log_to_console
            def __checkbox_bool_setter(bool_value):
                # Map bool to a Qt.CheckState value (2 = Checked, 0 = Unchecked)
                if bool_value:
                    return 2
                else:
                    return 0

            self.checkbox_to_file.setChecked(
                __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_FILE]))
            # log_to_file
            self.checkbox_to_console.setCheckState(
                __checkbox_bool_setter(general_section[ConfigurationReader.GENERAL_EVENTLOG_TO_CONSOLE]))
        except Exception as e:
            EventLogger.critical("Could not read the General Section of the Config-File! -> " + str(e))
            return

    def create_tree_items(self, blueprint):
        """
        Create the device tree with the given blueprint.
        Shows all possible devices, if the view_all Flag is True.
        """
        self.tree_devices.clear()
        # Sorting off while inserting; re-enabled (and re-sorted) below
        self.tree_devices.setSortingEnabled(False)

        try:
            for dev in blueprint:
                self.__add_item_to_tree(dev)
            EventLogger.debug("Device Tree created.")
        except Exception as e:
            EventLogger.warning("DeviceTree - Exception while creating the Tree: " + str(e))

        self.tree_devices.sortItems(0, QtCore.Qt.AscendingOrder)
        self.tree_devices.setSortingEnabled(True)

    def add_item_to_tree(self, item_blueprint):
        # Public single-item variant of create_tree_items()
        self.tree_devices.setSortingEnabled(False)
        self.__add_item_to_tree(item_blueprint)
        self.tree_devices.sortItems(0, QtCore.Qt.AscendingOrder)
        self.tree_devices.setSortingEnabled(True)

    def __add_item_to_tree(self, item_blueprint):
        """
        Private function with NO sort = false

        Inserts one device blueprint as a three-level tree entry:
        device(name|UID) -> value(name|interval) -> sub-value(name|checkbox).

        :param item_blueprint: device description dict (Identifier.DD_* keys)
        :return:
        """
        # counts topLevelItems
        lv0_counter = self.tree_devices.topLevelItemCount()
        # counts values in devices
        value_counter = 0

        # lvl0: new entry(name|UID)
        item_0 = QtGui.QTreeWidgetItem(self.tree_devices)
        item_0.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
        # set name|UID
        self.tree_devices.topLevelItem(lv0_counter).setText(0, str(item_blueprint[Identifier.DD_NAME]))
        self.tree_devices.topLevelItem(lv0_counter).setText(1, str(item_blueprint[Identifier.DD_UID]))
        self.tree_devices.topLevelItem(lv0_counter).setToolTip(1, self.__tree_uid_tooltip)

        for item_value in item_blueprint[Identifier.DD_VALUES]:
            # lvl1: new entry(value_name|interval)
            item_1 = QtGui.QTreeWidgetItem(item_0)
            item_1.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled)
            interval = item_blueprint[Identifier.DD_VALUES][item_value][Identifier.DD_VALUES_INTERVAL]
            self.tree_devices.topLevelItem(lv0_counter).child(value_counter).setText(0, str(item_value))
            self.tree_devices.topLevelItem(lv0_counter).child(value_counter).setText(1, str(interval))
            self.tree_devices.topLevelItem(lv0_counter).child(value_counter).setToolTip(1, self.__tree_interval_tooltip)

            # check sub_values
            sub_values = item_blueprint[Identifier.DD_VALUES][item_value][Identifier.DD_SUBVALUES]
            if sub_values is not None:
                # counts sub values in devices
                sub_value_counter = 0
                for item_sub_value in sub_values:
                    # lvl2: new entry (sub_value_name|True/False)
                    item_2 = QtGui.QTreeWidgetItem(item_1)
                    item_2.setFlags(
                        QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled)
                    lvl2_item = self.tree_devices.topLevelItem(lv0_counter).child(value_counter).child(
                        sub_value_counter)
                    item_sub_value_value = \
                        item_blueprint[Identifier.DD_VALUES][item_value][Identifier.DD_SUBVALUES][
                            item_sub_value]
                    lvl2_item.setText(0, str(item_sub_value))

                    if item_sub_value_value:
                        lvl2_item.setCheckState(1, QtCore.Qt.Checked)
                    else:
                        lvl2_item.setCheckState(1, QtCore.Qt.Unchecked)
                    # Column 1 only carries the checkbox, no text
                    lvl2_item.setText(1, "")

                    sub_value_counter += 1

            value_counter += 1

    def remove_item_from_tree(self, item_name, item_uid):
        """
        Removes an item from the device tree. The first match is removed!
        """
        # remove first found match!
        # removed_item = False
        t0_max = self.tree_devices.topLevelItemCount()
        for t0 in range(0, t0_max):
            dev_name = self.tree_devices.topLevelItem(t0).text(0)
            dev_uid = self.tree_devices.topLevelItem(t0).text(1)

            if dev_name == item_name and dev_uid == item_uid:
                # removed_item = True
                self.tree_devices.takeTopLevelItem(t0)
                break

        # can't use this approach because of multiple selection in tree_devices
        # if not removed_item:
        #     QMessageBox.information(self, 'No Device found?', 'No Device was not found and could not be deleted!', QMessageBox.Ok)

    def tree_on_change(self, item, column):
        # check for wrong input number in interval or uid; the cell's tooltip
        # identifies which kind of cell was edited
        if column == 1:
            # check if tooltip is set
            tt = str(item.toolTip(1))
            if tt != "":
                # check if tooltip is interval
                if tt == self.__tree_interval_tooltip:
                    item.setText(1, str(Utilities.parse_to_int(item.text(1))))
                # check if tooltip is uid
                elif tt == self.__tree_uid_tooltip:
                    text = item.text(1)
                    if not Utilities.is_valid_string(text, 3):
                        text = Identifier.DD_UID_DEFAULT
                    item.setText(1, text)

    def tree_on_double_click(self, item, column):
        """
        Is called, when a cell in the tree was doubleclicked.
        Is used to allow the changing of the interval
        numbers and UID's but not empty cells.
        """
        edit_flag = (
            QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled)
        non_edit_flag = (QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsEnabled)

        if column == 0:
            item.setFlags(non_edit_flag)
        # NOTE(review): `or item.text(column) is None` can never be True when
        # the first operand is False ("" is not None) — likely meant `and ...
        # is not None`; confirm intent before changing
        elif item.text(column) != "" or item.text(column) is None:
            item.setFlags(edit_flag)

    def txt_console_output(self, msg):
        """
        SIGNAL function:
        Function to write text on the gui console tab.
        """
        self.txt_console.append(str(msg))
        if self.checkbox_console_auto_scroll.isChecked():
            self.txt_console.moveCursor(QtGui.QTextCursor.End)

    def txt_console_highlight_tab(self):
        """
        SIGNAL function:
        Highlight the console/message tab when an error occurs.
        """
        if not self.tab_console_warning and self.tab_widget.currentWidget().objectName() != self.tab_console.objectName():
            self.tab_console_warning = True
            from brickv.utils import get_resources_path
            from PyQt4.QtGui import QColor
            self.tab_set(self.tab_widget.indexOf(self.tab_console), QColor(255, 0, 0),
                         os.path.join(get_resources_path(), "dialog-warning.png"))

    def table_add_row(self, csv_data):
        """
        SIGNAL function:
        Adds new CSV Data into the Table.
        """
        # disable sort while inserting so the new row index stays valid
        self.table_widget.setSortingEnabled(False)

        row = self.table_widget.rowCount()
        self.table_widget.insertRow(row)
        self.table_widget.setItem(row, 0, QtGui.QTableWidgetItem(str(csv_data.uid)))
        self.table_widget.setItem(row, 1, QtGui.QTableWidgetItem(str(csv_data.name)))
        self.table_widget.setItem(row, 2, QtGui.QTableWidgetItem(str(csv_data.var_name)))
        self.table_widget.setItem(row, 3, QtGui.QTableWidgetItem(str(csv_data.raw_data)))
        self.table_widget.setItem(row, 4, QtGui.QTableWidgetItem(str(csv_data.timestamp)))

        if self.checkbox_data_auto_scroll.isChecked():
            self.table_widget.scrollToBottom()

        # enable sort
        self.table_widget.setSortingEnabled(True)
|
from ahkab.testing import NetlistTest
from ahkab import options
options.plotting_show_plots = False
def test():
    """EKV NMOS DC sweep"""
    # Drive the netlist test fixture through its full lifecycle.
    checker = NetlistTest('ekv1')
    checker.setUp()
    checker.test()
    checker.tearDown()
if __name__ == '__main__':
    # Reuse test() so a standalone run performs the same sequence as the
    # test-runner entry point, including tearDown() — the original repeated
    # setUp()/test() here but never tore the fixture down.
    test()
|
"""
Simple class wrappers for the various external commands needed by
git-buildpackage and friends
"""
import subprocess
import os
import signal
import sys
from contextlib import contextmanager
from tempfile import TemporaryFile
import gbp.log as log
class CommandExecFailed(Exception):
    """Raised by Command when a wrapped command fails to run or exits non-zero."""
@contextmanager
def proxy_stdf():
    """
    Circulate stdout/stderr via a proper file object. Designed to work around a
    problem where Python nose replaces sys.stdout/stderr with a custom 'Tee'
    object that is not a file object (compatible) and thus causes a crash with
    Popen.
    """
    def _real_file_or_tmp(stream):
        # Keep the stream if it is backed by a real file descriptor,
        # otherwise substitute a temporary file we can hand to Popen.
        try:
            stream.fileno()
            return stream
        except Exception:
            return TemporaryFile()

    out = _real_file_or_tmp(sys.stdout)
    err = _real_file_or_tmp(sys.stderr)
    try:
        yield out, err
    finally:
        # Replay anything captured in a substitute file onto the real streams
        if out != sys.stdout:
            out.seek(0)
            sys.stdout.write(out.read().decode())
        if err != sys.stderr:
            err.seek(0)
            sys.stderr.write(err.read().decode())
class Command(object):
    """
    Wraps a shell command, so we don't have to store any kind of command
    line options in one of the git-buildpackage commands

    Note that it does not do any shell quoting even with shell=True so
    you have to quote arguments yourself if necessary.

    If cmd doesn't contain a path component it will be looked up in $PATH.
    """
    def __init__(self, cmd, args=[], shell=False, extra_env=None, cwd=None,
                 capture_stderr=False,
                 capture_stdout=False):
        # NOTE(review): args=[] is a mutable default; it is only read here
        # (never mutated), so the shared list is harmless in practice.
        self.cmd = cmd
        self.args = args
        # Error template; {err_reason} etc. are filled in by _format_err()
        # after the command has actually run.
        self.run_error = self._f("'%s' failed: {err_reason}",
                                 (" ".join([self.cmd] + self.args)))
        self.shell = shell
        self.capture_stdout = capture_stdout
        self.capture_stderr = capture_stderr
        self.cwd = cwd
        if extra_env is not None:
            # Extend (not replace) the current process environment
            self.env = os.environ.copy()
            self.env.update(extra_env)
        else:
            self.env = None
        self._reset_state()

    @staticmethod
    def _f(format, *args):
        """Build error string template

        '%' expansion is performed while curly braces in args are
        quoted so we don't accidentally try to expand them when
        printing an error message later that uses one of our
        predefined error variables stdout, stderr, stderr_or_reason
        and self.err_reason.

        >>> Command._f("foo %s", "bar")
        'foo bar'
        >>> Command._f("{foo} %s %s", "bar", "baz")
        '{foo} bar baz'
        >>> Command._f("{foo} bar")
        '{foo} bar'
        """
        def _q(arg):
            # Escape braces so a later str.format() leaves them untouched
            return arg.replace('{', '{{').replace('}', '}}')

        return format % tuple([_q(arg) for arg in args])

    def _reset_state(self):
        # Pessimistic defaults until the command has actually been run
        self.retcode = 1
        self.stdout, self.stderr, self.err_reason = [''] * 3

    def __call(self, args):
        """
        Wraps subprocess.call so we can be verbose and fix Python's
        SIGPIPE handling

        Returns the command's exit status; raises OSError when the
        command could not be launched at all.
        """
        def default_sigpipe():
            "Restore default signal handler (http://bugs.python.org/issue1652)"
            signal.signal(signal.SIGPIPE, signal.SIG_DFL)

        log.debug("%s %s %s" % (self.cmd, self.args, args))
        self._reset_state()
        cmd = [self.cmd] + self.args + args
        if self.shell:
            # subprocess.call only cares about the first argument if shell=True
            cmd = " ".join(cmd)
        with proxy_stdf() as (stdout, stderr):
            # Only capture into pipes when requested; otherwise inherit the
            # (possibly proxied) stdout/stderr file objects.
            stdout_arg = subprocess.PIPE if self.capture_stdout else stdout
            stderr_arg = subprocess.PIPE if self.capture_stderr else stderr
            try:
                popen = subprocess.Popen(cmd,
                                         cwd=self.cwd,
                                         shell=self.shell,
                                         env=self.env,
                                         preexec_fn=default_sigpipe,
                                         stdout=stdout_arg,
                                         stderr=stderr_arg)
                (self.stdout, self.stderr) = popen.communicate()
                # communicate() returns bytes (or None when not captured)
                if self.stdout is not None:
                    self.stdout = self.stdout.decode()
                if self.stderr is not None:
                    self.stderr = self.stderr.decode()
            except OSError as err:
                self.err_reason = "execution failed: %s" % str(err)
                self.retcode = 1
                raise
            self.retcode = popen.returncode
        if self.retcode < 0:
            self.err_reason = "it was terminated by signal %d" % -self.retcode
        elif self.retcode > 0:
            self.err_reason = "it exited with %d" % self.retcode

        return self.retcode

    def _log_err(self):
        "Log an error message"
        log.err(self._format_err())

    def _format_err(self):
        """Format the error message

        This allows to replace stdout, stderr and err_reason in
        the self.run_error.
        """
        stdout = self.stdout.rstrip() if self.stdout else self.stdout
        stderr = self.stderr.rstrip() if self.stderr else self.stderr
        stderr_or_reason = self.stderr.rstrip() if self.stderr else self.err_reason
        return self.run_error.format(stdout=stdout,
                                     stderr=stderr,
                                     stderr_or_reason=stderr_or_reason,
                                     err_reason=self.err_reason)

    def __call__(self, args=[], quiet=False):
        """Run the command and raise exception on errors

        If run quietly it will not print an error message via the
        L{gbp.log} logging API.

        Whether the command prints anything to stdout/stderr depends on
        the I{capture_stderr}, I{capture_stdout} instance variables.

        All errors will be reported as subclass of the
        L{CommandExecFailed} exception including a non zero exit
        status of the run command.

        @param args: additional command line arguments
        @type args: C{list} of C{strings}
        @param quiet: don't log failed execution to stderr. Mostly useful during
            unit testing
        @type quiet: C{bool}

        >>> Command("/bin/true")(["foo", "bar"])
        >>> Command("/foo/bar")(quiet=True) # doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
        ...
        gbp.command_wrappers.CommandExecFailed
        """
        try:
            ret = self.__call(args)
        except OSError:
            # Launch failure: err_reason was set by __call; fall through to
            # the common non-zero handling below
            ret = 1
        if ret:
            if not quiet:
                self._log_err()
            raise CommandExecFailed(self._format_err())

    def call(self, args, quiet=True):
        """Like L{__call__} but let the caller handle the return status.

        Only raise L{CommandExecFailed} if we failed to launch the command
        at all (i.e. if it does not exist) not if the command returned
        nonzero.

        Logs errors using L{gbp.log} by default.

        @param args: additional command line arguments
        @type args: C{list} of C{strings}
        @param quiet: don't log failed execution to stderr. Mostly useful during
            unit testing
        @type quiet: C{bool}
        @returns: the exit status of the run command
        @rtype: C{int}

        >>> Command("/bin/true").call(["foo", "bar"])
        0
        >>> Command("/foo/bar").call(["foo", "bar"]) # doctest:+ELLIPSIS
        Traceback (most recent call last):
        ...
        gbp.command_wrappers.CommandExecFailed: execution failed: ...
        >>> c = Command("/bin/true", capture_stdout=True,
        ...             extra_env={'LC_ALL': 'C'})
        >>> c.call(["--version"])
        0
        >>> c.stdout.startswith('true')
        True
        >>> c = Command("/bin/false", capture_stdout=True,
        ...             extra_env={'LC_ALL': 'C'})
        >>> c.call(["--help"])
        1
        >>> c.stdout.startswith('Usage:')
        True
        """
        ret = 1
        try:
            ret = self.__call(args)
        except OSError:
            raise CommandExecFailed(self.err_reason)
        finally:
            # Log any failure (launch error or non-zero exit) unless quiet
            if ret and not quiet:
                self._log_err()
        return ret
class RunAtCommand(Command):
    """Run a command in a specific directory"""
    def __call__(self, dir='.', *args):
        # Temporarily chdir into *dir*, run the wrapped command, then always
        # restore the previous working directory.
        curdir = os.path.abspath(os.path.curdir)
        try:
            os.chdir(dir)
            # NOTE(review): list(*args) unpacks the varargs tuple into the
            # list() constructor, so this only supports zero or one extra
            # positional argument (itself an iterable of strings); passing
            # two or more raises TypeError. Confirm callers before changing.
            Command.__call__(self, list(*args))
        finally:
            os.chdir(curdir)
class UnpackTarArchive(Command):
    """Wrap tar to unpack a compressed tar archive

    @param archive: the tarball to unpack
    @param dir: directory to extract into (passed to tar -C)
    @param filters: optional list of patterns passed as --exclude options
    @param compression: tar compression flag; defaults to '-a' (auto-detect)
    """
    def __init__(self, archive, dir, filters=None, compression=None):
        self.archive = archive
        self.dir = dir
        # Use None instead of a shared mutable default list ([]); callers
        # passing a list (or nothing) see identical behavior.
        exclude = [("--exclude=%s" % _filter) for _filter in (filters or [])]

        if not compression:
            compression = '-a'

        Command.__init__(self, 'tar', exclude +
                         ['-C', dir, compression, '-xf', archive])
        self.run_error = self._f("Couldn't unpack '%s': {err_reason}", self.archive)
class PackTarArchive(Command):
    """Wrap tar to pack a compressed tar archive

    @param archive: the tarball to create
    @param dir: directory to change into before packing (tar -C)
    @param dest: the path (relative to dir) to pack
    @param filters: optional list of patterns passed as --exclude options
    @param compression: tar compression flag; defaults to '-a' (auto-detect)
    """
    def __init__(self, archive, dir, dest, filters=None, compression=None):
        self.archive = archive
        self.dir = dir
        # Use None instead of a shared mutable default list ([]); callers
        # passing a list (or nothing) see identical behavior.
        exclude = [("--exclude=%s" % _filter) for _filter in (filters or [])]

        if not compression:
            compression = '-a'

        Command.__init__(self, 'tar', exclude +
                         ['-C', dir, compression, '-cf', archive, dest])
        self.run_error = self._f("Couldn't repack '%s': {err_reason}", self.archive)
class CatenateTarArchive(Command):
    """Wrap tar to catenate a tar file with the next (tar -A)."""
    def __init__(self, archive, **kwargs):
        self.archive = archive
        super(CatenateTarArchive, self).__init__('tar', ['-A', '-f', archive], **kwargs)

    def __call__(self, target):
        # Append *target* onto self.archive
        super(CatenateTarArchive, self).__call__([target])
class RemoveTree(Command):
    """Wrap rm to recursively delete a whole directory tree."""
    def __init__(self, tree):
        self.tree = tree
        super(RemoveTree, self).__init__('rm', ['-rf', tree])
        self.run_error = self._f("Couldn't remove '%s': {err_reason}", self.tree)
class DpkgSourceExtract(Command):
    """
    Wrap dpkg-source to extract a Debian source package into a certain
    directory
    """
    def __init__(self):
        super(DpkgSourceExtract, self).__init__('dpkg-source', ['-x'])

    def __call__(self, dsc, output_dir):
        # Build a per-call error message naming the dsc being extracted
        self.run_error = self._f("Couldn't extract '%s': {err_reason}", dsc)
        super(DpkgSourceExtract, self).__call__([dsc, output_dir])
class UnpackZipArchive(Command):
    """Wrap unzip to quietly extract a zip archive into a directory."""
    def __init__(self, archive, dir):
        self.archive = archive
        self.dir = dir
        super(UnpackZipArchive, self).__init__('unzip', ["-q", archive, '-d', dir])
        self.run_error = self._f("Couldn't unpack '%s': {err_reason}", self.archive)
class CatenateZipArchive(Command):
    """Wrap the zipmerge tool to catenate a zip file with the next."""
    def __init__(self, archive, **kwargs):
        self.archive = archive
        super(CatenateZipArchive, self).__init__('zipmerge', [archive], **kwargs)

    def __call__(self, target):
        # Build a per-call error message naming both archives involved
        self.run_error = self._f("Couldn't append '%s' to '%s': {err_reason}",
                                 target, self.archive)
        super(CatenateZipArchive, self).__call__([target])
class GitCommand(Command):
    """Mother/Father of all git commands

    @param cmd: the git subcommand to run
    @param args: additional arguments for the subcommand
    """
    def __init__(self, cmd, args=None, **kwargs):
        # Use None instead of a shared mutable default list ([]); callers
        # passing a list (or nothing) see identical behavior.
        Command.__init__(self, 'git', [cmd] + (args or []), **kwargs)
        self.run_error = self._f("Couldn't run git %s: {err_reason}", cmd)
|
from service import Main

# The original statement `Main().get_params` only constructed Main and then
# discarded the bound method without calling it — a no-op attribute access.
# Presumably the intent was to invoke it; TODO confirm get_params is a plain
# method (not a property) on service.Main.
Main().get_params()
|
import os
import subprocess
from collections import OrderedDict, defaultdict
from .step import Step
from .exceptions import *
from ..genomic_io.fastq import FastqFile
from ..genomic_io.fasta import FastaFile, FastaEntry
from ..genomic_io.functions import make_fasta_from_fastq
from ..annotation.intron import get_intron_sequences
from ..settings import *
import pysam
class BpReference(Step):
    '''
    Build a branchpoint (BP) sequence reference.

    Reads branchpoint positions from a BED file, extracts for each BP a
    sequence fragment from the genome FASTA (a window ending at the BP joined
    to a window starting at the associated 5' position, reverse-complemented
    on the minus strand), writes the fragments to a FASTA file, and assembles
    the external indexing command that turns that FASTA into a reference.
    '''
    def __init__(self, name, input_files, output_directory,
                 executable='', executable_arguments = '' , number_of_nucleotides = 150):
        # input_files: [0] genome FASTA, [1] branchpoint BED.
        # number_of_nucleotides: length of the flank taken on each side.
        super().__init__(name, [], output_directory, executable, executable_arguments)
        self.genome_fasta_file = input_files[0]
        self.bp_bed_file = input_files[1]
        # FASTA of extracted BP fragments, written by get_bp_sequences()
        self.bp_fasta_file = os.path.join(self.output_directory, "bp_sequences.fa")
        self.number_of_nucleotides = number_of_nucleotides
        # Output prefix handed to the external indexing executable
        self.reference_base = os.path.join(self.output_directory, settings['bp_reference_base'])

    ###################################################################
    def prepare(self):
        """Extract the BP sequences and build the indexing command line."""
        self.get_bp_sequences()
        if not os.path.isfile(self.bp_fasta_file):
            raise StepError("There was a problem in getting the bp sequences."
                            "BP reference file %s doesn't exist."%self.bp_fasta_file)
        self.command = " ".join( [ self.executable, self.executable_arguments,
                                   self.bp_fasta_file, self.reference_base ] )

    ##############################################################################
    def get_bp_sequences(self):
        """
        Write one FASTA entry per BED line to self.bp_fasta_file.

        BED columns used: 0 = chromosome, 1 = BP position, 3 = entry name
        (field-separated; its field 3 holds the 5' position), 5 = strand.
        Raises StepError for a strand value other than '+'/'-'.
        """
        with open(self.bp_bed_file, "r") as bed_input,\
             FastaFile(self.genome_fasta_file) as genome_input,\
             open(self.bp_fasta_file, "w") as bp_output:
            # first read the bed file into a dict grouped by chromosome
            bps_by_chr = defaultdict(list)
            for bp_entry in bed_input:
                bp_contents = bp_entry.rstrip().split("\t")
                bp_chr = bp_contents[0]
                bps_by_chr[bp_chr].append(bp_entry)
            # Then go through the fasta file and get the sequences
            for chr_entry in genome_input:
                this_chr = chr_entry.header
                for branchpoint in bps_by_chr[this_chr]:
                    bp_contents = branchpoint.rstrip().split("\t")
                    bp_location = int(bp_contents[1])
                    bp_header_contents = bp_contents[3].split(settings['field_separator'])
                    five_prime_location = int(bp_header_contents[3])
                    this_sequence = ''
                    if bp_contents[5] == '+':
                        # Clamp the upstream window at the chromosome start
                        bp_fragment_start = bp_location - self.number_of_nucleotides
                        if bp_fragment_start < 0 :
                            bp_fragment_start = 0
                        this_sequence = chr_entry.sequence[ bp_fragment_start : bp_location + 1 ] +\
                                        chr_entry.sequence[ five_prime_location : five_prime_location +\
                                        self.number_of_nucleotides ]
                    elif bp_contents[5] == '-':
                        # Minus strand: take the fragments in genome
                        # coordinates, then reverse-complement each piece
                        # before joining.
                        bp_fragment_raw = chr_entry.sequence[ bp_location :\
                                          bp_location + self.number_of_nucleotides + 1 ]
                        five_p_fragment_start = five_prime_location - self.number_of_nucleotides
                        five_p_fragment_raw = chr_entry.sequence[ five_p_fragment_start + 1 :\
                                              five_prime_location + 1 ]
                        bp_fragment_raw_fasta = FastaEntry('bp' , bp_fragment_raw)
                        bp_fragment_raw_fasta.reverse_complement()
                        five_p_fragment_raw_fasta = FastaEntry('five_p' , five_p_fragment_raw )
                        five_p_fragment_raw_fasta.reverse_complement()
                        this_sequence = bp_fragment_raw_fasta.sequence + five_p_fragment_raw_fasta.sequence
                    else:
                        raise(StepError("Invalid strand type:", bp_contents[5]))
                    this_bp_sequence_entry = FastaEntry(bp_contents[3], this_sequence)
                    print(this_bp_sequence_entry, file = bp_output)
###############################################################################
###############################################################################
###############################################################################
def post_run(self):
missing_references = list()
suffixes = ('.1.bt2', '.2.bt2', '.3.bt2', '.4.bt2', '.rev.1.bt2', '.rev.2.bt2')
error_messages = list()
for suffix in suffixes:
if (not os.path.isfile(self.reference_base + suffix) ) :
missing_references.append("Couldn't find the bowtie2 reference: " + self.reference_base + suffix)
if len(missing_references) > 0:
error_messages.append("Couldn't find the following bowtie2 reference(s):\n" +\
"\n".join(missing_references))
if len(error_messages) > 0:
subprocess.call('touch ' + self.failure_file , shell=True )
else:
subprocess.call('touch ' + self.success_file , shell=True )
self.error_messages = error_messages
|
""" Tests for general functionality of the KGML parser, pathway and
visualisation modules
"""
from __future__ import with_statement
import os
import unittest
from Bio.Graphics.ColorSpiral import ColorSpiral
from Bio import MissingExternalDependencyError
try:
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import A4
except ImportError:
raise MissingExternalDependencyError(
"Install reportlab if you want to use Bio.Graphics.")
try:
    from PIL import Image
except ImportError:
    # Message fix: "predecessorPIL" was missing a space.
    raise MissingExternalDependencyError(
        "Install Pillow or its predecessor PIL (Python Imaging Library) "
        "if you want to use bitmaps from KGML.")
from Bio.KEGG.KGML.KGML_parser import read
from Bio.Graphics.KGML_vis import KGMLCanvas
class PathwayData(object):
    """Convenience structure for testing pathway data.

    Derives every input/output filename for a KEGG map from its numeric
    *name* and records the expected element counts for that map.
    """
    def __init__(self, name, element_counts, show_pathway_image=False):
        self.element_counts = element_counts
        self.show_pathway_image = show_pathway_image
        self.infilename = os.path.join("KEGG", "ko%s.xml" % name)
        self.outfilename = os.path.join("KEGG", "ko%s.kgml" % name)
        self.pathway_image = os.path.join("KEGG", "map%s.png" % name)
        self.output_stem = "Graphics/map%s" % name
class KGMLPathwayTest(unittest.TestCase):
    """ Import the ko01100 metabolic map from a local .xml KGML file, and from
        the KEGG site, and write valid KGML output for each
    """
    def setUp(self):
        # Does our output directory exist? If not, create it
        if not os.path.isdir('KEGG'):
            os.mkdir('KEGG')
        # Define some data to work with as a list of tuples:
        # (infilename, outfilename, (entry_count, ortholog_count,
        # compound_count, map_counts), pathway_image,
        # show_image_map)
        self.data = [
            PathwayData("01100", (3628, 1726, 1746, 149)),
            PathwayData("03070", (81, 72, 8, 1), True),
        ]
        # A list of KO IDs that we're going to use to modify pathway
        # appearance. These are KO IDs for reactions that take part in ko00020,
        # the TCA cycle
        self.ko_ids = \
            set(['ko:K00239','ko:K00240','ko:K00241','ko:K00242','ko:K00244',
                 'ko:K00245','ko:K00246','ko:K00247','ko:K00174','ko:K00175',
                 'ko:K00177','ko:K00176','ko:K00382','ko:K00164','ko:K00164',
                 'ko:K00658','ko:K01902','ko:K01903','ko:K01899','ko:K01900',
                 'ko:K01899','ko:K01900','ko:K00031','ko:K00030','ko:K00031',
                 'ko:K01648','ko:K00234','ko:K00235','ko:K00236','ko:K00237',
                 'ko:K01676','ko:K01677','ko:K01678','ko:K01679','ko:K01681',
                 'ko:K01682','ko:K01681','ko:K01682','ko:K01647','ko:K00025',
                 'ko:K00026','ko:K00024','ko:K01958','ko:K01959','ko:K01960',
                 'ko:K00163','ko:K00161','ko:K00162','ko:K00163','ko:K00161',
                 'ko:K00162','ko:K00382','ko:K00627','ko:K00169','ko:K00170',
                 'ko:K00172','ko:K00171','ko:K01643','ko:K01644','ko:K01646',
                 'ko:K01610','ko:K01596'])
    def test_render_KGML_basic(self):
        """ Basic rendering of KGML: write to PDF without modification.
        """
        # We test rendering of both the original KEGG KGML using only local
        # files.
        for p in self.data:
            # BUG FIX: mode 'rU' was deprecated since Python 3.4 and removed
            # in 3.11 (raises ValueError); plain 'r' is equivalent on py3.
            with open(p.infilename, 'r') as f:
                pathway = read(f)
                pathway.image = p.pathway_image
                kgml_map = KGMLCanvas(pathway)
                kgml_map.import_imagemap = p.show_pathway_image
                kgml_map.draw(p.output_stem + '_original.pdf')
    def test_render_KGML_modify(self):
        """ Rendering of KGML to PDF, with modification.
        """
        # We test rendering of the original KGML for KO01100,
        # modifying line width for the lipid pathway
        p = self.data
        with open(p[0].infilename) as f:
            pathway = read(f)
            # Widen the graphics of every ortholog that shares a KO id
            # with the TCA-cycle set above.
            mod_rs = [e for e in pathway.orthologs if
                      len(set(e.name.split()).intersection(self.ko_ids))]
            for r in mod_rs:
                for g in r.graphics:
                    g.width = 10
            kgml_map = KGMLCanvas(pathway)
            kgml_map.draw(p[0].output_stem + '_widths.pdf')
        # We test rendering of the original KGML for KO3070,
        # modifying the reaction colours for each ortholog entry
        with open(p[1].infilename) as f:
            pathway = read(f)
            orthologs = [e for e in pathway.orthologs]
            # Use Biopython's ColorSpiral to generate colours
            cs = ColorSpiral(a=2, b=0.2, v_init=0.85, v_final=0.5,
                             jitter=0.03)
            colors = cs.get_colors(len(orthologs))
            for o, c in zip(orthologs, colors):
                for g in o.graphics:
                    g.bgcolor = c
            kgml_map = KGMLCanvas(pathway)
            pathway.image = p[1].pathway_image
            kgml_map.import_imagemap = p[1].show_pathway_image
            kgml_map.draw(p[1].output_stem + '_colors.pdf')
if __name__ == '__main__':
    # Use a verbose runner so each test name is printed as it executes.
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
|
"""
Django settings for tango_with_django_project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
SECRET_KEY = 'q=p=x-9tau%%)nvuz17l9^n1mdat)4e!(sjx$sgavu(s*p=+=-'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rango',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tango_with_django_project.urls'
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
DATABASE_PATH = os.path.join(BASE_DIR, 'rango.db')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': DATABASE_PATH,
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_PATH = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
TEMPLATE_DIRS= (
TEMPLATE_PATH,
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
LOGIN_URL = '/rango/login/'
|
"""
Classes and functions dealing with rpm package representations.
"""
import rpm
import os
import os.path
import misc
import i18n
import re
import fnmatch
import stat
import warnings
from subprocess import Popen, PIPE
from rpmUtils import RpmUtilsError
import rpmUtils.miscutils
from rpmUtils.miscutils import flagToString, stringToVersion, compareVerOnly
import Errors
import errno
import struct
from constants import *
from operator import itemgetter
import urllib
import urlparse
urlparse.uses_fragment.append("media")
from urlgrabber.grabber import URLGrabber, URLGrabError
try:
import xattr
if not hasattr(xattr, 'get'):
xattr = None # This is a "newer" API.
except ImportError:
xattr = None
import pwd
import grp
def comparePoEVR(po1, po2):
    """
    Compare two Package or PackageEVR objects.
    """
    left = (po1.epoch, po1.version, po1.release)
    right = (po2.epoch, po2.version, po2.release)
    return rpmUtils.miscutils.compareEVR(left, right)
def comparePoEVREQ(po1, po2):
    """
    Compare two Package or PackageEVR objects for equality.
    """
    # Release is checked first (it differs most often), then version,
    # then epoch — same short-circuit order as the original.
    return (po1.release == po2.release and
            po1.version == po2.version and
            po1.epoch == po2.epoch)
def buildPkgRefDict(pkgs, casematch=True):
    """Map every naming convention of each pkg onto the pkg objects.

    For a package (name, i386, 0, 1, 1) the returned dict has entries
    under all of: name, name.arch, name-ver-rel.arch, name-ver,
    name-ver-rel, epoch:name-ver-rel.arch and name-epoch:ver-rel.arch.
    When casematch is false, the keys are lower-cased first.
    """
    pkgdict = {}
    for pkg in pkgs:
        (n, a, e, v, r) = pkg.pkgtup
        if not casematch:
            n, a, e, v, r = (s.lower() for s in (n, a, e, v, r))
        keys = (n,
                '%s.%s' % (n, a),
                '%s-%s-%s.%s' % (n, v, r, a),
                '%s-%s' % (n, v),
                '%s-%s-%s' % (n, v, r),
                '%s:%s-%s-%s.%s' % (e, n, v, r, a),
                '%s-%s:%s-%s.%s' % (n, e, v, r, a))
        for key in keys:
            pkgdict.setdefault(key, []).append(pkg)
    return pkgdict
def parsePackages(pkgs, usercommands, casematch=0,
                  unique='repo-epoch-name-version-release-arch',
                  pkgdict=None):
    """matches up the user request versus a pkg list:
      for installs/updates available pkgs should be the 'others list'
      for removes it should be the installed list of pkgs
      takes an optional casematch option to determine if case should be matched
      exactly. Defaults to not matching.

      Returns (exactmatch, matched, unmatched): pkgs matched by exact key,
      pkgs matched via glob patterns, and commands with no match at all."""
    if pkgdict is None:
        pkgdict = buildPkgRefDict(pkgs, bool(casematch))
    exactmatch = []
    matched = []
    unmatched = []
    for command in usercommands:
        if not casematch:
            command = command.lower()
        if command in pkgdict:
            # Exact key hit; remove it so later commands can't match it again.
            exactmatch.extend(pkgdict[command])
            del pkgdict[command]
        else:
            # anything we couldn't find a match for
            # could mean it's not there, could mean it's a wildcard
            if misc.re_glob(command):
                trylist = pkgdict.keys()
                # command and pkgdict are already lowered if not casematch
                # so case sensitive is always fine
                regex = misc.compile_pattern(command)
                foundit = 0
                for item in trylist:
                    if regex(item):
                        matched.extend(pkgdict[item])
                        del pkgdict[item]
                        foundit = 1
                # NOTE(review): deleting from pkgdict while iterating
                # pkgdict.keys() relies on py2 keys() returning a list copy —
                # confirm before porting to py3.
                if not foundit:
                    unmatched.append(command)
            else:
                unmatched.append(command)
    unmatched = misc.unique(unmatched)
    if unique == 'repo-epoch-name-version-release-arch': # pkg.__hash__
        matched = misc.unique(matched)
        exactmatch = misc.unique(exactmatch)
    elif unique == 'repo-pkgkey': # So we get all pkg entries from a repo
        def pkgunique(pkgs):
            # De-duplicate by (repo id, pkgKey) instead of pkg.__hash__.
            u = {}
            for pkg in pkgs:
                mark = "%s%s" % (pkg.repo.id, pkg.pkgKey)
                u[mark] = pkg
            return u.values()
        matched = pkgunique(matched)
        exactmatch = pkgunique(exactmatch)
    else:
        raise ValueError, "Bad value for unique: %s" % unique
    return exactmatch, matched, unmatched
class FakeSack:
    """Fake PackageSack to use with FakeRepository."""
    def __init__(self):
        # Nothing to initialise; a fake sack holds no real data.
        pass
    def have_fastReturnFileEntries(self):
        """ Is calling pkg.returnFileEntries(primary_only=True) faster than
            using searchFiles(). Always true on a fake sack. """
        return True
    def delPackage(self, obj):
        """No-op delete, so localpackages still work with --skip-broken."""
        pass
class FakeRepository:
    """Fake repository class for use in rpmsack package objects."""
    def _set_cleanup_repoid(self, repoid):
        """ Set the repoid, but because it can be random ... clean it up. """
        # We don't want repoids to contain random bytes that can be
        # in the FS directories. It's also nice if they aren't "huge". So
        # just chop to the rpm name.
        pathbased = False
        if '/' in repoid:
            repoid = os.path.basename(repoid)
            pathbased = True
        if repoid.endswith(".rpm"):
            repoid = repoid[:-4]
            pathbased = True
        # "chars", not "bytes" — the original shadowed the builtin name.
        chars = [] # Just in case someone uses mv to be evil:
        if pathbased:
            chars.append('/')
        for char in repoid:
            if ord(char) >= 128:
                char = '?' # replace non-ASCII rather than dropping it
            chars.append(char)
        self.id = "".join(chars)
    def __init__(self, repoid):
        self._set_cleanup_repoid(repoid)
        self.name = self.id
        self.sack = FakeSack()
    # Ordering and equality are by cleaned-up id only (py2 __cmp__).
    def __cmp__(self, other):
        if self.id > other.id:
            return 1
        elif self.id < other.id:
            return -1
        else:
            return 0
    def __hash__(self):
        return hash(self.id)
    def __str__(self):
        return self.id
    def _ui_id(self):
        """Id as shown in user-facing output."""
        return self.id
    ui_id = property(fget=lambda self: self._ui_id())
class PackageObject(object):
    """Base Package Object - sets up the default storage dicts and the
    most common returns (NEVRA formatting helpers and comparisons)."""
    def __init__(self):
        # NEVRA identity fields; subclasses are expected to fill these in.
        self.name = None
        self.version = None
        self.release = None
        self.epoch = None
        self.arch = None
        # self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release)
        self._checksums = [] # (type, checksum, id(0,1)
    # The "ui_*" variants below hide the epoch when it is '0', which is the
    # display convention used by yum's output code.
    def _ui_envra(self):
        if self.epoch == '0':
            return self.nvra
        else:
            return self.envra
    ui_envra = property(fget=lambda self: self._ui_envra())
    def _ui_nevra(self):
        if self.epoch == '0':
            return self.nvra
        else:
            return self.nevra
    ui_nevra = property(fget=lambda self: self._ui_nevra())
    def _ui_evr(self):
        if self.epoch == '0':
            return self.vr
        else:
            return self.evr
    ui_evr = property(fget=lambda self: self._ui_evr())
    def _ui_evra(self):
        if self.epoch == '0':
            return self.vra
        else:
            return self.evra
    ui_evra = property(fget=lambda self: self._ui_evra())
    def _ui_nevr(self):
        if self.epoch == '0':
            return self.nvr
        else:
            return self.nevr
    ui_nevr = property(fget=lambda self: self._ui_nevr())
    # Plain formatting helpers: n=name, e=epoch, v=version, r=release, a=arch.
    def _na(self):
        return '%s.%s' % (self.name, self.arch)
    na = property(fget=lambda self: self._na())
    def _vr(self):
        return '%s-%s' % (self.version, self.release)
    vr = property(fget=lambda self: self._vr())
    def _vra(self):
        return '%s-%s.%s' % (self.version, self.release, self.arch)
    vra = property(fget=lambda self: self._vra())
    def _evr(self):
        return '%s:%s-%s' % (self.epoch, self.version, self.release)
    evr = property(fget=lambda self: self._evr())
    def _evra(self):
        return '%s:%s-%s.%s' % (self.epoch,self.version,self.release, self.arch)
    evra = property(fget=lambda self: self._evra())
    def _nvr(self):
        return '%s-%s-%s' % (self.name, self.version, self.release)
    nvr = property(fget=lambda self: self._nvr())
    def _nvra(self):
        return '%s-%s-%s.%s' % (self.name, self.version,self.release, self.arch)
    nvra = property(fget=lambda self: self._nvra())
    def _nevr(self):
        return '%s-%s:%s-%s' % (self.name, self.epoch,self.version,self.release)
    nevr = property(fget=lambda self: self._nevr())
    def _nevra(self):
        return '%s-%s:%s-%s.%s' % (self.name,
                                   self.epoch, self.version, self.release,
                                   self.arch)
    nevra = property(fget=lambda self: self._nevra())
    def _envr(self):
        return '%s:%s-%s-%s' % (self.epoch,self.name, self.version,self.release)
    envr = property(fget=lambda self: self._envr())
    def _envra(self):
        return '%s:%s-%s-%s.%s' % (self.epoch, self.name,
                                   self.version, self.release,
                                   self.arch)
    envra = property(fget=lambda self: self._envra())
    def __str__(self):
        return self.ui_envra
    def printVer(self):
        """returns a printable version string - including epoch, if it's set"""
        if self.epoch != '0':
            ver = '%s:%s-%s' % (self.epoch, self.version, self.release)
        else:
            ver = '%s-%s' % (self.version, self.release)
        return ver
    def verCMP(self, other):
        """ Compare package to another one, only rpm-version ordering.
            Returns a cmp()-style int; a falsy 'other' compares as smaller. """
        if not other:
            return 1
        ret = cmp(self.name, other.name)
        if ret == 0:
            ret = comparePoEVR(self, other)
        return ret
    def __cmp__(self, other):
        """ Compare packages, this is just for UI/consistency. """
        ret = self.verCMP(other)
        if ret == 0:
            ret = cmp(self.arch, other.arch)
        if ret == 0 and hasattr(self, 'repoid') and hasattr(other, 'repoid'):
            ret = cmp(self.repoid, other.repoid)
            # We want 'installed' to appear over 'abcd' and 'xyz', so boost that
            if ret and self.repoid == 'installed':
                return 1
            if ret and other.repoid == 'installed':
                return -1
        return ret
    def __eq__(self, other):
        """ Compare packages for yes/no equality, includes everything in the
            UI package comparison. """
        if not other:
            return False
        if self.pkgtup != other.pkgtup:
            return False
        if hasattr(self, 'repoid') and hasattr(other, 'repoid'):
            if self.repoid != other.repoid:
                return False
        return True
    def __ne__(self, other):
        if not (self == other):
            return True
        return False
    def __getitem__(self, key):
        # Dict-style access to attributes: pkg['name'] == pkg.name.
        return getattr(self, key)
    def verEQ(self, other):
        """ Compare package to another one, only rpm-version equality.
            Returns None when 'other' is falsy. """
        if not other:
            return None
        ret = cmp(self.name, other.name)
        if ret != 0:
            return False
        return comparePoEVREQ(self, other)
    def verNE(self, other):
        """ Compare package to another one, only rpm-version inequality. """
        if not other:
            return None
        return not self.verEQ(other)
    def verLT(self, other):
        """ Uses verCMP, tests if the other _rpm-version_ is < ours. """
        return self.verCMP(other) < 0
    def verLE(self, other):
        """ Uses verCMP, tests if the other _rpm-version_ is <= ours. """
        return self.verCMP(other) <= 0
    def verGT(self, other):
        """ Uses verCMP, tests if the other _rpm-version_ is > ours. """
        return self.verCMP(other) > 0
    def verGE(self, other):
        """ Uses verCMP, tests if the other _rpm-version_ is >= ours. """
        return self.verCMP(other) >= 0
    def __repr__(self):
        return "<%s : %s (%s)>" % (self.__class__.__name__, str(self),hex(id(self)))
    def returnSimple(self, varname):
        # Deprecated accessor kept for API compatibility; use attributes.
        warnings.warn("returnSimple() will go away in a future version of Yum.\n",
                      Errors.YumFutureDeprecationWarning, stacklevel=2)
        return getattr(self, varname)
    def returnChecksums(self):
        return self._checksums
    checksums = property(fget=lambda self: self.returnChecksums())
    def returnIdSum(self):
        # Return the (type, checksum) pair flagged as the package's "id"
        # checksum; implicitly returns None when no checksum has csumid set.
        for (csumtype, csum, csumid) in self.checksums:
            if csumid:
                return (csumtype, csum)
# Shared sentinel repository for packages that could not be found;
# cost 0 so it never wins a repository cost comparison.
_not_found_repo = FakeRepository('-')
_not_found_repo.cost = 0
class YumNotFoundPackage(PackageObject):
    """Stand-in PackageObject for a pkgtup that could not be found."""
    def __init__(self, pkgtup):
        # pkgtup is (name, arch, epoch, version, release).
        (self.name, self.arch, self.epoch,
         self.version, self.release) = pkgtup
        self.pkgtup = pkgtup
        self.size = 0
        self._checksums = [] # (type, checksum, id(0,1)
        self.repo = _not_found_repo
        self.repoid = _not_found_repo.id
    # Fakeout output.py that it's a real pkg. ...
    def _ui_from_repo(self):
        """ This just returns '-' """
        return self.repoid
    ui_from_repo = property(fget=lambda self: self._ui_from_repo())
    def verifyLocalPkg(self):
        """check the package checksum vs the localPkg
           return True if pkg is good, False if not -- never good here."""
        return False
class RpmBase(object):
    """return functions and storage for rpm-specific data:
    provides/requires/conflicts/obsoletes (prco), file lists and changelog."""
    def __init__(self):
        # prco maps each dependency type to a list of (name, flag, (e,v,r)).
        self.prco = {}
        self.prco['obsoletes'] = [] # (name, flag, (e,v,r))
        self.prco['conflicts'] = [] # (name, flag, (e,v,r))
        self.prco['requires'] = [] # (name, flag, (e,v,r))
        self.prco['provides'] = [] # (name, flag, (e,v,r))
        self.files = {}
        self.files['file'] = []
        self.files['dir'] = []
        self.files['ghost'] = []
        self._changelog = [] # (ctime, cname, ctext)
        self.licenses = []
        self._hash = None
    # FIXME: This is identical to PackageObject.__eq__ and __ne__, should be
    # remove (is .repoid fine here? ... we need it, maybe .repo.id).
    def __eq__(self, other):
        if not other: # check if other not is a package object.
            return False
        if self.pkgtup != other.pkgtup:
            return False
        if self.repoid != other.repoid:
            return False
        return True
    def __ne__(self, other):
        if not (self == other):
            return True
        return False
    def returnEVR(self):
        # Wrap (epoch, version, release) in the comparable helper object.
        return PackageEVR(self.epoch, self.version, self.release)
    def __hash__(self):
        # Cached; the identity string includes the repo id so the same NEVRA
        # from two different repos hashes differently.
        if self._hash is None:
            mystr = '%s - %s:%s-%s-%s.%s' % (self.repo.id, self.epoch, self.name,
                                             self.version, self.release, self.arch)
            self._hash = hash(mystr)
        return self._hash
    def returnPrco(self, prcotype, printable=False):
        """return list of provides, requires, conflicts or obsoletes;
        with printable=True the tuples are rendered to strings"""
        prcos = self.prco.get(prcotype, [])
        if printable:
            results = []
            for prco in prcos:
                if not prco[0]: # empty or none or whatever, doesn't matter
                    continue
                results.append(misc.prco_tuple_to_string(prco))
            return results
        return prcos
    def checkPrco(self, prcotype, prcotuple):
        """returns 1 or 0 if the pkg contains the requested tuple/tuple range"""
        # get rid of simple cases - nothing
        if prcotype not in self.prco:
            return 0
        # First try and exact match, then search
        # Make it faster, if it's "big".
        if len(self.prco[prcotype]) <= 8:
            if prcotuple in self.prco[prcotype]:
                return 1
        else:
            # Lazily build one membership set per prco type for O(1) tests.
            if not hasattr(self, '_prco_lookup'):
                self._prco_lookup = {'obsoletes' : None, 'conflicts' : None,
                                     'requires' : None, 'provides' : None}
            if self._prco_lookup[prcotype] is None:
                self._prco_lookup[prcotype] = set(self.prco[prcotype])
            if prcotuple in self._prco_lookup[prcotype]:
                return 1
        # make us look it up and compare
        (reqn, reqf, (reqe, reqv ,reqr)) = prcotuple
        if reqf is not None:
            return self.inPrcoRange(prcotype, prcotuple)
        else:
            # No version flag: a bare name match is enough.
            for (n, f, (e, v, r)) in self.returnPrco(prcotype):
                if i18n.str_eq(reqn, n):
                    return 1
        return 0
    def inPrcoRange(self, prcotype, reqtuple):
        """returns true if the package has a the prco that satisfies
           the reqtuple range, assume false.
           Takes: prcotype, requested prco tuple"""
        return bool(self.matchingPrcos(prcotype, reqtuple))
    def matchingPrcos(self, prcotype, reqtuple):
        # Return the prcotype entries whose name matches reqtuple and whose
        # version range satisfies it.
        (reqn, reqf, (reqe, reqv, reqr)) = reqtuple
        # find the named entry in pkgobj, do the comparsion
        result = []
        for (n, f, (e, v, r)) in self.returnPrco(prcotype):
            if not i18n.str_eq(reqn, n):
                continue
            if f == '=':
                f = 'EQ'
            if f != 'EQ' and prcotype == 'provides':
                # isn't this odd, it's not 'EQ' and it is a provides
                # - it really should be EQ
                # use the pkgobj's evr for the comparison
                if e is None:
                    e = self.epoch
                if v is None:
                    v = self.ver
                if r is None:
                    r = self.rel
                #(e, v, r) = (self.epoch, self.ver, self.rel)
            matched = rpmUtils.miscutils.rangeCompare(
                reqtuple, (n, f, (e, v, r)))
            if matched:
                result.append((n, f, (e, v, r)))
        return result
    def provides_for(self, reqtuple):
        """check to see if the package object provides for the requirement
           passed, including searching filelists if the requirement is a file
           dep"""
        if self.checkPrco('provides', reqtuple):
            return True
        if reqtuple[0].startswith('/'):
            # File dependency: primary repodata is enough only when the path
            # matches the "primary filename" patterns.
            if misc.re_primary_filename(reqtuple[0]):
                pri_only = True
            else:
                pri_only = False
            for ftype in ('file', 'dir', 'ghost'):
                if reqtuple[0] in self.returnFileEntries(ftype, pri_only):
                    return True
        return False
    def returnChangelog(self):
        """return changelog entries"""
        return self._changelog
    def returnFileEntries(self, ftype='file', primary_only=False):
        """return list of files based on type, you can pass primary_only=True
           to limit to those files in the primary repodata"""
        if self.files:
            if ftype in self.files:
                if primary_only:
                    if ftype == 'dir':
                        match = misc.re_primary_dirname
                    else:
                        match = misc.re_primary_filename
                    return [fn for fn in self.files[ftype] if match(fn)]
                return self.files[ftype]
        return []
    def returnFileTypes(self, primary_only=False):
        """return list of types of files in the package, you can pass
           primary_only=True to limit to those files in the primary repodata"""
        if primary_only:
            ret = [] # We only return the types for the primary files.
            for ftype in self.files.keys():
                if ftype == 'dir':
                    match = misc.re_primary_dirname
                else:
                    match = misc.re_primary_filename
                # As soon as we find a primary file of this type, we can
                # return it.
                for fn in self.files[ftype]:
                    if match(fn):
                        break
                else:
                    continue
                ret.append(ftype)
            return ret
        return self.files.keys()
    def returnPrcoNames(self, prcotype):
        # Memoized per instance as the attribute _cache_prco_names_<type>.
        if not hasattr(self, '_cache_prco_names_' + prcotype):
            data = [n for (n, f, v) in self.returnPrco(prcotype)]
            setattr(self, '_cache_prco_names_' + prcotype, data)
        return getattr(self, '_cache_prco_names_' + prcotype)
    def getProvidesNames(self):
        # Deprecated; use the provides_names property instead.
        warnings.warn('getProvidesNames() will go away in a future version of Yum.\n',
                      Errors.YumDeprecationWarning, stacklevel=2)
        return self.provides_names
    def simpleFiles(self, ftype='files'):
        # Deprecated; use returnFileEntries(primary_only=True) instead.
        warnings.warn('simpleFiles() will go away in a future version of Yum.'
                      'Use returnFileEntries(primary_only=True)\n',
                      Errors.YumDeprecationWarning, stacklevel=2)
        if self.files and ftype in self.files:
            return self.files[ftype]
        return []
    # Convenience properties over the storage above.
    filelist = property(fget=lambda self: self.returnFileEntries(ftype='file'))
    dirlist = property(fget=lambda self: self.returnFileEntries(ftype='dir'))
    ghostlist = property(fget=lambda self: self.returnFileEntries(ftype='ghost'))
    requires = property(fget=lambda self: self.returnPrco('requires'))
    strong_requires = property(fget=lambda self: self.returnPrco('strong_requires'))
    provides = property(fget=lambda self: self.returnPrco('provides'))
    obsoletes = property(fget=lambda self: self.returnPrco('obsoletes'))
    conflicts = property(fget=lambda self: self.returnPrco('conflicts'))
    provides_names = property(fget=lambda self: self.returnPrcoNames('provides'))
    requires_names = property(fget=lambda self: self.returnPrcoNames('requires'))
    strong_requires_names = property(fget=lambda self: self.returnPrcoNames('strong_requires'))
    conflicts_names = property(fget=lambda self: self.returnPrcoNames('conflicts'))
    obsoletes_names = property(fget=lambda self: self.returnPrcoNames('obsoletes'))
    provides_print = property(fget=lambda self: self.returnPrco('provides', True))
    requires_print = property(fget=lambda self: self.returnPrco('requires', True))
    strong_requires_print = property(fget=lambda self: self.returnPrco('strong_requires', True))
    conflicts_print = property(fget=lambda self: self.returnPrco('conflicts', True))
    obsoletes_print = property(fget=lambda self: self.returnPrco('obsoletes', True))
    changelog = property(fget=lambda self: self.returnChangelog())
    EVR = property(fget=lambda self: self.returnEVR())
    def _getBaseName(self):
        """ Return the "base name" of the package, atm. we can only look at
            the sourcerpm. """
        if hasattr(self, '_base_package_name_ret'):
            return self._base_package_name_ret
        if hasattr(self, 'sourcerpm') and self.sourcerpm:
            (n, v, r, e, a) = rpmUtils.miscutils.splitFilename(self.sourcerpm)
            if n != self.name:
                self._base_package_name_ret = n
                return n
        # If there is no sourcerpm, or sourcerpm == us, use .name
        self._base_package_name_ret = self.name
        return self._base_package_name_ret
    base_package_name = property(fget=lambda self: self._getBaseName())
    def have_fastReturnFileEntries(self):
        """ Is calling pkg.returnFileEntries(primary_only=True) faster than
            using searchFiles(). """
        return self.repo.sack.have_fastReturnFileEntries()
    def obsoletedBy(self, obsoleters, limit=0):
        """ Returns list of obsoleters that obsolete this package. Note that we
            don't do obsoleting loops. If limit is != 0, then we stop after
            finding that many. """
        provtup = (self.name, 'EQ', (self.epoch, self.version, self.release))
        ret = []
        for obspo in obsoleters:
            if obspo.inPrcoRange('obsoletes', provtup):
                ret.append(obspo)
                if limit and len(ret) > limit:
                    break
        return ret
class PackageEVR:
    """
    A comparable epoch, version, and release representation. Note that you
    almost certainly want to use pkg.verEQ() or pkg.verGT() etc. instead.
    """
    def __init__(self, e, v, r):
        self.epoch = e
        # Both spellings are part of the public interface.
        self.ver = self.version = v
        self.rel = self.release = r
    def compare(self, other):
        """cmp()-style EVR comparison via rpmUtils."""
        mine = (self.epoch, self.ver, self.rel)
        theirs = (other.epoch, other.ver, other.rel)
        return rpmUtils.miscutils.compareEVR(mine, theirs)
    def __lt__(self, other):
        return self.compare(other) < 0
    def __gt__(self, other):
        return self.compare(other) > 0
    def __le__(self, other):
        return self.compare(other) <= 0
    def __ge__(self, other):
        return self.compare(other) >= 0
    def __eq__(self, other):
        return comparePoEVREQ(self, other)
    def __ne__(self, other):
        return not (self == other)
class YumAvailablePackage(PackageObject, RpmBase):
"""derived class for the packageobject and RpmBase packageobject yum
uses this for dealing with packages in a repository"""
def __init__(self, repo, pkgdict = None):
PackageObject.__init__(self)
RpmBase.__init__(self)
self.repoid = repo.id
self.repo = repo
self.state = None
self._loadedfiles = False
self._verify_local_pkg_cache = None
if pkgdict != None:
self.importFromDict(pkgdict)
self.ver = self.version
self.rel = self.release
self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release)
def _ui_from_repo(self):
""" This reports the repo the package is from, we integrate YUMDB info.
for RPM packages so a package from "fedora" that is installed has a
ui_from_repo of "@fedora". Note that, esp. with the --releasever
option, "fedora" or "rawhide" isn't authoritative.
So we also check against the current releasever and if it is
different we also print the YUMDB releasever. This means that
installing from F12 fedora, while running F12, would report as
"@fedora/13". """
if self.repoid == 'installed' and 'from_repo' in self.yumdb_info:
end = ''
if (self.rpmdb.releasever is not None and
'releasever' in self.yumdb_info and
self.yumdb_info.releasever != self.rpmdb.releasever):
end = '/' + self.yumdb_info.releasever
return '@' + self.yumdb_info.from_repo + end
return self.repoid
ui_from_repo = property(fget=lambda self: self._ui_from_repo())
def exclude(self):
"""remove self from package sack"""
self.repo.sack.delPackage(self)
def printVer(self):
"""returns a printable version string - including epoch, if it's set"""
if self.epoch != '0':
ver = '%s:%s-%s' % (self.epoch, self.version, self.release)
else:
ver = '%s-%s' % (self.version, self.release)
return ver
def compactPrint(self):
ver = self.printVer()
return "%s.%s %s" % (self.name, self.arch, ver)
def _size(self):
return self.packagesize
def _remote_path(self):
return self.relativepath
def _remote_url(self):
"""returns a URL that can be used for downloading the package.
Note that if you're going to download the package in your tool,
you should use self.repo.getPackage."""
base = self.basepath
if base:
# urljoin sucks in the reverse way that os.path.join sucks :)
if base[-1] != '/':
base = base + '/'
return urlparse.urljoin(base, self.remote_path)
return urlparse.urljoin(self.repo.urls[0], self.remote_path)
size = property(fget=lambda self: self._size())
remote_path = property(_remote_path)
remote_url = property(lambda self: self._remote_url())
def _committer(self):
"Returns the name of the last person to do a commit to the changelog."
if hasattr(self, '_committer_ret'):
return self._committer_ret
if not len(self.changelog): # Empty changelog is _possible_ I guess
self._committer_ret = self.packager
return self._committer_ret
val = self.changelog[0][1]
# Chagnelog data is in multiple locale's, so we convert to ascii
# ignoring "bad" chars.
val = misc.to_unicode(val, errors='replace')
val = val.encode('ascii', 'replace')
# Hacky way to get rid of version numbers...
ix = val.find('> ')
if ix != -1:
val = val[0:ix+1]
self._committer_ret = val
return self._committer_ret
committer = property(_committer)
    def _committime(self):
        "Returns the time of the last commit to the changelog."
        # Cached after the first computation.
        if hasattr(self, '_committime_ret'):
            return self._committime_ret
        if not len(self.changelog): # Empty changelog is _possible_ I guess
            # No changelog: fall back to the package build time.
            self._committime_ret = self.buildtime
            return self._committime_ret
        # Entry 0 is the most recent; [0] is its timestamp.
        self._committime_ret = self.changelog[0][0]
        return self._committime_ret
    committime = property(_committime)
    # FIXME test this to see if it causes hell elsewhere
    def _checksum(self):
        "Returns the 'default' checksum"
        # self.checksums holds (type, value, is-pkgid) tuples; return the
        # value of the first entry.
        return self.checksums[0][1]
    checksum = property(_checksum)
def getDiscNum(self):
if self.basepath is None:
return None
(scheme, netloc, path, query, fragid) = urlparse.urlsplit(self.basepath)
if scheme == "media":
if len(fragid) == 0:
return 0
return int(fragid)
return None
    def returnHeaderFromPackage(self):
        """Read and return the rpm header from the local package file.
           Raises Errors.RepoError if rpm cannot open the file."""
        rpmfile = self.localPkg()
        ts = rpmUtils.transaction.initReadOnlyTransaction()
        try:
            hdr = rpmUtils.miscutils.hdrFromPackage(ts, rpmfile)
        except rpmUtils.RpmUtilsError:
            raise Errors.RepoError, 'Package Header %s: RPM Cannot open' % self
        return hdr
    def returnLocalHeader(self):
        """returns an rpm header object from the package object's local
        header cache"""
        # The cached .hdr file is expected to contain exactly one header.
        if os.path.exists(self.localHdr()):
            try:
                hlist = rpm.readHeaderListFromFile(self.localHdr())
                hdr = hlist[0]
            except (rpm.error, IndexError):
                # Unreadable, or the header list was empty.
                raise Errors.RepoError, 'Package Header %s: Cannot open' % self
        else:
            raise Errors.RepoError, 'Package Header %s: Not Available' % self
        return hdr
def localPkg(self):
"""return path to local package (whether it is present there, or not)"""
if not hasattr(self, 'localpath'):
rpmfn = os.path.basename(self.remote_path)
self.localpath = self.repo.pkgdir + '/' + rpmfn
return self.localpath
def localHdr(self):
"""return path to local cached Header file downloaded from package
byte ranges"""
if not hasattr(self, 'hdrpath'):
pkgname = os.path.basename(self.remote_path)
hdrname = pkgname[:-4] + '.hdr'
self.hdrpath = self.repo.hdrdir + '/' + hdrname
return self.hdrpath
    def verifyLocalPkg(self):
        """check the package checksum vs the localPkg
        return True if pkg is good, False if not"""
        # This is called a few times now, so we want some way to not have to
        # read+checksum "large" datasets multiple times per. transaction.
        try:
            nst = os.stat(self.localPkg())
        except OSError, e:
            # File missing (or not stat-able): cannot be valid.
            return False
        if (hasattr(self, '_verify_local_pkg_cache') and
            self._verify_local_pkg_cache):
            # Fast path: if inode/dev/mtime/size all match the stat taken
            # at the last successful verify, trust that result.
            ost = self._verify_local_pkg_cache
            if (ost.st_ino == nst.st_ino and
                ost.st_dev == nst.st_dev and
                ost.st_mtime == nst.st_mtime and
                ost.st_size == nst.st_size):
                return True
        # Slow path: recompute the checksum of the file on disk.
        (csum_type, csum) = self.returnIdSum()
        try:
            filesum = misc.checksum(csum_type, self.localPkg(),
                                    datasize=self.packagesize)
        except Errors.MiscError:
            # Checksum could not be computed (misc.checksum raised).
            return False
        if filesum != csum:
            return False
        # Remember the stat of the file we just verified, for the fast path.
        self._verify_local_pkg_cache = nst
        return True
    # See: http://www.freedesktop.org/wiki/CommonExtendedAttributes
    def _localXattrUrl(self):
        """ Get the user.xdg.origin.url value from the local pkg. ... if it's
            present. We cache this so we can access it after the file has been
            deleted (keepcache=False). """
        if xattr is None:
            # No xattr support available.
            return None
        if hasattr(self, '__cached_localXattrUrl'):
            return getattr(self, '__cached_localXattrUrl')
        # Only trust the attribute from a package that matches its checksum.
        if not self.verifyLocalPkg():
            return None
        try:
            ret = xattr.get(self.localPkg(), 'user.xdg.origin.url')
        except: # Documented to be "EnvironmentError", but make sure
            return None
        # NOTE: getattr/setattr use the literal string, so no class-private
        # name mangling happens on '__cached_localXattrUrl' here.
        setattr(self, '__cached_localXattrUrl', ret)
        return ret
    xattr_origin_url = property(lambda x: x._localXattrUrl())
    def prcoPrintable(self, prcoTuple):
        """convert the prco tuples into a nicer human string
           (deprecated; warns on use)"""
        warnings.warn('prcoPrintable() will go away in a future version of Yum.\n',
                      Errors.YumDeprecationWarning, stacklevel=2)
        return misc.prco_tuple_to_string(prcoTuple)
    def requiresList(self):
        """return a list of requires in normal rpm format"""
        return self.requires_print
    def returnChecksums(self):
        # Single checksum entry: (type, pkgid checksum, is-pkgid flag == 1).
        return [(self.checksum_type, self.pkgId, 1)]
    def importFromDict(self, pkgdict):
        """handles an mdCache package dictionary item to populate out
           the package information"""
        # translates from the pkgdict, populating out the information for the
        # packageObject.  Each section is optional, keyed off hasattr().
        # n-e-v-r-a identity
        if hasattr(pkgdict, 'nevra'):
            (n, e, v, r, a) = pkgdict.nevra
            self.name = n
            self.epoch = e
            self.version = v
            self.arch = a
            self.release = r
        # build/file timestamps
        if hasattr(pkgdict, 'time'):
            self.buildtime = pkgdict.time['build']
            self.filetime = pkgdict.time['file']
        # package/archive/installed sizes
        if hasattr(pkgdict, 'size'):
            self.packagesize = pkgdict.size['package']
            self.archivesize = pkgdict.size['archive']
            self.installedsize = pkgdict.size['installed']
        # download location (optional xml:base + href)
        if hasattr(pkgdict, 'location'):
            url = pkgdict.location.get('base')
            if url == '':
                # an empty base means "no explicit base"
                url = None
            self.basepath = url
            self.relativepath = pkgdict.location['href']
        # byte range of the rpm header inside the package file
        if hasattr(pkgdict, 'hdrange'):
            self.hdrstart = pkgdict.hdrange['start']
            self.hdrend = pkgdict.hdrange['end']
        # descriptive info tags
        if hasattr(pkgdict, 'info'):
            for item in ['summary', 'description', 'packager', 'group',
                         'buildhost', 'sourcerpm', 'url', 'vendor']:
                setattr(self, item, pkgdict.info[item])
            # summaries are forced onto a single line
            self.summary = self.summary.replace('\n', '')
            self.licenses.append(pkgdict.info['license'])
        # file lists, bucketed by file type
        if hasattr(pkgdict, 'files'):
            for fn in pkgdict.files:
                ftype = pkgdict.files[fn]
                if ftype not in self.files:
                    self.files[ftype] = []
                self.files[ftype].append(fn)
        # provides/requires/conflicts/obsoletes as (name, flags, (e,v,r))
        if hasattr(pkgdict, 'prco'):
            for rtype in pkgdict.prco:
                for rdict in pkgdict.prco[rtype]:
                    name = rdict['name']
                    f = rdict.get('flags')
                    e = rdict.get('epoch')
                    v = rdict.get('ver')
                    r = rdict.get('rel')
                    self.prco[rtype].append((name, f, (e,v,r)))
        # changelog entries stored as (date, author, text)
        if hasattr(pkgdict, 'changelog'):
            for cdict in pkgdict.changelog:
                date = cdict.get('date')
                text = cdict.get('value')
                author = cdict.get('author')
                self._changelog.append((date, author, text))
        # package checksum; 'pkgid' says whether it identifies the package
        if hasattr(pkgdict, 'checksum'):
            ctype = pkgdict.checksum['type']
            csum = pkgdict.checksum['value']
            csumid = pkgdict.checksum['pkgid']
            if csumid is None or csumid.upper() == 'NO':
                csumid = 0
            elif csumid.upper() == 'YES':
                csumid = 1
            else:
                # anything unrecognised is treated as "not a pkgid"
                csumid = 0
            self._checksums.append((ctype, csum, csumid))
    def _return_remote_location(self):
        # break self.remote_url up into smaller pieces: the directory part
        # becomes xml:base and the file name becomes href.
        base = os.path.dirname(self.remote_url)
        href = os.path.basename(self.remote_url)
        msg = """<location xml:base="%s" href="%s"/>\n""" % (
                  misc.to_xml(base,attrib=True), misc.to_xml(href, attrib=True))
        return msg
    def _dump_base_items(self):
        """Return the base (non-<format>) part of this package's
        primary.xml <package> entry, including the <location/> element."""
        packager = url = ''
        if self.packager:
            packager = misc.to_xml(self.packager)
        if self.url:
            url = misc.to_xml(self.url)
        # first checksum entry is the package checksum; marked pkgid="YES"
        (csum_type, csum, csumid) = self.checksums[0]
        msg = """
  <name>%s</name>
  <arch>%s</arch>
  <version epoch="%s" ver="%s" rel="%s"/>
  <checksum type="%s" pkgid="YES">%s</checksum>
  <summary>%s</summary>
  <description>%s</description>
  <packager>%s</packager>
  <url>%s</url>
  <time file="%s" build="%s"/>
  <size package="%s" installed="%s" archive="%s"/>\n""" % (self.name,
         self.arch, self.epoch, self.ver, self.rel, csum_type, csum,
         misc.to_xml(self.summary),
         misc.to_xml(self.description),
         packager, url, self.filetime,
         self.buildtime, self.packagesize, self.installedsize, self.archivesize)
        msg += self._return_remote_location()
        return msg
    def _dump_format_items(self):
        """Return the <format> section of this package's primary.xml
        entry: rpm tags, header range, PRCO data and primary files."""
        msg = "  <format>\n"
        # Empty tags are still emitted as self-closing elements, for
        # compatibility with older consumers (see sourcerpm note below).
        if self.license:
            msg += """    <rpm:license>%s</rpm:license>\n""" % misc.to_xml(self.license)
        else:
            msg += """    <rpm:license/>\n"""
        if self.vendor:
            msg += """    <rpm:vendor>%s</rpm:vendor>\n""" % misc.to_xml(self.vendor)
        else:
            msg += """    <rpm:vendor/>\n"""
        if self.group:
            msg += """    <rpm:group>%s</rpm:group>\n""" % misc.to_xml(self.group)
        else:
            msg += """    <rpm:group/>\n"""
        if self.buildhost:
            msg += """    <rpm:buildhost>%s</rpm:buildhost>\n""" % misc.to_xml(self.buildhost)
        else:
            msg += """    <rpm:buildhost/>\n"""
        if self.sourcerpm:
            msg += """    <rpm:sourcerpm>%s</rpm:sourcerpm>\n""" % misc.to_xml(self.sourcerpm)
        else: # b/c yum 2.4.3 and OLD y-m-p will freak out if it is not there.
            msg += """    <rpm:sourcerpm/>\n"""
        msg +="""    <rpm:header-range start="%s" end="%s"/>""" % (self.hdrstart,
                                                               self.hdrend)
        msg += self._dump_pco('provides')
        msg += self._dump_requires()
        msg += self._dump_pco('conflicts')
        msg += self._dump_pco('obsoletes')
        msg += self._dump_files(True)
        if msg[-1] != '\n':
            msg += """\n"""
        msg += """  </format>"""
        return msg
    def _dump_pco(self, pcotype):
        """Return the <rpm:provides/conflicts/obsoletes> XML for the given
        PRCO type; empty string when there are no entries."""
        msg = ""
        mylist = getattr(self, pcotype)
        if mylist: msg = "\n    <rpm:%s>\n" % pcotype
        for (name, flags, (e,v,r)) in sorted(mylist):
            pcostring = '''      <rpm:entry name="%s"''' % misc.to_xml(name, attrib=True)
            # flags/epoch/ver/rel are each optional attributes
            if flags:
                pcostring += ''' flags="%s"''' % misc.to_xml(flags, attrib=True)
            if e:
                pcostring += ''' epoch="%s"''' % misc.to_xml(e, attrib=True)
            if v:
                pcostring += ''' ver="%s"''' % misc.to_xml(v, attrib=True)
            if r:
                pcostring += ''' rel="%s"''' % misc.to_xml(r, attrib=True)
            pcostring += "/>\n"
            msg += pcostring
        if mylist: msg += "    </rpm:%s>" % pcotype
        return msg
def _dump_files(self, primary=False):
msg = []
for fn in sorted(self.returnFileEntries('file', primary)):
msg.append('\n <file>%s</file>' % misc.to_xml(fn))
for fn in sorted(self.returnFileEntries('dir', primary)):
msg.append('\n <file type="dir">%s</file>' % misc.to_xml(fn))
for fn in sorted(self.returnFileEntries('ghost', primary)):
msg.append('\n <file type="ghost">%s</file>' % misc.to_xml(fn))
return ''.join(msg)
    def _requires_with_pre(self):
        # Subclasses return requires as (name, flags, (e, v, r), pre)
        # tuples, where "pre" marks pre-requires (see _dump_requires()).
        raise NotImplementedError()
    def _dump_requires(self):
        """returns deps in XML format"""
        mylist = self._requires_with_pre()
        msg = ""
        if mylist: msg = "\n    <rpm:requires>\n"
        if hasattr(self, '_collapse_libc_requires') and self._collapse_libc_requires:
            # Optionally collapse the many libc.so.6(GLIBC_x.y) requires
            # down to the single "best" (highest) one.
            libc_requires = filter(lambda x: x[0].startswith('libc.so.6'), mylist)
            if libc_requires:
                rest = sorted(libc_requires, cmp=compareVerOnly, key=itemgetter(0))
                best = rest.pop()
                if len(rest) > 0 and best[0].startswith('libc.so.6()'): # rpmvercmp will sort this one as 'highest' so we need to remove it from the list
                    best = rest.pop()
                newlist = []
                for i in mylist:
                    if i[0].startswith('libc.so.6') and i != best:
                        continue
                    newlist.append(i)
                mylist = newlist
        used = 0
        for (name, flags, (e,v,r),pre) in sorted(mylist):
            if name.startswith('rpmlib('):
                # rpmlib() deps are rpm-internal and never dumped.
                continue
            # this drops out requires that the pkg provides for itself.
            if name in self.provides_names or \
                    (name.startswith('/') and \
                     (name in self.filelist or name in self.dirlist or
                      name in self.ghostlist)):
                if not flags:
                    continue
                else:
                    # versioned require: only drop it if we explicitly
                    # provide the exact versioned capability.
                    if self.checkPrco('provides', (name, flags, (e,v,r))):
                        continue
            prcostring = '''      <rpm:entry name="%s"''' % misc.to_xml(name, attrib=True)
            if flags:
                prcostring += ''' flags="%s"''' % misc.to_xml(flags, attrib=True)
            if e:
                prcostring += ''' epoch="%s"''' % misc.to_xml(e, attrib=True)
            if v:
                prcostring += ''' ver="%s"''' % misc.to_xml(v, attrib=True)
            if r:
                prcostring += ''' rel="%s"''' % misc.to_xml(r, attrib=True)
            if pre not in ("0", 0):
                prcostring += ''' pre="%s"''' % pre
            prcostring += "/>\n"
            msg += prcostring
            used += 1
        if mylist: msg += "    </rpm:requires>"
        if used == 0:
            # everything was filtered out: emit nothing at all.
            return ""
        return msg
    def _dump_changelog(self, clog_limit):
        """Return <changelog> XML for up to clog_limit entries (0 == all),
        oldest first."""
        if not self.changelog:
            return ""
        msg = []
        # We need to output them "backwards", so the oldest is first
        if not clog_limit:
            clogs = self.changelog
        else:
            clogs = self.changelog[:clog_limit]
        last_ts = 0
        hack_ts = 0
        for (ts, author, content) in reversed(clogs):
            # Entries sharing the same timestamp get +1s each so the
            # emitted dates stay unique/ordered.
            if ts != last_ts:
                hack_ts = 0
            else:
                hack_ts += 1
            last_ts = ts
            ts += hack_ts
            msg.append('\n  <changelog author="%s" date="%s">%s</changelog>' % (
                        misc.to_xml(author, attrib=True), misc.to_xml(str(ts)),
                        misc.to_xml(content)))
        return ''.join(msg)
def xml_dump_primary_metadata(self):
msg = """\n<package type="rpm">"""
msg += self._dump_base_items()
msg += self._dump_format_items()
msg += """\n</package>"""
assert type(msg) is str
return msg
    def xml_dump_filelists_metadata(self):
        """Return this package's filelists.xml <package> entry."""
        msg = """
<package pkgid="%s" name="%s" arch="%s">
  <version epoch="%s" ver="%s" rel="%s"/>%s
</package>""" % (self.checksum, self.name, self.arch,
                 self.epoch, self.ver, self.rel,
                 self._dump_files())
        assert type(msg) is str
        return msg
    def xml_dump_other_metadata(self, clog_limit=0):
        """Return this package's other.xml <package> entry; clog_limit
        caps the number of changelog entries (0 == all)."""
        msg = """
<package pkgid="%s" name="%s" arch="%s">
  <version epoch="%s" ver="%s" rel="%s"/>%s
</package>""" % (self.checksum, self.name, self.arch,
                 self.epoch, self.ver, self.rel,
                 self._dump_changelog(clog_limit))
        assert type(msg) is str
        return msg
def _rpm_long_size_hack(hdr, size):
    """Fetch a size tag from an rpm header.  Rpm stores nothing useful in
    the 32-bit tag for very large packages and keeps the real value in the
    'long' variant (e.g. 'longsize'), so fall back to that when the plain
    tag is falsy."""
    value = hdr[size]
    if value:
        return value
    return hdr['long' + size]
class YumHeaderPackage(YumAvailablePackage):
    """Package object built from an rpm header"""
    def __init__(self, repo, hdr):
        """hand in an rpm header, we'll assume it's installed and query from there"""
        YumAvailablePackage.__init__(self, repo)
        self.hdr = hdr
        self.name = misc.share_data(self.hdr['name'])
        this_a = self.hdr['arch']
        if not this_a: # this should only happen on gpgkeys and other "odd" pkgs
            this_a = 'noarch'
        self.arch = misc.share_data(this_a)
        self.epoch = misc.share_data(self.doepoch())
        self.version = misc.share_data(self.hdr['version'])
        self.release = misc.share_data(self.hdr['release'])
        self.ver = self.version
        self.rel = self.release
        self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release)
        # summary/description are loaded lazily from the header (see the
        # properties below).
        self._loaded_summary = None
        self._loaded_description = None
        self.pkgid = self.hdr[rpm.RPMTAG_SHA1HEADER]
        if not self.pkgid:
            # No SHA1 header tag: synthesize a (weaker) id.
            self.pkgid = "%s.%s" %(self.hdr['name'], self.hdr['buildtime'])
        self.packagesize = _rpm_long_size_hack(self.hdr, 'archivesize')
        self.installedsize = _rpm_long_size_hack(self.hdr, 'size')
        self.__mode_cache = {}
        self.__prcoPopulated = False
    def _remote_url(self):
        # Header packages always refer to a local file.
        return 'file://' + urllib.quote(os.path.abspath(self.localPkg()))
    def _loadSummary(self):
        # Summaries "can be" empty, which rpm return [], see BZ 473239, *sigh*
        if self._loaded_summary is None:
            summary = self._get_hdr()['summary'] or ''
            summary = misc.share_data(summary.replace('\n', ''))
            self._loaded_summary = summary
        return self._loaded_summary
    summary = property(lambda x: x._loadSummary())
    def _loadDescription(self):
        # Lazily pulled from the header, then cached.
        if self._loaded_description is None:
            description = self._get_hdr()['description'] or ''
            description = misc.share_data(description)
            self._loaded_description = description
        return self._loaded_description
    description = property(lambda x: x._loadDescription())
    def __str__(self):
        # n-v-r.a, with "epoch:" prefixed only when epoch is non-zero.
        if self.epoch == '0':
            val = '%s-%s-%s.%s' % (self.name, self.version, self.release,
                                        self.arch)
        else:
            val = '%s:%s-%s-%s.%s' % (self.epoch,self.name, self.version,
                                           self.release, self.arch)
        return val
    def returnPrco(self, prcotype, printable=False):
        # PRCO data is populated from the header on first use.
        if not self.__prcoPopulated:
            self._populatePrco()
            self.__prcoPopulated = True
        return YumAvailablePackage.returnPrco(self, prcotype, printable)
    def _get_hdr(self):
        # Hook point: subclasses may fetch the header differently.
        return self.hdr
    def _populatePrco(self):
        "Populate the package object with the needed PRCO interface."
        tag2prco = { "OBSOLETE": misc.share_data("obsoletes"),
                     "CONFLICT": misc.share_data("conflicts"),
                     "REQUIRE": misc.share_data("requires"),
                     "PROVIDE": misc.share_data("provides") }
        hdr = self._get_hdr()
        for tag in tag2prco:
            name = hdr[getattr(rpm, 'RPMTAG_%sNAME' % tag)]
            name = map(misc.share_data, name)
            if not name: # empty or none or whatever, doesn't matter
                continue
            lst = hdr[getattr(rpm, 'RPMTAG_%sFLAGS' % tag)]
            if tag == 'REQUIRE':
                # Rpm is a bit magic here, and if pkgA requires(pre/post): foo
                # it will then let you remove foo _after_ pkgA has been
                # installed. So we need to mark those deps. as "weak".
                bits = rpm.RPMSENSE_SCRIPT_PRE | rpm.RPMSENSE_SCRIPT_POST
                weakreqs = [bool(flag & bits) for flag in lst]
            flag = map(rpmUtils.miscutils.flagToString, lst)
            flag = map(misc.share_data, flag)
            lst = hdr[getattr(rpm, 'RPMTAG_%sVERSION' % tag)]
            vers = map(rpmUtils.miscutils.stringToVersion, lst)
            vers = map(lambda x: (misc.share_data(x[0]), misc.share_data(x[1]),
                                  misc.share_data(x[2])), vers)
            prcotype = tag2prco[tag]
            self.prco[prcotype] = map(misc.share_data, zip(name,flag,vers))
            if tag == 'REQUIRE':
                # non-weak requires also get recorded as "strong_requires"
                weakreqs = zip(weakreqs, self.prco[prcotype])
                strongreqs = [wreq[1] for wreq in weakreqs if not wreq[0]]
                self.prco['strong_requires'] = strongreqs
    def tagByName(self, tag):
        """Deprecated accessor for a header tag; use attribute access."""
        warnings.warn("tagByName() will go away in a furture version of Yum.\n",
                      Errors.YumFutureDeprecationWarning, stacklevel=2)
        try:
            return getattr(self, tag)
        except AttributeError:
            raise Errors.MiscError, "Unknown header tag %s" % tag
    def __getattr__(self, thing):
        # Fall back to the rpm header for unknown attributes.
        #FIXME - if an error - return AttributeError, not KeyError
        # ONLY FIX THIS AFTER THE API BREAK
        if thing.startswith('__') and thing.endswith('__'):
            # If these existed, then we wouldn't get here ...
            # So these are missing.
            raise AttributeError, "%s has no attribute %s" % (self, thing)
        try:
            return self.hdr[thing]
        except KeyError:
            # Note above, API break to fix this ... this at least is a nicer
            # msg. so we know what we accessed that is bad.
            raise KeyError, "%s has no attribute %s" % (self, thing)
        except ValueError:
            # Note above, API break to fix this ... this at least is a nicer
            # msg. so we know what we accessed that is bad.
            raise ValueError, "%s has no attribute %s" % (self, thing)
    def doepoch(self):
        # Missing epoch in the header is treated as '0'.
        tmpepoch = self.hdr['epoch']
        if tmpepoch is None:
            epoch = '0'
        else:
            epoch = str(tmpepoch)
        return epoch
    def returnLocalHeader(self):
        # We already hold the header; no cache file involved.
        return self.hdr
    def _loadFiles(self):
        # Bucket the header's file list into file/dir/ghost (lazily, once).
        files = self.hdr['filenames']
        fileflags = self.hdr['fileflags']
        filemodes = self.hdr['filemodes']
        filetuple = zip(files, filemodes, fileflags)
        if not self._loadedfiles:
            for (fn, mode, flag) in filetuple:
                #garbage checks
                if mode is None or mode == '':
                    if 'file' not in self.files:
                        self.files['file'] = []
                    self.files['file'].append(fn)
                    continue
                # cache the S_ISDIR() result per distinct mode value
                if mode not in self.__mode_cache:
                    self.__mode_cache[mode] = stat.S_ISDIR(mode)
                fkey = 'file'
                if self.__mode_cache[mode]:
                    fkey = 'dir'
                elif flag is not None and (flag & 64):
                    # 64 == rpm.RPMFILE_GHOST
                    fkey = 'ghost'
                self.files.setdefault(fkey, []).append(fn)
            self._loadedfiles = True
    def returnFileEntries(self, ftype='file', primary_only=False):
        """return list of files based on type"""
        self._loadFiles()
        return YumAvailablePackage.returnFileEntries(self,ftype,primary_only)
    def returnChangelog(self):
        # note - if we think it is worth keeping changelogs in memory
        # then create a _loadChangelog() method to put them into the
        # self._changelog attr
        if len(self.hdr['changelogname']) > 0:
            return zip(self.hdr['changelogtime'],
                       self.hdr['changelogname'],
                       self.hdr['changelogtext'])
        return []
    def returnChecksums(self):
        # Header packages have no repo-metadata checksums to offer.
        raise NotImplementedError()
    def _size(self):
        return _rpm_long_size_hack(self.hdr, 'size')
    def _is_pre_req(self, flag):
        """check the flags for a requirement, return 1 or 0 whether or not requires
           is a pre-requires or a not"""
        # FIXME this should probably be put in rpmUtils.miscutils since
        # - that's what it is
        if flag is not None:
            # Note: RPMSENSE_PREREQ == 0 since rpm-4.4'ish
            if flag & (rpm.RPMSENSE_PREREQ |
                       rpm.RPMSENSE_SCRIPT_PRE |
                       rpm.RPMSENSE_SCRIPT_POST):
                return 1
        return 0
    def _requires_with_pre(self):
        """returns requires with pre-require bit"""
        name = self.hdr[rpm.RPMTAG_REQUIRENAME]
        lst = self.hdr[rpm.RPMTAG_REQUIREFLAGS]
        flag = map(flagToString, lst)
        pre = map(self._is_pre_req, lst)
        lst = self.hdr[rpm.RPMTAG_REQUIREVERSION]
        vers = map(stringToVersion, lst)
        if name is not None:
            # (name, flags, (e, v, r), pre) tuples, de-duplicated
            lst = zip(name, flag, vers, pre)
        mylist = misc.unique(lst)
        return mylist
class _CountedReadFile:
    """File-like wrapper exposing only read(); it tallies how many bytes
    have been handed out in .read_size.  Implemented so we can get the
    real size of the file from prelink."""
    def __init__(self, fp):
        # underlying file object; only its read() is ever used
        self.fp = fp
        self.read_size = 0
    def read(self, size):
        data = self.fp.read(size)
        self.read_size += len(data)
        return data
class _PkgVerifyProb:
    """ Holder for each "problem" we find with a pkg.verify(). """
    def __init__(self, type, msg, ftypes, fake=False):
        self.type = type                 # short problem id, see type2sort
        self.message = msg               # human readable description
        self.database_value = None       # expected value (from rpmdb)
        self.disk_value = None           # actual value found on disk
        self.file_types = ftypes         # rpm file types of the file
        self.fake = fake                 # informational, not a real problem
    def __cmp__(self, other):
        # Sort problems by severity/kind first, then by their values.
        if other is None:
            return 1
        type2sort = {'type' : 1, 'symlink' : 2, 'checksum' : 3, 'size' : 4,
                     'user' : 4, 'group' : 5, 'mode' : 6, 'genchecksum' : 7,
                     'mtime' : 8, 'missing' : 9, 'permissions-missing' : 10,
                     'state' : 11, 'missingok' : 12, 'ghost' : 13}
        ret = cmp(type2sort[self.type], type2sort[other.type])
        if not ret:
            for attr in ['disk_value', 'database_value', 'file_types']:
                x = getattr(self,  attr)
                y = getattr(other, attr)
                if x is None:
                    # None values only compare equal to other Nones here.
                    assert y is None
                    continue
                ret = cmp(x, y)
                if ret:
                    break
        return ret
# Bit values mirroring rpm's per-file verify flags (RPMVERIFY_*); tested
# against each file's vflags in _RPMVerifyPackageFile below.
_RPMVERIFY_DIGEST = (1 << 0)
_RPMVERIFY_FILESIZE = (1 << 1)
_RPMVERIFY_LINKTO = (1 << 2)
_RPMVERIFY_USER = (1 << 3)
_RPMVERIFY_GROUP = (1 << 4)
_RPMVERIFY_MTIME = (1 << 5)
_RPMVERIFY_MODE = (1 << 6)
_RPMVERIFY_RDEV = (1 << 7)
_RPMVERIFY_CAPS = (1 << 8)
_RPMVERIFY_CONTEXTS = (1 << 15)
class YUMVerifyPackageFile(object):
    """Database-side view of one file in a package, consumed by
    YumInstalledPackage.verify().

    Each verifiable attribute (readlink, mtime, dev, user, group, mode,
    digest, size) is a property with a matching verify_<attr> boolean
    saying whether it should be checked on disk.  Assigning a non-None
    value sets the flag; assigning (or del-ing to) None clears it."""

    def __init__(self, filename):
        self.filename = filename
        # Property assignments below also initialise the verify_* flags
        # (all False, since the values are None).
        self.readlink = None
        self.mtime = None
        self.dev = None
        self.user = None
        self.group = None
        self.mode = None
        self.digest = None
        self.size = None

        self.verify_ftype = False

        # BUGFIX: this was "self.rpmfile_type" (singular), but every
        # consumer — verify() and the _RPMVerifyPackageFile subclass —
        # uses the plural name, so plain instances lacked the attribute.
        self.rpmfile_types = set()
        self.rpmfile_state = None

    def _setVerifiedAttr(self, attr, val, vattr=None):
        # Store val in the backing "_attr" slot and record whether it is
        # verifiable (non-None) in the "verify_attr" flag.
        if vattr is None:
            vattr = "verify_" + attr
        attr = "_" + attr
        setattr(self, attr, val)
        setattr(self, vattr, val is not None)

    readlink = property(fget=lambda x: x._readlink,
                        fdel=lambda x: setattr(x, 'readlink', None),
                        fset=lambda x,y: x._setVerifiedAttr("readlink", y))
    mtime = property(fget=lambda x: x._mtime,
                     fdel=lambda x: setattr(x, 'mtime', None),
                     fset=lambda x,y: x._setVerifiedAttr("mtime", y))
    dev = property(fget=lambda x: x._dev,
                   fdel=lambda x: setattr(x, 'dev', None),
                   fset=lambda x,y: x._setVerifiedAttr("dev", y))
    user = property(fget=lambda x: x._user,
                    fdel=lambda x: setattr(x, 'user', None),
                    fset=lambda x,y: x._setVerifiedAttr("user", y))
    group = property(fget=lambda x: x._group,
                     fdel=lambda x: setattr(x, 'group', None),
                     fset=lambda x,y: x._setVerifiedAttr("group", y))
    # Mode is special, because it's shared with ftype.
    digest = property(fget=lambda x: x._digest,
                      fdel=lambda x: setattr(x, 'digest', None),
                      fset=lambda x,y: x._setVerifiedAttr("digest", y))
    size = property(fget=lambda x: x._size,
                    fdel=lambda x: setattr(x, 'size', None),
                    fset=lambda x,y: x._setVerifiedAttr("size", y))

    def _setVerifiedMode(self, attr, val):
        # Setting mode controls both the mode check and the file-type
        # check (the type is encoded in the mode bits).
        self.verify_mode = val is not None
        self.verify_ftype = val is not None
        attr = "_" + attr
        setattr(self, attr, val)
    mode = property(fget=lambda x: x._mode,
                    fdel=lambda x: setattr(x, 'mode', None),
                    fset=lambda x,y: x._setVerifiedMode("mode", y))

    @staticmethod
    def _ftype(mode):
        """ Given a "mode" return the name of the type of file. """
        if stat.S_ISREG(mode):  return "file"
        if stat.S_ISDIR(mode):  return "directory"
        if stat.S_ISLNK(mode):  return "symlink"
        if stat.S_ISFIFO(mode): return "fifo"
        if stat.S_ISCHR(mode):  return "character device"
        if stat.S_ISBLK(mode):  return "block device"
        return "<unknown>"

    # File-type name derived from the mode bits.
    ftype = property(fget=lambda x: x._ftype(x.mode))
class _RPMVerifyPackageFile(YUMVerifyPackageFile):
    """YUMVerifyPackageFile built from an rpm file-info (fi) tuple out of
    an installed package's header."""
    def __init__(self, fi, filetuple, csum_type, override_vflags=False):
        YUMVerifyPackageFile.__init__(self, filetuple[0])

        flags = filetuple[4]
        if override_vflags:
            vflags = -1  # pretend every attribute is verifiable
        else:
            vflags = filetuple[9]

        if vflags & _RPMVERIFY_FILESIZE:
            self.size = filetuple[1]

        # BUGFIX: '&' binds tighter than '|', so the old unparenthesized
        # "vflags & _RPMVERIFY_RDEV|_RPMVERIFY_MODE" was always true.
        if vflags & (_RPMVERIFY_RDEV|_RPMVERIFY_MODE):
            mode = filetuple[2]
            if mode < 0:
                # Stupid rpm, should be unsigned value but is signed ...
                # so we "fix" it via. this hack
                mode = (mode & 0xFFFF)
            self.mode = mode
            # Only one of the two flags may actually be set; clear the
            # corresponding verify flag for the missing one.
            if not (vflags & _RPMVERIFY_MODE):
                self.verify_mode = False
            if not (vflags & _RPMVERIFY_RDEV):
                self.verify_ftype = False
        if vflags & _RPMVERIFY_MTIME:
            self.mtime = filetuple[3]
        if vflags & _RPMVERIFY_RDEV:
            self.dev = filetuple[5]

        # Decode the RPMFILE_* bits into human readable type names.
        self.rpmfile_types = rpmfile_types = set()
        if flags & rpm.RPMFILE_CONFIG:
            rpmfile_types.add('configuration')
        if flags & rpm.RPMFILE_DOC:
            rpmfile_types.add('documentation')
        if flags & rpm.RPMFILE_GHOST:
            rpmfile_types.add('ghost')
        if flags & rpm.RPMFILE_LICENSE:
            rpmfile_types.add('license')
        if flags & rpm.RPMFILE_PUBKEY:
            rpmfile_types.add('public key')
        if flags & rpm.RPMFILE_README:
            rpmfile_types.add('README')
        if flags & rpm.RPMFILE_MISSINGOK:
            rpmfile_types.add('missing ok')

        # 6 == inode
        # 7 == link
        state = filetuple[8]
        statemap = {rpm.RPMFILE_STATE_NORMAL : 'normal',
                    rpm.RPMFILE_STATE_REPLACED : 'replaced',
                    rpm.RPMFILE_STATE_NOTINSTALLED : 'not installed',
                    rpm.RPMFILE_STATE_WRONGCOLOR : 'wrong color',
                    rpm.RPMFILE_STATE_NETSHARED : 'netshared'}
        if state in statemap:
            self.rpmfile_state = statemap[state]
        else:
            self.rpmfile_state = "<unknown>"

        if vflags & _RPMVERIFY_USER:
            self.user = filetuple[10]
        if vflags & _RPMVERIFY_GROUP:
            self.group = filetuple[11]
        if vflags & _RPMVERIFY_DIGEST:
            self.digest = (csum_type, filetuple[12])

        # Guard on self.mode: with the fixed condition above, mode can now
        # legitimately be None, and ftype would fail on it.
        if (vflags & _RPMVERIFY_LINKTO and self.mode is not None and
                self.ftype == 'symlink'):
            self.readlink = fi.FLink() # fi.foo is magic, don't think about it
        elif vflags & _RPMVERIFY_LINKTO:
            self.readlink = ''
class YUMVerifyPackage:
    """ A holder for YUMVerifyPackageFile objects, keyed by filename. """

    def __init__(self, po):
        self.po = po
        self._files = {}  # filename -> YUMVerifyPackageFile

    def __contains__(self, fname):
        """ Note that this checks if a filename is part of the package, and
            not a full YUMVerifyPackageFile(). """
        return fname in self._files

    def __iter__(self):
        # iterate the stored YUMVerifyPackageFile objects
        return iter(self._files.values())

    def add(self, vpf):
        self._files[vpf.filename] = vpf

    def remove(self, vpf):
        del self._files[vpf.filename]

    def discard(self, vpf):
        # remove-if-present; goes via. __contains__/remove so subclass
        # overrides of those still apply.
        if vpf.filename in self:
            self.remove(vpf)

    def clear(self):
        self._files = {}
# Cache of compiled file-match patterns from the most recent
# _RPMVerifyPackage._setup() run, so back-to-back verifies with the same
# patterns don't recompile them.
_last_fnmatch = {}
class _RPMVerifyPackage(YUMVerifyPackage):
    """YUMVerifyPackage that lazily builds its file entries from an rpm
    file-info iterator, optionally filtered by fnmatch-style patterns."""
    def __init__(self, po, fi, def_csum_type, patterns, all):
        YUMVerifyPackage.__init__(self, po)
        # Defer the (possibly expensive) file scan until first access.
        self._presetup = (fi, def_csum_type, patterns, all)
    def _setup(self):
        # Build the file entries on first use; no-op afterwards.
        if not hasattr(self, '_presetup'):
            return
        (fi, def_csum_type, patterns, all) = self._presetup
        del self._presetup
        global _last_fnmatch
        _this_fnmatch = {}
        for ft in fi:
            fn = ft[0]
            if patterns:
                # Keep the file only if some pattern matches; reuse
                # compiled patterns from the previous run when possible.
                matched = False
                for p in patterns:
                    if p in _last_fnmatch:
                        match = _last_fnmatch[p]
                    elif p in _this_fnmatch:
                        match = _this_fnmatch[p]
                    else:
                        match = misc.compile_pattern(p)
                        _this_fnmatch[p] = match
                    if match(fn):
                        matched = True
                        break
                if not matched:
                    continue
            self.add(_RPMVerifyPackageFile(fi, ft, def_csum_type, all))
        if _this_fnmatch:
            _last_fnmatch = _this_fnmatch
    # All accessors force _setup() first.
    def __contains__(self, *args, **kwargs):
        self._setup()
        return YUMVerifyPackage.__contains__(self, *args, **kwargs)
    def __iter__(self, *args, **kwargs):
        self._setup()
        return YUMVerifyPackage.__iter__(self, *args, **kwargs)
    def add(self, *args, **kwargs):
        self._setup()
        return YUMVerifyPackage.add(self, *args, **kwargs)
    def remove(self, *args, **kwargs):
        self._setup()
        return YUMVerifyPackage.remove(self, *args, **kwargs)
    # discard uses contains...
    def clear(self, *args, **kwargs):
        # Clearing makes any pending setup pointless; drop it.
        if hasattr(self, '_presetup'):
            del self._presetup
        return YUMVerifyPackage.clear(self, *args, **kwargs)
# Shared fake "repository" object that every installed (rpmdb) package
# points at; its cost is forced to 0.
_installed_repo = FakeRepository('installed')
_installed_repo.cost = 0
class YumInstalledPackage(YumHeaderPackage):
    """super class for dealing with packages in the rpmdb"""
    def __init__(self, hdr, yumdb=None):
        # All installed packages share the fake 'installed' repo object.
        YumHeaderPackage.__init__(self, _installed_repo, hdr)
        if yumdb:
            self.yumdb_info = yumdb.get_package(self)

    def verify(self, patterns=[], deps=False, script=False,
               fake_problems=True, all=False, fast=False, callback=None,
               failfast=False):
        """verify that the installed files match the packaged checksum
           optionally verify they match only if they are in the 'pattern' list
           returns a tuple"""
        fi = self.hdr.fiFromHeader()
        results = {} # fn = problem_obj?

        # Use prelink_undo_cmd macro?
        prelink_cmd = "/usr/sbin/prelink"
        have_prelink = os.path.exists(prelink_cmd)

        # determine what checksum algo to use:
        csum_type = 'md5' # default for legacy
        if hasattr(rpm, 'RPMTAG_FILEDIGESTALGO'):
            csum_num = self.hdr[rpm.RPMTAG_FILEDIGESTALGO]
            if csum_num:
                if csum_num in RPM_CHECKSUM_TYPES:
                    csum_type = RPM_CHECKSUM_TYPES[csum_num]
                # maybe an else with an error code here? or even a verify issue?

        pfs = _RPMVerifyPackage(self, fi, csum_type, patterns, all)

        # a callback may filter/augment the set of files to verify
        if callback is not None:
            pfs = callback(pfs)

        for pf in pfs:
            fn = pf.filename
            ftypes = list(pf.rpmfile_types)
            # "fake" problems are informational records (state/missingok/
            # ghost), emitted only when fake_problems is on.
            if pf.rpmfile_state != "normal":
                ftypes.append("state=" + pf.rpmfile_state)
                if fake_problems:
                    results[fn] = [_PkgVerifyProb('state',
                                                  'state is not normal',
                                                  ftypes, fake=True)]
                continue

            if 'missing ok' in pf.rpmfile_types and fake_problems:
                results[fn] = [_PkgVerifyProb('missingok', 'missing but ok',
                                              ftypes, fake=True)]
            if 'missing ok' in pf.rpmfile_types and not all:
                continue # rpm just skips missing ok, so we do too

            if 'ghost' in pf.rpmfile_types and fake_problems:
                results[fn] = [_PkgVerifyProb('ghost', 'ghost file', ftypes,
                                              fake=True)]
            if 'ghost' in pf.rpmfile_types and not all:
                continue

            # do check of file status on system
            problems = []
            if os.path.lexists(pf.filename):
                # stat
                my_st = os.lstat(pf.filename)
                my_st_size = my_st.st_size
                try:
                    my_user  = pwd.getpwuid(my_st[stat.ST_UID])[0]
                except KeyError, e:
                    my_user = 'uid %s not found' % my_st[stat.ST_UID]
                try:
                    my_group = grp.getgrgid(my_st[stat.ST_GID])[0]
                except KeyError, e:
                    my_group = 'gid %s not found' % my_st[stat.ST_GID]

                my_ftype = YUMVerifyPackageFile._ftype(my_st.st_mode)

                # Device files get their own type/dev-number comparison.
                verify_dev = False
                if (pf.verify_dev and (pf.ftype.endswith("device") or
                                       my_ftype.endswith("device"))):
                    verify_dev = True
                if verify_dev:
                    if pf.ftype != my_ftype:
                        prob = _PkgVerifyProb('type','file type does not match',
                                              ftypes)
                        prob.database_value = pf.ftype
                        prob.disk_value = my_ftype
                        problems.append(prob)
                    elif (pf.dev & 0xFFFF) != (my_st.st_dev & 0xFFFF):
                        prob =_PkgVerifyProb('type','dev does not match',ftypes)
                        prob.database_value = hex(pf.dev & 0xffff)
                        prob.disk_value    = hex(my_st.st_dev & 0xffff)
                        problems.append(prob)

                if pf.verify_readlink:
                    my_fnl = ''
                    if my_ftype == "symlink":
                        my_fnl = os.readlink(pf.filename)
                    if my_fnl != pf.readlink:
                        prob = _PkgVerifyProb('symlink',
                                              'symlink does not match', ftypes)
                        prob.database_value = pf.readlink
                        prob.disk_value = my_fnl
                        problems.append(prob)

                # content (mtime/size/checksum) only makes sense for
                # regular files (or files reached through a symlink).
                check_content = True
                if 'ghost' in ftypes:
                    check_content = False
                if my_ftype == "symlink" and pf.ftype == "file":
                    # Don't let things hide behind symlinks
                    my_st_size = os.stat(pf.filename).st_size
                elif my_ftype != "file":
                    check_content = False
                check_perms = True
                if my_ftype == "symlink":
                    # No, rpm doesn't check user/group on the dst. of the
                    # symlink ... so we don't.
                    check_perms = False

                if (check_content and pf.verify_mtime and
                    int(my_st.st_mtime) != int(pf.mtime)):
                    prob = _PkgVerifyProb('mtime', 'mtime does not match',
                                          ftypes)
                    prob.database_value = pf.mtime
                    prob.disk_value = int(my_st.st_mtime)
                    problems.append(prob)

                if check_perms and pf.verify_user and my_user != pf.user:
                    prob = _PkgVerifyProb('user', 'user does not match', ftypes)
                    prob.database_value = pf.user
                    prob.disk_value = my_user
                    problems.append(prob)
                if check_perms and pf.verify_group and my_group != pf.group:
                    prob = _PkgVerifyProb('group', 'group does not match',
                                          ftypes)
                    prob.database_value = pf.group
                    prob.disk_value     = my_group
                    problems.append(prob)

                my_mode = my_st.st_mode
                if 'ghost' in ftypes: # This is what rpm does, although it
                    my_mode &= 0777   # doesn't usually get here.
                if check_perms and pf.verify_mode and my_mode != pf.mode:
                    prob = _PkgVerifyProb('mode', 'mode does not match', ftypes)
                    prob.database_value = pf.mode
                    prob.disk_value = my_st.st_mode
                    problems.append(prob)

                # Checksum is skipped in "fast" mode when sizes already
                # match, on failfast once problems exist, or when the db
                # has no digest for this file.
                verify_digest = pf.verify_digest
                if fast and not problems and (my_st_size == pf.size):
                    verify_digest = False
                if failfast and problems:
                    verify_digest = False

                if not pf.digest:
                    verify_digest = False

                # Note that because we might get the _size_ from prelink,
                # we need to do the checksum, even if we just throw it away,
                # just so we get the size correct.
                if (check_content and
                    (verify_digest or (pf.verify_size and have_prelink and
                                       my_st_size != pf.size))):
                    if pf.digest:
                        digest_type = pf.digest[0]
                        csum = pf.digest[0] + ':' + pf.digest[1]
                    else:
                        digest_type = csum_type
                        csum = ''
                    try:
                        my_csum = misc.checksum(digest_type, pf.filename)
                        my_csum = digest_type + ':' + my_csum
                    except Errors.MiscError:
                        # Don't have permission?
                        my_csum = None

                    if pf.verify_digest and my_csum is None:
                        prob = _PkgVerifyProb('genchecksum',
                                              'checksum not available', ftypes)
                        prob.database_value = csum
                        prob.disk_value     = None
                        problems.append(prob)

                    if my_csum != csum and have_prelink:
                        #  This is how rpm -V works, try and if that fails try
                        # again with prelink.
                        p = Popen([prelink_cmd, "-y", pf.filename],
                                  bufsize=-1, stdin=PIPE,
                                  stdout=PIPE, stderr=PIPE, close_fds=True)
                        (ig, fp, er) = (p.stdin, p.stdout, p.stderr)
                        # er.read(1024 * 1024) # Try and get most of the stderr
                        fp = _CountedReadFile(fp)
                        tcsum = misc.checksum(digest_type, fp)
                        if fp.read_size: # If prelink worked
                            my_csum = tcsum
                            my_csum = digest_type + ':' + my_csum
                            # the un-prelinked size is the real one
                            my_st_size = fp.read_size

                    if pf.verify_digest and my_csum != csum and my_csum:
                        prob = _PkgVerifyProb('checksum',
                                              'checksum does not match', ftypes)
                        prob.database_value = csum
                        prob.disk_value     = my_csum
                        problems.append(prob)

                # Size might be got from prelink ... *sigh*.
                if check_content and pf.verify_size and my_st_size != pf.size:
                    prob = _PkgVerifyProb('size', 'size does not match', ftypes)
                    prob.database_value = pf.size
                    prob.disk_value     = my_st_size
                    problems.append(prob)

            else:
                # File is missing; distinguish "really missing" from
                # "can't look" (EACCES while stat-ing).
                try:
                    os.stat(pf.filename)
                    perms_ok = True # Shouldn't happen
                except OSError, e:
                    perms_ok = True
                    if e.errno == errno.EACCES:
                        perms_ok = False

                if perms_ok:
                    prob = _PkgVerifyProb('missing', 'file is missing', ftypes)
                else:
                    prob = _PkgVerifyProb('permissions-missing',
                                          'file is missing (Permission denied)',
                                          ftypes)
                problems.append(prob)

            if problems:
                results[pf.filename] = problems

        return results
class YumLocalPackage(YumHeaderPackage):
    """Class to handle an arbitrary package from a file path
       this inherits most things from YumInstalledPackage because
       installed packages and an arbitrary package on disk act very
       much alike. init takes a ts instance and a filename/path
       to the package."""

    def __init__(self, ts=None, filename=None):
        # ts: rpm transaction set used to read the package header; a
        # read-only one is created on demand so small scripts can
        # instantiate this class with just a path.
        if ts is None:
            # This shouldn't be used "normally" within yum, but is very useful
            # for small scripts and debugging/etc.
            ts = rpmUtils.transaction.initReadOnlyTransaction()
        if filename is None:
            raise Errors.MiscError, \
                 'No Filename specified for YumLocalPackage instance creation'
        self.pkgtype = 'local'
        self.localpath = filename
        self._checksum = None
        try:
            hdr = rpmUtils.miscutils.hdrFromPackage(ts, self.localpath)
        except RpmUtilsError, e:
            raise Errors.MiscError, \
                'Could not open local rpm file: %s: %s' % (self.localpath, e)
        # Wrap the header in a zero-cost fake repository so the generic
        # YumHeaderPackage machinery works unchanged for on-disk files.
        fakerepo = FakeRepository(filename)
        fakerepo.cost = 0
        YumHeaderPackage.__init__(self, fakerepo, hdr)
        self.id = self.pkgid
        self._stat = os.stat(self.localpath)
        self.filetime = str(self._stat[-2])  # st_mtime
        self.packagesize = str(self._stat[6])  # st_size
        self.arch = self.isSrpm()
        self.pkgtup = (self.name, self.arch, self.epoch, self.ver, self.rel)
        # Header byte-range is computed lazily by _get_header_byte_range().
        self._hdrstart = None
        self._hdrend = None
        self.checksum_type = misc._default_checksums[0]
        # these can be set by callers that need these features (ex: createrepo)
        self._reldir = None
        self._baseurl = ""
        # self._packagenumber will be needed when we do sqlite creation here

    def isSrpm(self):
        # Source RPMs either carry the sourcepackage tag or lack sourcerpm.
        if self.tagByName('sourcepackage') == 1 or not self.tagByName('sourcerpm'):
            return 'src'
        else:
            return self.tagByName('arch')

    def localPkg(self):
        # Path to the package file on the local filesystem.
        return self.localpath

    def _do_checksum(self, checksum_type=None):
        # Compute (and memoize) the checksum of the package file; also
        # populates self._checksums for returnChecksums().
        if checksum_type is None:
            checksum_type = misc._default_checksums[0]
        if not self._checksum:
            self._checksum = misc.checksum(checksum_type, self.localpath)
            self._checksums = [(checksum_type, self._checksum, 1)]
        return self._checksum

    checksum = property(fget=lambda self: self._do_checksum())

    def returnChecksums(self):
        self._do_checksum()
        return self._checksums

    def verifyLocalPkg(self):
        """ don't bother "checking" the package matches itself. """
        return True

    def _get_header_byte_range(self):
        """takes an rpm file or fileobject and returns byteranges for location of the header"""
        # Cached on first call so the file is only parsed once per instance.
        if self._hdrstart and self._hdrend:
            return (self._hdrstart, self._hdrend)
        fo = open(self.localpath, 'r')
        #read in past lead and first 8 bytes of sig header
        fo.seek(104)
        # 104 bytes in
        binindex = fo.read(4)
        # 108 bytes in
        (sigindex, ) = struct.unpack('>I', binindex)
        bindata = fo.read(4)
        # 112 bytes in
        (sigdata, ) = struct.unpack('>I', bindata)
        # each index is 4 32bit segments - so each is 16 bytes
        sigindexsize = sigindex * 16
        sigsize = sigdata + sigindexsize
        # we have to round off to the next 8 byte boundary
        disttoboundary = (sigsize % 8)
        if disttoboundary != 0:
            disttoboundary = 8 - disttoboundary
        # 112 bytes - 96 == lead, 8 = magic and reserved, 8 == sig header data
        hdrstart = 112 + sigsize + disttoboundary
        fo.seek(hdrstart) # go to the start of the header
        fo.seek(8,1) # read past the magic number and reserved bytes
        binindex = fo.read(4)
        (hdrindex, ) = struct.unpack('>I', binindex)
        bindata = fo.read(4)
        (hdrdata, ) = struct.unpack('>I', bindata)
        # each index is 4 32bit segments - so each is 16 bytes
        hdrindexsize = hdrindex * 16
        # add 16 to the hdrsize to account for the 16 bytes of misc data b/t the
        # end of the sig and the header.
        hdrsize = hdrdata + hdrindexsize + 16
        # header end is hdrstart + hdrsize
        hdrend = hdrstart + hdrsize
        fo.close()
        self._hdrstart = hdrstart
        self._hdrend = hdrend
        return (hdrstart, hdrend)

    hdrend = property(fget=lambda self: self._get_header_byte_range()[1])
    hdrstart = property(fget=lambda self: self._get_header_byte_range()[0])

    def _return_remote_location(self):
        # Render the repodata <location/> XML element for this package,
        # relative to self._reldir and prefixed with self._baseurl if set.
        # if we start seeing fullpaths in the location tag - this is the culprit
        if self._reldir and self.localpath.startswith(self._reldir):
            relpath = self.localpath.replace(self._reldir, '')
            if relpath[0] == '/': relpath = relpath[1:]
        else:
            relpath = self.localpath
        if self._baseurl:
            msg = """ <location xml:base="%s" href="%s"/>\n""" % (
                misc.to_xml(self._baseurl, attrib=True),
                misc.to_xml(relpath, attrib=True))
        else:
            msg = """ <location href="%s"/>\n""" % misc.to_xml(relpath, attrib=True)
        return msg
class YumUrlPackage(YumLocalPackage):
    """Class to handle an arbitrary package from a URL
       this inherits most things from YumLocalPackage, but will download a
       remote package to make it local.
       init takes a YumBase, a ts instance and a url to the package."""

    def __init__(self, yb=None, ts=None, url=None, ua=None):
        # ua: optional user-agent value applied to the grabber options.
        if url.lower().startswith("file:"):
            # file:// URL -> just strip the scheme and use the local path.
            result = url[len("file:"):]
        elif not misc.re_remote_url(url):
            # Plain local path, nothing to download.
            result = url
        else:
            # Remote URL: borrow progress callback and proxy settings from
            # an enabled repo, download into the cache dir, then treat the
            # downloaded file as a local package.
            cb = None
            pd = {}
            for repo in yb.repos.listEnabled():
                cb = repo.callback # Hacky, but these are "always" the same
                if (repo.proxy == yb.conf.proxy and
                    repo.proxy_username == yb.conf.proxy_username and
                    repo.proxy_password == yb.conf.proxy_password):
                    # Even more hacky...
                    pd = repo.proxy_dict
                    break
            # NOTE(review): if no repos are enabled, `repo` below is unbound
            # and the cachedir fallback would raise NameError -- confirm
            # callers guarantee at least one enabled repo.
            fname = os.path.basename(url)
            local = misc.getCacheDir()
            if local is None: # bugger...
                local = "%s/../" % repo.cachedir
            local = "%s/%s" % (local, fname)
            try:
                ug = URLGrabber(bandwidth = yb.conf.bandwidth,
                                retry = yb.conf.retries,
                                throttle = yb.conf.throttle,
                                progress_obj = cb,
                                proxies=pd)
                if ua is not None:
                    ug.opts.user_agent = ua
                result = ug.urlgrab(url, local, text=fname)
            except URLGrabError, e:
                raise Errors.MiscError("Cannot download %s: %s" % (url, e))
        YumLocalPackage.__init__(self, ts, result)
|
"""
This plugin sends a message to a channel when an area repops
"""
import time
from string import Template
from plugins.aardwolf._aardwolfbaseplugin import AardwolfBasePlugin
# Plugin metadata consumed by the proxy's plugin loader.
NAME = 'Aardwolf Repop'
SNAME = 'repop'                              # short name / command prefix
PURPOSE = 'Send repop messages to a channel'
AUTHOR = 'Bast'
VERSION = 1
AUTOLOAD = False                             # loaded on demand, not at startup
class Plugin(AardwolfBasePlugin):
    """
    a plugin that announces zone repops on a chat channel
    """
    def __init__(self, *args, **kwargs):
        """
        initialize the instance
        """
        AardwolfBasePlugin.__init__(self, *args, **kwargs)

    def load(self):
        """
        register the plugin settings and the GMCP repop event handler
        """
        AardwolfBasePlugin.load(self)
        # Where and how the announcement is sent; both user-configurable.
        self.api('setting.add')('channel', 'gt', str,
                                'the channel to send the repop message')
        self.api('setting.add')('format',
                                "@r[@RRepop@r]@w ${zone} @R@@ @w${time}", str,
                                'the format of the message')
        self.api('events.register')('GMCP:comm.repop', self.repop)

    def repop(self, args):
        """
        announce a zone repop on the configured channel
        """
        zonename = args['data']['zone']
        stamp = time.strftime('%X', time.localtime())
        channel = self.api('setting.gets')('channel')
        template = Template(self.api('setting.gets')('format'))
        message = template.safe_substitute({'zone': zonename, 'time': stamp})
        self.api('send.execute')(channel + ' ' + message)
|
"""Base email backend class."""
class BaseEmailBackend(object):
    """Abstract base for email backend implementations.

    Concrete backends must implement send_messages().  Instances may be
    used as context managers, which calls open() on entry and close()
    on exit:

        with backend as connection:
            # do something with connection
            pass
    """

    def __init__(self, fail_silently=False, **kwargs):
        self.fail_silently = fail_silently

    def open(self):
        """Establish a network connection if the backend needs one.

        The base implementation is a no-op.  Backends that keep a
        persistent connection override this; tracking whether the
        connection is currently open is entirely the subclass's concern.
        Applications may call open() directly to force one connection to
        be reused for several sends (see the SMTP backend's
        send_messages() for a reference implementation).
        """
        pass

    def close(self):
        """Tear down any connection opened by open(); no-op by default."""
        pass

    def __enter__(self):
        try:
            self.open()
        except Exception:
            # Never leak a half-open connection out of a failed entry.
            self.close()
            raise
        else:
            return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def send_messages(self, email_messages):
        """Send the given EmailMessage objects; return how many were sent."""
        raise NotImplementedError('subclasses of BaseEmailBackend must override send_messages() method')
|
from django.shortcuts import render, render_to_response
from django.template.context import RequestContext
from .models import *
def linuxicerik(request, altbaslik):
    """Render linuxicerik.html for the content row matching *altbaslik*.

    BUGFIX: previously passed ``content_type=RequestContext(request)``;
    ``content_type`` expects a MIME-type string, so the request context was
    never applied (and an invalid Content-Type header was produced).  Use
    ``context_instance=`` like the sibling views below.
    """
    icerikler = icerik.objects.get(altbaslik=altbaslik)
    return render_to_response("linuxicerik.html", locals(), context_instance=RequestContext(request))
def djangoicerik(request, altbaslik):
    # Render djangomenu.html with the content row matching altbaslik.
    # NOTE(review): objects.get raises DoesNotExist (HTTP 500) for an
    # unknown altbaslik -- consider get_object_or_404.
    djangofiltre = icerik.objects.get(altbaslik=altbaslik)
    return render_to_response("djangomenu.html", locals(), context_instance=RequestContext(request))
def iviriviricerik(request, altbaslik):
    # Render ivirzivir.html with the content row matching altbaslik.
    # NOTE(review): objects.get raises DoesNotExist (HTTP 500) for an
    # unknown altbaslik -- consider get_object_or_404.
    ivirzivirfiltre = icerik.objects.get(altbaslik=altbaslik)
    return render_to_response("ivirzivir.html", locals(), context_instance=RequestContext(request))
|
"""Flask :class:`~flask.sessions.SessionInterface` implementation."""
import six
from datetime import timedelta, datetime
from flask import current_app, request
from flask.helpers import locked_cached_property
from flask.sessions import SessionInterface as FlaskSessionInterface
from uuid import uuid4
from werkzeug.exceptions import BadRequest
from werkzeug.utils import import_string
from invenio.utils.serializers import ZlibPickle as Serializer
class SessionInterface(FlaskSessionInterface):
    """Extend :class:`~flask.sessions.SessionInterface` class.

    Sessions are loaded and stored through a pluggable backend,
    serializer and session class, all configurable via the Flask app
    config keys ``SESSION_BACKEND``, ``SESSION_SERIALIZER`` and
    ``SESSION_CLASS``.
    """

    @locked_cached_property
    def has_secure_url(self):
        """Return ``True`` if secure url is configured."""
        return current_app.config.get('CFG_SITE_SECURE_URL', '').\
            startswith("https://")

    @locked_cached_property
    def serializer(self):
        """Return serializer instance (config value may be a dotted path)."""
        serializer_string = current_app.config.get('SESSION_SERIALIZER',
                                                   Serializer)
        return import_string(serializer_string)() \
            if isinstance(serializer_string, six.string_types) \
            else serializer_string()

    @locked_cached_property
    def session_class(self):
        """Return session class (config value may be a dotted path)."""
        session_class_string = current_app.config.get(
            'SESSION_CLASS', 'invenio.ext.session.legacy_session:Session')
        return import_string(session_class_string) \
            if isinstance(session_class_string, six.string_types) \
            else session_class_string

    @locked_cached_property
    def backend(self):
        """Return session storage backend instance."""
        storage_string = current_app.config.get(
            'SESSION_BACKEND', 'invenio.ext.session.backends.cache:Storage')
        return import_string(storage_string)() \
            if isinstance(storage_string, six.string_types) \
            else storage_string()

    def generate_sid(self):
        """Generate unique session identifier."""
        sid = uuid4().hex
        return sid

    def get_session_expiration_time(self, app, session):
        """Return session expiration time.

        Non-permanent sessions still live one day server-side.
        """
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=1)

    def open_session(self, app, request):
        """Return session instance.

        The session id is taken from the session cookie or, failing
        that, from the ``session_id`` request argument.  Any error while
        loading or deserializing the stored session results in a fresh
        empty session.
        """
        sid = request.cookies.get(app.session_cookie_name) or \
            request.args.get('session_id')
        if not sid:
            sid = self.generate_sid()
            return self.session_class(sid=sid)
        try:
            data = self.backend.get(sid)
            if data:
                session = self.session_class(self.serializer.loads(data),
                                             sid=sid)
                # Reject sessions replayed from a different client address.
                if session.check_ip(request):
                    return session
        except Exception:
            # BUGFIX: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; narrowed to Exception.
            current_app.logger.warning(
                "Load session error. Returning empty session.",
                exc_info=True)
        return self.session_class(sid=sid)

    def save_session(self, app, session, response):
        """Save current session and set the session cookie(s)."""
        domain = self.get_cookie_domain(app)
        if not session:
            current_app.logger.debug("Empty session: " + str(request.url))
            return
            # response.delete_cookie(app.session_cookie_name,
            #                       domain=domain)
            # response.delete_cookie(app.session_cookie_name + 'stub',
            #                       domain=domain)
            # return
        timeout = self.get_session_expiration_time(app, session)
        session_expiry = datetime.utcnow() + timeout
        max_age = cookie_expiry = None
        uid = session.uid
        # Only authenticated, permanent sessions get a persistent cookie.
        if uid > -1 and session.permanent:
            max_age = app.permanent_session_lifetime
            cookie_expiry = session_expiry
        sid = session.sid
        if session.logging_in:
            # # FIXME Do we really need to delete the session after login?
            # # The user just logged in, better change the session ID
            # sid = self.generate_sid()
            # flashes = get_flashed_messages(with_categories=True)
            # # And remove the cookie that has been set
            # self.backend.delete(session.sid)
            # session.clear()
            # response.delete_cookie(app.session_cookie_name, domain=domain)
            # response.delete_cookie(app.session_cookie_name + 'stub',
            #                        domain=domain)
            # session.sid = sid
            # session.uid = uid
            # # Fixes problem with lost flashes after login.
            # map(lambda (cat, msg): flash(msg, cat), flashes)
            pass
        # Set all user id keys for compatibility.
        # A session holding only '_id' carries no real state: drop it.
        if len(session.keys()) == 1 and '_id' in session:
            session.delete()
            return
        elif not session.modified:
            # Nothing changed; skip the backend write and cookie refresh.
            return
        session.uid = uid
        session.save_ip(request)
        self.backend.set(sid,
                         self.serializer.dumps(dict(session)),
                         timeout=timeout)
        if not self.has_secure_url:
            response.set_cookie(app.session_cookie_name, sid,
                                expires=cookie_expiry, httponly=True,
                                domain=domain, max_age=max_age)
        elif session.uid > 0:
            # User is authenticated, we shall use HTTPS then
            if request.scheme == 'https':
                response.set_cookie(app.session_cookie_name, sid,
                                    expires=cookie_expiry, httponly=True,
                                    domain=domain, secure=True,
                                    max_age=max_age)
                # Stub cookie marks that a secure session cookie exists.
                response.set_cookie(app.session_cookie_name + 'stub', 'HTTPS',
                                    expires=cookie_expiry, httponly=True,
                                    domain=domain, max_age=max_age)
            else:
                raise BadRequest("The user is being authenticated over HTTP "
                                 "rather than HTTPS?")
        else:
            response.set_cookie(app.session_cookie_name, sid, httponly=True,
                                domain=domain)
            response.set_cookie(app.session_cookie_name + 'stub', 'NO',
                                httponly=True, domain=domain)
|
import log
class Command:
    "Extracts protocol, id, command, and arg from mail filter input"

    def __init__(self, user='', liszt=None):
        # user: identifier of the issuing user.
        # liszt: pre-tokenized command line; liszt[1] is the command word
        # and liszt[2] (if present) its argument.
        #log.logger.debug('Command.__init__()')
        if liszt is None:
            # BUGFIX: was a mutable default argument (liszt=[]).
            liszt = []
        self.proto = 'IRC'
        self.id = user
        self.cmd = 'NOOP'
        self.arg = ''
        self.handsflag = False
        # Command vocabularies: no argument / text argument / int argument.
        noarg = ['ABORT', 'AUTOFOLD', 'BACK', 'BOARD', 'CARDS', 'CHECK',
                 'COLOR', 'COMMANDS', 'FOLD', 'HELP', 'JOIN',
                 'POSITION', 'QUIT', 'START', 'STATUS', 'UNDO']
        chararg = ['REMIND', 'VACATION']
        intarg = ['BANKROLL', 'BET', 'BLIND', 'CALL', 'DOUBLE', 'MAKE',
                  'RAISE']
        onearg = chararg + intarg
        # BUGFIX: was "len(liszt) > 0", which raised IndexError for a
        # single-token list; the command word lives at index 1.
        if len(liszt) > 1:
            self.cmd = liszt[1].upper()
            if self.cmd in onearg:
                if len(liszt) < 3:
                    # Missing argument: CALL defaults to "call maximum",
                    # VACATION may stand alone; anything else is an error.
                    if self.cmd == 'CALL':
                        self.cmd = 'CALLMAX'
                    elif self.cmd == 'VACATION':
                        pass
                    else:
                        self.arg = 'ERROR'
                else:
                    if self.cmd in intarg:
                        try:
                            self.arg = int(liszt[2])
                        except (TypeError, ValueError):
                            # Narrowed from a bare except.
                            self.arg = 'ERROR'
                        # Optional 'HANDS' modifier for DOUBLE.
                        if self.cmd == 'DOUBLE' and len(liszt) >= 4:
                            if liszt[3].upper() == 'HANDS':
                                self.handsflag = True
                    else:
                        self.arg = liszt[2]

    def __str__(self):
        #log.logger.debug('Command.__str__()')
        return '[Command:%s:%s:%s:%s]' %\
               (self.proto, self.id, self.cmd, self.arg)

    def extractcmd(self, cmdstring):
        # Parse a 'PROTO:ID:CMD:ARG' string into the four fields; on a
        # malformed string, reset to an EMAIL NOOP.
        #log.logger.debug('Command.extractcmd()')
        if cmdstring:
            try:
                self.proto, self.id, self.cmd, self.arg =\
                    cmdstring.strip().split(':', 3)
            except ValueError:
                # Narrowed from a bare except: fewer than 4 fields.
                log.logger.warning('Command.extractcmd(): bad command format: \'%s\'' % cmdstring)
                self.proto = 'EMAIL'
                self.id = ''
                self.cmd = 'NOOP'
                self.arg = ''
            self.cmd = self.cmd.upper()
            if self.cmd == 'CALL' and self.arg == 'MAXIMUM':
                self.cmd = 'CALLMAX'

    def goodarg(self):
        'Format Command.arg'
        goodarg = True
        #log.logger.debug('Command.goodarg()')
        intargcmds = ['BET', 'CALL', 'MAKE', 'RAISE', 'BLIND',
                      'DOUBLE', 'BANKROLL']
        txtargcmds = ['WAIT', 'REMIND']
        if self.cmd in intargcmds:
            log.logger.debug('Command:%s' % self)
            try:
                self.arg = int(self.arg)
            except (TypeError, ValueError):
                # Narrowed from a bare except.
                self.arg = ''
                if self.cmd == 'CALL':
                    log.logger.debug('Command:Setting cmd to CALLMAX')
                    self.cmd = 'CALLMAX'
                # In NLHE, BET/MAKE, RAISE must have arguments
                else:
                    goodarg = False
        elif self.cmd in txtargcmds:
            if not self.arg:
                goodarg = False
        return goodarg
if __name__ == '__main__':
    # Smoke test: default construction, then happy-path and malformed
    # command-string parsing.
    acommand = Command()
    print acommand
    acommand.extractcmd('EMAIL:abcdefgh:call:199')
    print acommand
    # this should fail
    acommand.extractcmd('IRC:abcdefgh:call')
    print acommand
|
"""
Build word vector clusters for datasets
input: dataset
word vecs
output: vec clusters(classes)
centroid_map
"""
import doc2vec
try:
import cPickle as pickle
except ImportError:
import pickle
def build_clusters(data_folder, path, dataset):
    # Build word-vector clusters (classes) for one dataset and pickle the
    # word -> centroid mapping next to the dataset files.
    # data_folder: list of input files (e.g. positive/negative halves)
    # path: directory holding the dataset files, with trailing slash
    # dataset: dataset base name used for all derived file names
    print "Building clusters on", dataset
    vocab_name = path + dataset + ".vocab"
    d2v_model = doc2vec.load_docs(data_folder, clean_string=True, vocab_name=vocab_name, save_vocab=True)
    # Pre-trained Google News vectors, restricted to this dataset's vocab.
    w2v_file = './datasets/wordvecs/GoogleNews-vectors-negative300.bin'
    # w2v_file = './datasets/wordvecs/vectors.bin'
    w2v_model = doc2vec.load_word_vec(w2v_file, d2v_model.vocab, cluster=True)
    sname = path + dataset + ".clusters"
    w2v_model.get_w2v_centroid(sname=sname)
    centroid_map = path + dataset + "-centroid-map.p"
    with open(centroid_map, "wb") as f:
        pickle.dump([w2v_model.word_centroid_map], f) # create a pickle object
    # Blank line between per-dataset progress output.
    print
# Driver: build clusters for each of the three datasets; each dataset is
# split across two input files (its two label classes).
print "Creating word vector clusters for datasets"
path = './datasets/'
dataset = 'rt-polarity'
data_folder = [path+dataset+".pos", path+dataset+".neg"]
build_clusters(data_folder, path, dataset)
dataset = 'subj'
data_folder = [path+dataset+".subjective", path+dataset+".objective"]
build_clusters(data_folder, path, dataset)
dataset = 'mpqa'
data_folder = [path+dataset+".pos", path+dataset+".neg"]
build_clusters(data_folder, path, dataset)
print "Done!"
|
from __future__ import unicode_literals
from collections import Counter
from molbiox.algor import interval
def find_next_contigs(samfile, contig, insertmax, orientation='fr',
                      direction='next'):
    """
    Count contigs linked to ``contig`` by read pairs near one of its ends.

    BUGFIX: ``direction`` was used throughout the body (and documented)
    but missing from the signature, so every call raised NameError; it is
    now a parameter defaulting to 'next' (matching the function name).

    :param samfile: pysam.calignmentfile.AlignmentFile object
    :param contig: contig id (an integer)
    :param insertmax: maximun allowed insert size
    :param orientation: fr / rf
    :param direction: prev / next -- which end of the contig to inspect
    :return: collections.Counter of (mate reference name, 'same'/'diff')
    """
    length = samfile.lengths[contig]
    # Window covering the relevant end of this contig.
    if direction == 'prev':
        headpos = 0
        tailpos = min(insertmax, length)
    elif direction == 'next':
        headpos = max(0, length-insertmax)
        tailpos = length
    else:
        raise ValueError('direction can only be prev or next')
    reads = samfile.fetch(contig, headpos, tailpos)
    reads = (r for r in reads if r.is_paired and not r.mate_is_unmapped)
    # should read be reversed?
    revdict = dict(prevfr=True, prevrf=False, nextfr=False, nextrf=True)
    needrev = revdict[direction + orientation]
    reads = (r for r in reads if bool(r.is_reverse) == needrev)
    valid_pairs = []
    for read in reads:
        # this line, VERY SLOW!
        mate = samfile.mate(read)
        # Only pairs spanning two different contigs are informative here.
        if read.reference_id == mate.reference_id:
            continue
        mcontig = mate.reference_id
        mlength = samfile.lengths[mcontig]
        if not isinstance(mate.reference_start, int):
            continue
        if not isinstance(mate.reference_end, int):
            continue
        if mate.is_reverse and orientation == 'fr' or \
                not mate.is_reverse and orientation == 'rf':
            mheadpos = 0
            mtailpos = min(insertmax, mlength)
        else:
            # BUGFIX: the window must be measured on the MATE's contig;
            # was "max(0, length-insertmax)" using the source contig length.
            mheadpos = max(0, mlength-insertmax)
            mtailpos = mlength
        # overlap size
        ov = interval.overlap_size(
            mheadpos, mtailpos, mate.reference_start, mate.reference_end)
        if ov > 0:
            valid_pairs.append((read, mate))
    refs = samfile.references
    strand = lambda r, m: 'diff' if r.is_reverse == m.is_reverse else 'same'
    nextcontigs = ((refs[m.reference_id], strand(r, m)) for r, m in valid_pairs)
    return Counter(nextcontigs)
|
import cgi
def printHeader( title ):
    # Emit the CGI Content-type header plus the XHTML 1.0 Strict document
    # prologue and opening <body>; %s is replaced by the page title.
    print """Content-type: text/html
<?xml version = "1.0" encoding = "UTF-8"?>
<!DOCTYPE html PUBLIC
   "-//W3C//DTD XHTML 1.0 Strict//EN"
   "DTD/xhtml1-strict.dtd">
<html xmlns = "http://www.w3.org/1999/xhtml">
<head><title>%s</title></head>
<body>""" % title
# Render the page: a form that submits the "word" field back to this
# script (fig06_08.py) via GET.
printHeader( "Using 'get' with forms" )
print """<p>Enter one of your favorite words here:<br /></p>
<form method = "get" action = "fig06_08.py">
   <p>
      <input type = "text" name = "word" />
      <input type = "submit" value = "Submit word" />
   </p>
</form>"""
# cgi.parse() reads the query string; echo the submitted word, escaped
# to prevent HTML injection.
pairs = cgi.parse()
if pairs.has_key( "word" ):
   print """<p>Your word is:
   <span style = "font-weight: bold">%s</span></p>""" \
      % cgi.escape( pairs[ "word" ][ 0 ] )
print "</body></html>"
import logging
import sys
from pyanaconda.ui.gui.spokes import NormalSpoke
from pyanaconda.ui.common import FirstbootOnlySpokeMixIn
from pyanaconda.ui.categories.system import SystemCategory
from pyanaconda.ui.gui.utils import really_hide
log = logging.getLogger(__name__)
# subscription-manager is not installed into site-packages; make its
# private module directory importable before anything below is imported.
RHSM_PATH = "/usr/share/rhsm"
sys.path.append(RHSM_PATH)
# Select the Gtk3 flavor of subscription-manager's "ga" compatibility
# layer; must happen before importing any of its GUI modules.
from subscription_manager import ga_loader
ga_loader.init_ga(gtk_version="3")
from subscription_manager.ga import GObject as ga_GObject
from subscription_manager.ga import Gtk as ga_Gtk
from subscription_manager.gui import managergui
from subscription_manager.injectioninit import init_dep_injection
from subscription_manager import injection as inj
from subscription_manager.gui import registergui
from subscription_manager import utils
from subscription_manager.gui import utils as gui_utils
# Enable GObject threading before any GUI work happens.
ga_GObject.threads_init()
__all__ = ["RHSMSpoke"]
class RHSMSpoke(FirstbootOnlySpokeMixIn, NormalSpoke):
    """initial-setup/anaconda spoke that embeds subscription-manager's
    registration GUI (registergui.RegisterWidget)."""

    # BUGFIX: was "buildrObjects"; anaconda's GUIObject looks for the
    # attribute "builderObjects", so the typo'd list was silently ignored.
    builderObjects = ["RHSMSpokeWindow"]
    mainWidgetName = "RHSMSpokeWindow"
    uiFile = "rhsm_gui.ui"
    helpFile = "SubscriptionManagerSpoke.xml"
    category = SystemCategory
    icon = "subscription-manager"
    title = "Subscription Manager"

    def __init__(self, data, storage, payload, instclass):
        NormalSpoke.__init__(self, data, storage, payload, instclass)
        self._done = False
        # Kickstart/addon data for this spoke (serverurl, credentials, etc).
        self._addon_data = self.data.addons.com_redhat_subscription_manager

    def initialize(self):
        """Build the embedded RegisterWidget and wire up its signals."""
        NormalSpoke.initialize(self)
        self._done = False
        init_dep_injection()
        facts = inj.require(inj.FACTS)
        backend = managergui.Backend()
        self.info = registergui.RegisterInfo()
        self.register_widget = registergui.RegisterWidget(backend, facts,
                                                          reg_info=self.info,
                                                          parent_window=self.main_window)
        self.register_box = self.builder.get_object("register_box")
        self.button_box = self.builder.get_object('navigation_button_box')
        self.proceed_button = self.builder.get_object('proceed_button')
        self.back_button = self.builder.get_object('cancel_button')
        self.register_box.pack_start(self.register_widget.register_widget,
                                     True, True, 0)
        # Hook up the nav buttons in the gui
        # TODO: add a 'start over'?
        self.proceed_button.connect('clicked', self._on_register_button_clicked)
        self.back_button.connect('clicked', self._on_back_button_clicked)
        # initial-setup will likely
        self.register_widget.connect('finished', self._on_finished)
        self.register_widget.connect('register-finished', self._on_register_finished)
        self.register_widget.connect('register-error', self._on_register_error)
        self.register_widget.connect('register-message', self._on_register_message)
        # update the 'next/register button on page change'
        self.register_widget.connect('notify::register-button-label',
                                     self._on_register_button_label_change)
        self.register_widget.connect('notify::screen-ready',
                                     self._on_register_screen_ready_change)
        self.register_box.show_all()
        self.register_widget.initialize()

    @property
    def ready(self):
        """A boolean property indicating the spoke is ready to be visited.
        This could depend on other modules or waiting for internal
        state to be setup."""
        return True

    @property
    def completed(self):
        """A boolean property indicating if all the mandatory actions are completed."""
        # TODO: tie into register_widget.info.register-state
        return self._done

    @property
    def mandatory(self):
        """A boolean property indicating if the module has to be completed before initial-setup is done."""
        return False

    @property
    def status(self):
        """A string property indicating a user facing summary of the spokes status.
        This is displayed under the spokes name on it's hub."""
        # The status property is only used read/only, so no setter required.
        return self.info.get_property('register-status')

    def refresh(self):
        """Update gui widgets to reflect state of self.data.
        This is called whenever a user returns to a Spoke to update the
        info displayed, since the data could have been changed or updated
        by another spoke or by actions that completed in the mean time.
        Here it is used to populate RHSMSpokes registerGui.RegisterInfo self.info,
        since changes there are applied to RegisterWidget self.register_widget
        by RegisterWidget itself.
        The RHSM 'ks' spoke can read values from the kickstart files read by
        initial-setup, and stored in self._addon_data. So this method will
        seed RHSMSpokes gui with any values set there.
        """
        if self._addon_data.serverurl:
            (hostname, port, prefix) = utils.parse_server_info(self._addon_data.serverurl)
            self.info.set_property('hostname', hostname)
            self.info.set_property('port', port)
            self.info.set_property('prefix', prefix)
        if self._addon_data.username:
            self.info.set_property('username',
                                   self._addon_data.username)
        if self._addon_data.password:
            self.info.set_property('password',
                                   self._addon_data.password)
        if self._addon_data.org:
            self.info.set_property('owner_key',
                                   self._addon_data.org)
        if self._addon_data.activationkeys:
            self.info.set_property('activation_keys',
                                   self._addon_data.activationkeys)
        # TODO: support a ordered list of sla preferences?
        if self._addon_data.servicelevel:
            # NOTE: using the first sla in servicelevel only currently
            self.info.set_property('preferred_sla',
                                   self._addon_data.servicelevel[0])
        if self._addon_data.force:
            self.info.set_property('force', True)
        self.register_widget.populate_screens()

    # take info from the gui widgets and set into the self.data
    def apply(self):
        """Take info from the gui widgets and set into the self.data.addons AddonData.
        self.data.addons will be used to persist the values into a
        initial-setup-ks.cfg file when initial-setup completes."""
        # TODO: implement
        pass

    def execute(self):
        """When the spoke is left, this can run anything that needs to happen.
        For RHSMSpoke, the spoke has already done everything it needs to do,
        so this is empty. Typically a module would gather enough info to
        perform all the actions in the execute(), but RHSMSpoke is not typical."""
        pass

    def _on_back_button_clicked(self, button):
        """Handler for self.back_buttons 'clicked' signal.
        Clear out any user set values and return to the start screen."""
        self.register_widget.emit('back')
        # TODO: clear out settings and restart?
        # TODO: attempt to undo the REST api calls we've made?
        #self.register_widget.set_initial_screen()
        #self.register_widget.clear_screens()

    def _on_register_button_clicked(self, button):
        """Handler for self.proceed_buttons 'clicked' signal.
        The proceed and reset buttons in the RHSM spokes window
        are used to drive the registergui.RegisterWidget by
        emitting a 'proceed' signal to RegisterWidget when the
        'proceed' button in the spoke window is clicked."""
        self.clear_info()
        self.register_widget.emit('proceed')

    def _on_finished(self, obj):
        """Handler for RegisterWidget's 'finished' signal."""
        self._done = True
        really_hide(self.button_box)

    # If we completed registration, that's close enough to consider
    # completed.
    def _on_register_finished(self, obj):
        """Handler for RegisterWidget's 'register-finished' signal.
        Indicates the system successfully registered.
        Note: It does mean the system has finished attaching
        subscriptions, or that RegisterWidget is finished.
        It only indicates the registration portion is finished."""
        self._done = True

    # May merge error and message handling, but error can
    # include tracebacks and alter program flow...
    def _on_register_error(self, widget, msg, exc_info):
        """Handler for RegisterWidget's 'register-error' signal.
        Depending on the data passed to 'register-error' emission, this
        widgets decides how to format the error 'msg'. The 'msg' may
        need to use format_exception to including exception info
        in the msg (ie, the exception msg or status code).
        This uses initial-setups set_error() to display the error
        messages. Currently that is via a Gtk.InfoBar."""
        if exc_info:
            formatted_msg = gui_utils.format_exception(exc_info, msg)
            self.set_error(formatted_msg)
        else:
            log.error(msg)
            self.set_error(msg)

    def _on_register_message(self, widget, msg, msg_type=None):
        """Handler for RegisterWidget's 'register-message' signal.
        If RegisterWidget needs the parent widget to show an info
        or warning message, it emits 'register-message' with the
        msg string and msg_type (a Gtk.MessageType).
        This uses initial-setups set_info() or set_warning() to
        display the message. Currently that is via a Gtk.InfoBar."""
        # default to info.
        msg_type = msg_type or ga_Gtk.MessageType.INFO
        if msg_type == ga_Gtk.MessageType.INFO:
            self.set_info(msg)
        elif msg_type == ga_Gtk.MessageType.WARNING:
            self.set_warning(msg)

    def _on_register_screen_ready_change(self, obj, value):
        # Enable/disable navigation while the current screen is busy.
        ready = self.register_widget.current_screen.get_property('ready')
        self.proceed_button.set_sensitive(ready)
        self.back_button.set_sensitive(ready)

    def _on_register_button_label_change(self, obj, value):
        """Handler for registergui.RegisterWidgets's 'register-button-label' property notifications.
        Used to update the label on the proceed/register/next button in RHSMSpoke
        to reflect RegisterWidget's state. (ie, 'Register', then 'Attach', etc)."""
        register_label = obj.get_property('register-button-label')
        if register_label:
            self.proceed_button.set_label(register_label)
|
from passlib.hash import pbkdf2_sha512
from omf.model.dbo import db
from omf.common.userRole import Role
from flask_login import UserMixin
class User(db.Model, UserMixin):
    """SQLAlchemy model for an OMF user account, usable with Flask-Login."""
    __tablename__ = "users"
    id = db.Column(db.Integer, nullable=False, primary_key=True)
    username = db.Column(db.String(80), nullable=False)
    # reg_key: one-time registration token; csrf: per-user CSRF token.
    reg_key = db.Column(db.String(80), nullable =True)
    timestamp = db.Column(db.TIMESTAMP(timezone=True), nullable =True)
    registered = db.Column(db.Boolean, nullable=True)
    csrf = db.Column(db.String(80), nullable =True)
    password_digest = db.Column(db.String(200), nullable =True)
    role = db.Column(db.Enum('admin', 'user', 'public', name='role'))

    def __init__(self, username, reg_key = None, timestamp = None, registered = None, csrf = None, password_digest = None, role = Role.USER):
        self.username = username
        self.reg_key = reg_key
        self.timestamp = timestamp
        self.registered = registered
        self.csrf = csrf
        self.password_digest = password_digest
        # Accept either a Role enum member or its raw string value.
        if isinstance(role, Role):
            self.role = role.value
        else:
            self.role = role

    # Flask-Login identity is the username, not the numeric primary key.
    def get_id(self): return self.username

    def get_user_id(self):
        # Numeric database primary key (distinct from the login identity).
        return self.id

    def verify_password(self, password):
        # Constant-time verification against the stored PBKDF2 digest.
        return pbkdf2_sha512.verify(password, self.password_digest)

    @staticmethod
    def hash_password(password):
        # NOTE(review): passlib's .encrypt() is a deprecated alias of
        # .hash() -- confirm the installed passlib version before renaming.
        return pbkdf2_sha512.encrypt(password)
|
import os
import errno
import json
from datetime import timedelta
from gi.repository import GLib, GObject, Gtk
from .clocks import Clock
from .utils import Alert, Dirs, LocalizedWeekdays, SystemSettings, TimeString, WallClock
from .widgets import Toolbar, ToolButton, SymbolicToolButton, SelectableIconView, ContentView
wallclock = WallClock.get_default()
class AlarmsStorage:
    """Persist the alarm list as JSON in the user data directory."""

    def __init__(self):
        self.filename = os.path.join(Dirs.get_user_data_dir(), "alarms.json")

    def save(self, alarms):
        """Serialize the given alarms (name/hour/minute/days/active) to disk."""
        alarm_list = []
        for a in alarms:
            d = {
                "name": a.name,
                "hour": a.hour,
                "minute": a.minute,
                "days": a.days,
                "active": a.active
            }
            alarm_list.append(d)
        with open(self.filename, 'w', encoding='utf-8') as f:
            json.dump(alarm_list, f, ensure_ascii=False)

    def load(self):
        """Load alarms from disk; entries missing required fields are skipped.

        Returns an empty list if the file does not exist yet.
        """
        alarms = []
        try:
            with open(self.filename, encoding='utf-8') as f:
                alarm_list = json.load(f)
        except IOError as e:
            if e.errno == errno.ENOENT:
                # File does not exist yet, that's ok
                return alarms
            # BUGFIX: other I/O errors (e.g. permissions) were silently
            # swallowed, hiding real failures; propagate them instead.
            raise
        for a in alarm_list:
            try:
                n, h, m, d = (a['name'], int(a['hour']), int(a['minute']), a['days'])
                # support the old format that didn't have the active key
                active = a['active'] if 'active' in a else True
            except (KeyError, TypeError, ValueError):
                # skip alarms which do not have the required fields
                # (narrowed from a bare except)
                continue
            alarms.append(AlarmItem(n, h, m, d, active))
        return alarms
class AlarmItem:
    """A single alarm: its schedule plus a READY/RINGING/SNOOZING state
    machine that is advanced by tick() on every wall-clock change."""
    EVERY_DAY = [0, 1, 2, 3, 4, 5, 6]
    # TODO: For now the alarm never rings that long
    MAX_RING_DURATION = timedelta(minutes=5)
    class State:
        # enum-like namespace for the alarm life cycle
        READY = 0
        RINGING = 1
        SNOOZING = 2
    def __init__(self, name, hour, minute, days, active):
        self.name = name
        self.hour = hour
        self.minute = minute
        self.days = days # list of numbers, 0 == Monday
        self.active = active
        self._reset()
        # display strings are cached so the UI never has to reformat
        self.alarm_time_string = TimeString.format_time(self.alarm_time)
        self.alarm_repeat_string = self._get_alarm_repeat_string()
        self.alert = Alert("alarm-clock-elapsed", _("Alarm"), name)
    # two alarms are equal if they have the same name, time and days,
    # the active attribute doesn't matter
    def __eq__(self, other):
        return self.name == other.name and \
            self.hour == other.hour and \
            self.minute == other.minute and \
            self.days == other.days
    def __ne__(self, other):
        return not self.__eq__(other)
    def _update_alarm_time(self):
        """Compute the next datetime at which this alarm should fire."""
        now = wallclock.datetime
        dt = now.replace(hour=self.hour, minute=self.minute, second=0, microsecond=0)
        # check if it can ring later today
        if dt.weekday() not in self.days or dt <= now:
            # otherwise if it can ring this week
            next_days = [d for d in self.days if d > dt.weekday()]
            if next_days:
                dt += timedelta(days=(next_days[0] - dt.weekday()))
            # otherwise next week
            else:
                dt += timedelta(weeks=1, days=(self.days[0] - dt.weekday()))
        self.alarm_time = dt
    def _update_snooze_time(self, start_time):
        # fixed 9-minute snooze interval from *start_time*
        self.snooze_time = start_time + timedelta(minutes=9)
    def _get_alarm_repeat_string(self):
        """Return a human-readable repeat description for the days list."""
        n = len(self.days)
        if n == 0:
            return ""
        elif n == 1:
            return LocalizedWeekdays.get_plural(self.days[0])
        elif n == 7:
            return _("Every day")
        elif self.days == [0, 1, 2, 3, 4]:
            return _("Weekdays")
        else:
            # list abbreviations starting from the locale's first weekday
            days = []
            for i in range(7):
                day_num = (LocalizedWeekdays.first_weekday() + i) % 7
                if day_num in self.days:
                    days.append(LocalizedWeekdays.get_abbr(day_num))
            return ", ".join(days)
    def _reset(self):
        # recompute the next firing time and return to the READY state
        self._update_alarm_time()
        self._update_snooze_time(self.alarm_time)
        self.state = AlarmItem.State.READY
    def _ring(self):
        # show the audible/visual alert and enter the RINGING state
        self.alert.show()
        self.state = AlarmItem.State.RINGING
    def set_active(self, active):
        """Enable or disable the alarm, silencing it if currently ringing."""
        if active:
            self._reset()
        elif self.state == AlarmItem.State.RINGING:
            self.alert.stop()
        self.active = active
    def snooze(self):
        """Silence the alert and wait for snooze_time before ringing again."""
        self.alert.stop()
        self.state = AlarmItem.State.SNOOZING
    def stop(self):
        """Silence the alert and rearm for the next scheduled alarm_time."""
        self.alert.stop()
        self._update_snooze_time(self.alarm_time)
        self.state = AlarmItem.State.READY
    def tick(self):
        # Updates the state and ringing times of the AlarmItem and
        # rings or stops the alarm as required, depending on the
        # current time. Returns True if the state changed, False
        # otherwise.
        if not self.active:
            return False
        last_state = self.state
        if self.state != AlarmItem.State.RINGING:
            if wallclock.datetime >= self.alarm_time:
                # scheduled time reached: remember when ringing started,
                # schedule the snooze window, and rearm for the next day
                self._ringing_start_time = self.alarm_time
                self._update_snooze_time(self.alarm_time)
                self._update_alarm_time()
                self._ring()
            elif wallclock.datetime >= self.snooze_time:
                # snooze period elapsed: ring again
                self._ringing_start_time = self.snooze_time
                self._update_snooze_time(self.snooze_time)
                self._ring()
        elif wallclock.datetime >= self._ringing_start_time + \
                AlarmItem.MAX_RING_DURATION:
            # give up and stop ringing after 5 minutes
            self.stop()
        return self.state != last_state
class AlarmDialog(Gtk.Dialog):
    """Modal dialog for creating a new alarm or editing an existing one.

    Pass alarm=None to create.  The dialog responds 1 for Save and 0 for
    Cancel; get_alarm_item() builds the resulting AlarmItem.
    """
    def __init__(self, parent, alarm=None):
        if alarm:
            Gtk.Dialog.__init__(self, _("Edit Alarm"), parent)
        else:
            Gtk.Dialog.__init__(self, _("New Alarm"), parent)
        self.set_border_width(6)
        self.parent = parent
        self.set_transient_for(parent)
        self.set_modal(True)
        self.day_buttons = []
        content_area = self.get_content_area()
        self.add_buttons(Gtk.STOCK_CANCEL, 0, Gtk.STOCK_SAVE, 1)
        self.cf = SystemSettings.get_clock_format()
        grid = Gtk.Grid()
        grid.set_row_spacing(9)
        grid.set_column_spacing(6)
        grid.set_border_width(6)
        content_area.pack_start(grid, True, True, 0)
        if alarm:
            h = alarm.hour
            m = alarm.minute
            name = alarm.name
            days = alarm.days
        else:
            # default a brand-new alarm to the current local time
            t = wallclock.localtime
            h = t.tm_hour
            m = t.tm_min
            name = _("New Alarm")
            days = []
        # Translators: "Time" in this context is the time an alarm
        # is set to go off (days, hours, minutes etc.)
        label = Gtk.Label(_("Time"))
        label.set_alignment(1.0, 0.5)
        grid.attach(label, 0, 0, 1, 1)
        self.hourselect = Gtk.SpinButton()
        self.hourselect.set_numeric(True)
        self.hourselect.set_increments(1.0, 1.0)
        self.hourselect.set_wrap(True)
        grid.attach(self.hourselect, 1, 0, 1, 1)
        label = Gtk.Label(": ")
        label.set_alignment(0.5, 0.5)
        grid.attach(label, 2, 0, 1, 1)
        self.minuteselect = Gtk.SpinButton()
        self.minuteselect.set_numeric(True)
        self.minuteselect.set_increments(1.0, 1.0)
        self.minuteselect.set_wrap(True)
        self.minuteselect.connect('output', self._show_leading_zeros)
        self.minuteselect.set_range(0.0, 59.0)
        self.minuteselect.set_value(m)
        grid.attach(self.minuteselect, 3, 0, 1, 1)
        if self.cf == "12h":
            # convert the stored 24h hour into a 1-12 value plus AM/PM
            self.ampm = Gtk.ComboBoxText()
            self.ampm.append_text("AM")
            self.ampm.append_text("PM")
            if h < 12:
                self.ampm.set_active(0) # AM
                if h == 0:
                    # bug fix: midnight is 12 AM.  Previously h stayed 0
                    # and the 1-12 spin button range clamped it to 1.
                    h = 12
            else:
                self.ampm.set_active(1) # PM
                h -= 12
                if h == 0:
                    h = 12
            grid.attach(self.ampm, 4, 0, 1, 1)
            self.hourselect.set_range(1.0, 12.0)
            self.hourselect.set_value(h)
            gridcols = 5
        else:
            self.hourselect.set_range(0.0, 23.0)
            self.hourselect.set_value(h)
            gridcols = 4
        label = Gtk.Label(_("Name"))
        label.set_alignment(1.0, 0.5)
        grid.attach(label, 0, 1, 1, 1)
        self.entry = Gtk.Entry()
        self.entry.set_text(name)
        self.entry.set_editable(True)
        grid.attach(self.entry, 1, 1, gridcols - 1, 1)
        label = Gtk.Label(_("Repeat Every"))
        label.set_alignment(1.0, 0.5)
        grid.attach(label, 0, 2, 1, 1)
        # create a box and put repeat days in it
        box = Gtk.Box(True, 0)
        box.get_style_context().add_class("linked")
        for i in range(7):
            # order the toggles starting from the locale's first weekday
            day_num = (LocalizedWeekdays.first_weekday() + i) % 7
            day_name = LocalizedWeekdays.get_abbr(day_num)
            btn = Gtk.ToggleButton(label=day_name)
            btn.data = day_num
            if btn.data in days:
                btn.set_active(True)
            box.pack_start(btn, True, True, 0)
            self.day_buttons.append(btn)
        grid.attach(box, 1, 2, gridcols - 1, 1)
    def _show_leading_zeros(self, spin_button):
        """SpinButton 'output' handler: render minutes zero-padded.

        Returning True suppresses the default numeric rendering.
        """
        spin_button.set_text('{:02d}'.format(spin_button.get_value_as_int()))
        return True
    def get_alarm_item(self):
        """Build and return an AlarmItem from the dialog's current widgets."""
        name = self.entry.get_text()
        h = self.hourselect.get_value_as_int()
        m = self.minuteselect.get_value_as_int()
        if self.cf == "12h":
            # map 12h display back to 24h: 12 AM -> 0, 1-11 PM -> 13-23
            r = self.ampm.get_active()
            if r == 0 and h == 12:
                h = 0
            elif r == 1 and h != 12:
                h += 12
        days = []
        for btn in self.day_buttons:
            if btn.get_active():
                days.append(btn.data)
        # needed in case the first day of the week is not 0 (Monday)
        days.sort()
        # if no days were selected, create a daily alarm
        if not days:
            days = AlarmItem.EVERY_DAY
        alarm = AlarmItem(name, h, m, days, True)
        return alarm
class AlarmStandalone(Gtk.EventBox):
    """Full-page view of a single alarm with Stop/Snooze buttons and an
    active/inactive switch; which control set is visible depends on the
    alarm's state (a tabless notebook switches between them)."""
    def __init__(self, view):
        Gtk.EventBox.__init__(self)
        self.get_style_context().add_class('view')
        self.get_style_context().add_class('content-view')
        # the owning Alarm clock page, used to persist changes
        self.view = view
        self.can_edit = True
        self.alarm_label = Gtk.Label()
        self.alarm_label.set_hexpand(True)
        self.alarm_label.set_alignment(0.5, 0.5)
        self.alarm_label.set_halign(Gtk.Align.CENTER)
        self.repeat_label = Gtk.Label()
        self.repeat_label.set_alignment(0.5, 0.5)
        self.left_button = Gtk.Button()
        self.left_button.get_style_context().add_class("clocks-stop")
        self.left_button.set_size_request(200, -1)
        left_label = Gtk.Label()
        self.left_button.add(left_label)
        self.right_button = Gtk.Button()
        self.right_button.set_size_request(200, -1)
        right_label = Gtk.Label()
        self.right_button.add(right_label)
        left_label.set_markup("<span font_desc=\"18.0\">%s</span>" % (_("Stop")))
        left_label.set_padding(6, 0)
        right_label.set_markup("<span font_desc=\"18.0\">%s</span>" % (_("Snooze")))
        right_label.set_padding(6, 0)
        self.left_button.connect('clicked', self._on_stop_clicked)
        self.right_button.connect('clicked', self._on_snooze_clicked)
        self.switch = Gtk.Switch()
        self.switch.show()
        self.switch.set_halign(Gtk.Align.CENTER)
        self.switch.set_valign(Gtk.Align.START)
        self.switch.connect("notify::active", self._on_switch)
        buttons = Gtk.Box()
        buttons.show()
        buttons.set_halign(Gtk.Align.CENTER)
        buttons.set_valign(Gtk.Align.START)
        buttons.pack_start(self.left_button, True, True, 0)
        buttons.pack_start(Gtk.Label(), True, True, 12)
        buttons.pack_start(self.right_button, True, True, 0)
        # page 0: active switch (READY), page 1: stop/snooze buttons
        self.controls_notebook = Gtk.Notebook()
        self.controls_notebook.set_margin_top(24)
        self.controls_notebook.set_show_tabs(False)
        self.controls_notebook.append_page(self.switch, None)
        self.controls_notebook.append_page(buttons, None)
        # expanding filler labels vertically center the content
        label_top = Gtk.Label()
        label_top.set_vexpand(True)
        label_bottom = Gtk.Label()
        label_bottom.set_vexpand(True)
        label_padding = Gtk.Label()
        label_padding.set_size_request(-1, 30)
        grid = Gtk.Grid()
        grid.set_orientation(Gtk.Orientation.VERTICAL)
        grid.add(label_top)
        grid.add(label_padding)
        grid.add(self.alarm_label)
        grid.add(self.repeat_label)
        grid.add(self.controls_notebook)
        grid.add(label_bottom)
        self.add(grid)
        # the AlarmItem currently shown; None until set_alarm() is called
        self.alarm = None
    def set_alarm(self, alarm):
        """Show *alarm* and sync every control to its current state."""
        self.alarm = alarm
        timestr = self.alarm.alarm_time_string
        repeat = self.alarm.alarm_repeat_string
        self.alarm_label.set_markup(
            "<span size='72000' color='dimgray'><b>%s</b></span>" % timestr)
        self.repeat_label.set_markup(
            "<span size='large' color='dimgray'><b>%s</b></span>" % repeat)
        is_ready = alarm.state == AlarmItem.State.READY
        is_ringing = alarm.state == AlarmItem.State.RINGING
        # Stop is usable while ringing or snoozing; Snooze only while ringing
        self.left_button.set_sensitive(not is_ready)
        self.right_button.set_sensitive(is_ringing)
        self.switch.set_active(alarm.active)
        self.controls_notebook.set_current_page(0 if is_ready else 1)
        self.show_all()
    def _on_stop_clicked(self, button):
        self.alarm.stop()
        self.controls_notebook.set_current_page(0)
    def _on_snooze_clicked(self, button):
        self.right_button.set_sensitive(False)
        self.alarm.snooze()
    def _on_switch(self, switch, param):
        # only persist when the switch actually changed the alarm state
        switch_active = switch.get_active()
        if self.alarm.active != switch_active:
            self.alarm.set_active(switch_active)
            self.view.save_alarms()
    def open_edit_dialog(self):
        """Open the edit dialog for the shown alarm."""
        # implicitely disable, we do not want to ring while editing.
        self.edited_active = self.alarm.active
        self.alarm.set_active(False)
        window = AlarmDialog(self.get_toplevel(), self.alarm)
        window.connect("response", self._on_dialog_response)
        window.show_all()
    def _on_dialog_response(self, dialog, response):
        # response 1 == Save, anything else == Cancel
        if response == 1:
            new_alarm = dialog.get_alarm_item()
            alarm = self.view.replace_alarm(self.alarm, new_alarm)
            self.set_alarm(alarm)
        else:
            # edited alarms are always active, instead on cancel
            # we restore the previous state
            self.alarm.set_active(self.edited_active)
        dialog.destroy()
class Alarm(Clock):
    """The Alarm clock page: an icon-view overview of all alarms plus a
    standalone page for one alarm, backed by AlarmsStorage."""
    class Page:
        # notebook page indices
        OVERVIEW = 0
        STANDALONE = 1
    def __init__(self, toolbar, embed):
        Clock.__init__(self, _("Alarm"), toolbar, embed)
        # Translators: "New" refers to an alarm
        self.new_button = ToolButton(_("New"))
        self.new_button.connect('clicked', self._on_new_clicked)
        self.select_button = SymbolicToolButton("object-select-symbolic")
        self.select_button.connect('clicked', self._on_select_clicked)
        self.done_button = ToolButton(_("Done"))
        self.done_button.get_style_context().add_class('suggested-action')
        self.done_button.connect("clicked", self._on_done_clicked)
        self.back_button = SymbolicToolButton("go-previous-symbolic")
        self.back_button.connect('clicked', self._on_back_clicked)
        self.edit_button = ToolButton(_("Edit"))
        self.edit_button.connect('clicked', self._on_edit_clicked)
        self.delete_button = Gtk.Button(_("Delete"))
        self.delete_button.connect('clicked', self._on_delete_clicked)
        # columns: selected flag, markup label, AlarmItem object
        self.liststore = Gtk.ListStore(bool, str, object)
        self.iconview = SelectableIconView(self.liststore, 0, 1, self._thumb_data_func)
        self.iconview.connect("item-activated", self._on_item_activated)
        self.iconview.connect("selection-changed", self._on_selection_changed)
        contentview = ContentView(self.iconview,
                                  "alarm-symbolic",
                                  _("Select <b>New</b> to add an alarm"))
        self.standalone = AlarmStandalone(self)
        self.insert_page(contentview, Alarm.Page.OVERVIEW)
        self.insert_page(self.standalone, Alarm.Page.STANDALONE)
        self.set_current_page(Alarm.Page.OVERVIEW)
        self.storage = AlarmsStorage()
        self.load_alarms()
        # re-evaluate every alarm on each clock tick
        wallclock.connect("time-changed", self._tick_alarms)
    def _on_new_clicked(self, button):
        self.activate_new()
    def _on_select_clicked(self, button):
        self.iconview.set_selection_mode(True)
        self.update_toolbar()
    def _on_done_clicked(self, button):
        self.iconview.set_selection_mode(False)
        self.update_toolbar()
        self._embed.hide_floatingbar()
    def _on_back_clicked(self, button):
        self.change_page_spotlight(Alarm.Page.OVERVIEW)
    def _on_edit_clicked(self, button):
        self.standalone.open_edit_dialog()
    def _on_delete_clicked(self, button):
        # delete every alarm currently selected in the icon view
        selection = self.iconview.get_selection()
        alarms = [self.liststore[path][2] for path in selection]
        self.delete_alarms(alarms)
        self.iconview.selection_deleted()
    def _thumb_data_func(self, view, cell, store, i, data):
        # cell renderer callback: style each thumbnail by active state
        alarm = store.get_value(i, 2)
        cell.text = alarm.alarm_time_string
        cell.subtext = alarm.alarm_repeat_string
        if alarm.active:
            cell.css_class = "active"
        else:
            cell.css_class = "inactive"
    # signal emitted when any alarm starts ringing; the default handler
    # jumps straight to the standalone page
    @GObject.Signal
    def alarm_ringing(self):
        self.set_current_page(Alarm.Page.STANDALONE)
    def _tick_alarms(self, *args):
        for a in self.alarms:
            if a.tick():
                # a.tick() returns True if the state changed
                if a.state == AlarmItem.State.RINGING:
                    self.standalone.set_alarm(a)
                    self.emit("alarm-ringing")
                elif self.standalone.alarm and self.standalone.alarm == a:
                    # update the alarm shown in the standalone, it
                    # might be visible
                    self.standalone.set_alarm(a)
    def _on_item_activated(self, iconview, path):
        alarm = self.liststore[path][2]
        self.standalone.set_alarm(alarm)
        self.change_page_spotlight(Alarm.Page.STANDALONE)
    def _on_selection_changed(self, iconview):
        selection = iconview.get_selection()
        n_selected = len(selection)
        self._toolbar.set_selection(n_selected)
        if n_selected > 0:
            self._embed.show_floatingbar(self.delete_button)
        else:
            self._embed.hide_floatingbar()
    def load_alarms(self):
        """(Re)populate self.alarms and the list store from storage."""
        self.alarms = self.storage.load()
        for alarm in self.alarms:
            self._add_alarm_item(alarm)
        # NOTE(review): passes the list itself, relying on PyGObject to
        # coerce truthiness; bool(self.alarms) would be more explicit.
        self.select_button.set_sensitive(self.alarms)
    def save_alarms(self):
        """Persist the alarm list, then rebuild the view from storage."""
        self.storage.save(self.alarms)
        self.liststore.clear()
        self.load_alarms()
    def add_alarm(self, alarm):
        # an equal alarm (same name/time/days) replaces the existing one
        if alarm in self.alarms:
            self.replace_alarm(alarm, alarm)
        else:
            self.alarms.append(alarm)
            self._add_alarm_item(alarm)
            self.show_all()
            self.save_alarms()
    def _add_alarm_item(self, alarm):
        label = GLib.markup_escape_text(alarm.name)
        self.liststore.append([False, "<b>%s</b>" % label, alarm])
    def replace_alarm(self, old_alarm, new_alarm):
        """Swap *old_alarm* for *new_alarm* in place and persist."""
        i = self.alarms.index(old_alarm)
        self.alarms[i] = new_alarm
        self.save_alarms()
        return self.alarms[i]
    def delete_alarms(self, alarms):
        """Remove every alarm in *alarms* and persist the result."""
        self.alarms = [a for a in self.alarms if a not in alarms]
        self.save_alarms()
    def update_toolbar(self):
        """Rebuild the toolbar for the current page and selection mode."""
        self._toolbar.clear()
        if self.get_current_page() == Alarm.Page.OVERVIEW:
            if self.iconview.selection_mode:
                self._toolbar.set_mode(Toolbar.Mode.SELECTION)
                self._toolbar.add_widget(self.done_button, Gtk.PackType.END)
            else:
                self._toolbar.set_mode(Toolbar.Mode.NORMAL)
                self._toolbar.add_widget(self.new_button)
                self._toolbar.add_widget(self.select_button, Gtk.PackType.END)
        elif self.get_current_page() == Alarm.Page.STANDALONE:
            self._toolbar.set_mode(Toolbar.Mode.STANDALONE)
            self._toolbar.add_widget(self.back_button)
            self._toolbar.add_widget(self.edit_button, Gtk.PackType.END)
            self._toolbar.set_title(GLib.markup_escape_text(
                self.standalone.alarm.name))
    def activate_new(self):
        """Open the New Alarm dialog."""
        window = AlarmDialog(self.get_toplevel())
        window.connect("response", self._on_dialog_response)
        window.show_all()
    def _on_dialog_response(self, dialog, response):
        # response 1 == Save
        if response == 1:
            alarm = dialog.get_alarm_item()
            self.add_alarm(alarm)
        dialog.destroy()
|
axt_message = '''
struct axt_message {
uint32_t num_points;
uint32_t version;
uint32_t scanner_type;
uint32_t ecu_id;
uint32_t timestamp_sensor;
double start_angle;
double end_angle;
uint32_t scan_counter;
int8_t * channel;
int8_t * point_status;
float *x;
float *y;
float *z;
double timestamp;
char *host;
};
'''
carmen_point_t = '''
struct carmen_point_t {
double x;
double y;
double theta;
};
'''
carmen_base_odometry_message = '''
struct carmen_base_odometry_message {
double x;
double y;
double theta;
double tv;
double rv;
double acceleration;
double timestamp;
char *host;
};
'''
carmen_base_velocity_message = '''
struct carmen_base_velocity_message {
double tv;
double rv;
double timestamp;
char *host;
};
'''
carmen_laser_laser_config_t = '''
struct carmen_laser_laser_config_t {
int32_t laser_type;
double start_angle;
double fov;
double angular_resolution;
double maximum_range;
double accuracy;
int32_t remission_mode;
};
'''
carmen_laser_laser_message = '''
struct carmen_laser_laser_message {
int32_t id;
carmen_laser_laser_config_t config;
int32_t num_readings;
float *range;
int32_t num_remissions;
float *remission;
double timestamp;
char *host;
};
'''
carmen_robot_laser_message = '''
struct carmen_robot_laser_message {
int32_t id;
carmen_laser_laser_config_t config;
int32_t num_readings;
float * range;
int8_t * tooclose;
int32_t num_remissions;
float * remission;
carmen_point_t laser_pose;
carmen_point_t robot_pose;
double tv;
double rv;
double forward_safety_dist;
double side_safety_dist;
double turn_axis;
double timestamp;
char *host;
};
'''
carmen_localize_globalpos_message = '''
struct carmen_localize_globalpos_message {
carmen_point_t globalpos;
carmen_point_t globalpos_std;
carmen_point_t odometrypos;
double globalpos_xy_cov;
int32_t converged;
double timestamp;
char *host;
};
'''
path_message = '''
struct path_message {
uint32_t point_count;
double * x;
double * y;
double * theta;
double velocity;
double timestamp;
char *host;
};
'''
path_stop_message = '''
struct path_stop_message {
double timestamp;
char *host;
};
'''
smart_velocity_message = '''
struct smart_velocity_message {
double tv;
double steering_angle;
double timestamp;
char* host;
};
'''
smart_status_message = '''
struct smart_status_message {
double gas_pos;
int32_t gear;
double steering_angle;
double tv;
double rv_front_right;
double rv_front_left;
double rv_rear_right;
double rv_rear_left;
double timestamp;
char* host;
};
'''
|
"""
SpamReport model defines spam reports for specific revisions of reviews. Only one
spam report can be created by a single user for a specific revision.
"""
from critiquebrainz.data import db
from sqlalchemy.dialects.postgresql import UUID
from critiquebrainz.data.model.mixins import DeleteMixin
from datetime import datetime
class SpamReport(db.Model, DeleteMixin):
    """Spam report filed by a user against one review revision.

    The composite primary key (user_id, revision_id) guarantees that a
    single user can report a given revision at most once.
    """
    __tablename__ = 'spam_report'
    # reporting user; row is removed if the user is deleted (CASCADE)
    user_id = db.Column(UUID, db.ForeignKey('user.id', ondelete='CASCADE'), primary_key=True)
    # reported revision; row is removed with the revision (CASCADE)
    revision_id = db.Column(db.Integer, db.ForeignKey('revision.id', ondelete='CASCADE'), primary_key=True)
    reported_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    @classmethod
    def create(cls, revision_id, user):
        """Create, persist and return a new SpamReport.

        NOTE(review): assigns via a 'user' relationship attribute that is
        presumably declared on the model elsewhere -- confirm.
        """
        report = cls(user=user, revision_id=revision_id)
        db.session.add(report)
        db.session.commit()
        return report
|
# B3 plugin metadata
__author__ = 'Freelander, Bravo17, Just a baka'
__version__ = '1.0.17'
import b3, threading
from b3 import functions
import b3.events
import b3.plugin
import urllib2, urllib
import os.path
import StringIO
import gzip
import time
import socket
import re, sys
# User-Agent sent with every request to the remote HTTP log host
user_agent = "B3 Cod7Http plugin/%s" % __version__
class Cod7HttpPlugin(b3.plugin.Plugin):
    """Downloads and appends the remote game log file for CoD7 to a local
    log file from a http location given by GSP.
    """
    requiresConfigFile = False
    #Timout url set by gameservers.com
    _timeout_url = 'http://logs.gameservers.com/timeout'
    _default_timeout = 5
    # whether to append to an existing local log instead of rewriting it
    _logAppend = True
    # tail (last ~1000 chars) of the downloaded log, used as a "lazy
    # cursor" to locate where the remote log continues on the next poll
    lastlines = ''
    httpthreadinst = None
    def onLoadConfig(self):
        # no plugin-specific config file; everything is read from the
        # main B3 config in onStartup()
        pass
    def initThread(self):
        """Starts a thread for cod7http plugin."""
        thread1 = threading.Thread(target=self.processData)
        self.info("Starting cod7http thread")
        thread1.start()
        self.httpthreadinst = thread1
    def onStartup(self):
        """Sets and loads config values from the main config file."""
        # this plugin is Python 2 code and requires at least Python 2.6
        versionsearch = re.search("^((?P<mainversion>[0-9]).(?P<lowerversion>[0-9]+)?)", sys.version)
        version = int(versionsearch.group(3))
        if version < 6:
            self.error('Python Version %s, this is not supported and may lead to hangs. Please update Python to 2.6' % versionsearch.group(1))
            self.console.die()
        if self.console.config.has_option('server', 'local_game_log'):
            self.locallog = self.console.config.getpath('server', 'local_game_log')
        else:
            # setup ip addresses
            self._publicIp = self.console.config.get('server', 'public_ip')
            self._port = self.console.config.getint('server', 'port')
            if self._publicIp[0:1] == '~' or self._publicIp[0:1] == '/':
                # load ip from a file
                f = file(self.console.getAbsolutePath(self._publicIp))
                self._publicIp = f.read().strip()
                f.close()
            # derive a local name such as games_mp_<ip>_<port>.log
            logext = str(self._publicIp.replace('.', '_'))
            logext = 'games_mp_' + logext + '_' + str(self._port) + '.log'
            self.locallog = os.path.normpath(os.path.expanduser(logext))
        self.debug('Local Game Log is %s' % self.locallog)
        if self.console.config.has_option('server', 'log_append'):
            self._logAppend =self.console.config.getboolean('server', 'log_append')
        else:
            self._logAppend = False
        if self.console.config.has_option('server', 'log_timeout'):
            self.timeout = self.console.config.get('server', 'log_timeout')
        else:
            #get timeout value set by gameservers.com
            try:
                req = urllib2.Request(self._timeout_url)
                req.headers['User-Agent'] = user_agent
                f = urllib2.urlopen(req)
                self.timeout = int(f.readlines()[0])
                f.close()
                self.debug('Using timeout value of %s seconds' % self.timeout)
            except (urllib2.HTTPError, urllib2.URLError, socket.timeout), error:
                self.timeout = self._default_timeout
                self.error('ERROR: %s' % error)
                self.error('ERROR: Couldn\'t get timeout value. Using default %s seconds' % self.timeout)
        if self.console.config.get('server','game_log')[0:7] == 'http://' :
            self._url = self.console.config.get('server','game_log')
            self.initThread()
        else:
            self.error('Your game log url doesn\'t seem to be valid. Please check your config file')
            self.console.die()
    def httpThreadalive(self):
        """Test whether processData thread is still running."""
        return self.httpthreadinst.isAlive()
    def writeCompletelog(self, locallog, remotelog):
        """Will restart writing the local log when bot started for the first time
        or if last line cannot be found in remote chunk
        """
        #pause the bot from parsing, because we don't
        #want to parse the log from the beginning
        if self.console._paused is False:
            self.console.pause()
            self.debug('Pausing')
        # Remove last line if not complete
        i = remotelog.rfind ('\r\n')
        remotelog = remotelog[:i + 2]
        # remove any blank lines
        while remotelog[-4:-2] == '\r\n':
            remotelog = remotelog[:-2]
        # use Just a baka's lazy cursor
        self.lastlines = remotelog[-1000:]
        #create or open the local log file
        if self._logAppend:
            output = open(locallog, 'ab')
        else:
            output = open(locallog, 'wb')
        # marker lines only; the downloaded content itself is written by
        # subsequent incremental appends in processData()
        output.write('\r\n')
        output.write('B3 has restarted writing the log file\r\n')
        output.write('\r\n')
        output.close()
        self.info('Remote log downloaded successfully')
        #we can now start parsing again
        if self.console._paused:
            self.console.unpause()
            self.debug('Unpausing')
    def processData(self):
        """Main method for plugin. It's processed by initThread method.

        Polls the remote HTTP log in a loop: downloads the tail of the
        remote file (gzip-compressed when the server supports it), finds
        the previously-seen cursor text and appends only the new lines to
        the local log.  Runs until the console stops working.
        """
        _lastLine = True
        _firstRead = True
        n = 0
        while self.console.working:
            remotelog = ''
            response = ''
            remote_log_data = ''
            #Specify range depending on if the last line
            #is in the remote log chunk or not
            if _lastLine:
                bytes = 'bytes=-10000'
            else:
                bytes = 'bytes=-100000'
            headers = { 'User-Agent' : user_agent,
                        'Range' : bytes,
                        'Accept-encoding' : 'gzip' }
            #self.verbose('Sending request')
            request = urllib2.Request(self._url, None, headers)
            #get remote log url response and headers
            try:
                response = urllib2.urlopen(request)
                headers = response.info()
                #buffer/download remote log
                if response != '':
                    remote_log_data = response.read()
                    remotelogsize = round((len(remote_log_data)/float(1024)), 2)
                    #self.verbose('Downloaded: %s KB total' % remotelogsize)
                try:
                    #close remote file
                    response.close()
                except AttributeError, error:
                    self.error('ERROR: %s' % error)
            except (urllib2.HTTPError, urllib2.URLError), error:
                self.error('HTTP ERROR: %s' % error)
            except socket.timeout:
                self.error('TIMEOUT ERROR: Socket Timed out!')
            #start keeping the time
            start = time.time()
            #decompress remote log and return for use
            # First, make sure that there is domething worth decompressing
            # In case the server has just done a restart
            if len(remote_log_data) > 0:
                try:
                    #self.debug('Content-Encoding: %s' % headers.get('Content-Encoding'))
                    if headers.get('Content-Encoding') == 'gzip':
                        compressedstream = StringIO.StringIO(remote_log_data)
                        gzipper = gzip.GzipFile(fileobj=compressedstream)
                        remotelog = gzipper.read()
                    else:
                        remotelog = remote_log_data
                except IOError, error:
                    remotelog = ''
                    self.error('IOERROR: %s' % error)
                if os.path.exists(self.locallog) and os.path.getsize(self.locallog) > 0 and not _firstRead:
                    #check if last line is in the remote log chunk
                    if remotelog.find(self.lastlines) != -1:
                        _lastLine = True
                        n = 0
                        #we'll get the new lines i.e what is available after the last line
                        #of our local log file
                        try:
                            checklog = remotelog.rpartition(self.lastlines)
                            newlog = checklog[2]
                            # Remove any broken last line
                            i = newlog.rfind ('\r\n')
                            newlog = newlog[:i + 2]
                            # Remove any blank lines
                            while newlog[-4:-2] == '\r\n':
                                newlog = newlog[:-2]
                        except ValueError, error:
                            self.error ('ValueError: %s' % error)
                            newlog = ''
                        # Remove any blank lines from end
                        #append the additions to our log if there is something and update lazy cursor
                        if len(newlog) > 0:
                            output = open(self.locallog,'ab')
                            output.write(newlog)
                            output.close()
                            self.lastlines = remotelog[-1000:]
                            self.debug('Downloaded %s KB and added %s char(s) to log' % (remotelogsize, len(newlog)))
                    else:
                        _lastLine = False
                        self.debug('Can\'t find last line in the log chunk, checking again...')
                        n += 1
                        #check once in a larger chunk and if we are still unable to find last line
                        #in the remote chunk, restart the process
                        if n == 2:
                            self.debug('Logs rotated or unable to find last line in remote log, restarting process...')
                            self.writeCompletelog(self.locallog, remotelog)
                            _lastLine = True
                            n = 0
                else:
                    self.debug('Writing first log read')
                    self.writeCompletelog(self.locallog, remotelog)
                    _firstRead = False
            #calculate how long it took to process
            timespent = time.time() - start
            #calculate time to wait until next request.
            timeout = float(self.timeout)
            #self.verbose('Given timeout value is %s seconds' % timeout)
            #self.verbose('Total time spent to process the downloaded file is %s seconds' % timespent)
            #Calculate sleep time for next request. Adding 0.1 secs to prevent HTTP Error 403 errors
            wait = float((timeout - timespent) + 0.1)
            if wait <= 0:
                wait = 1
            #self.verbose('Next request in %s second(s)' % wait)
            # Make the plugin thread fast-killable
            i = 0
            w = int(wait)
            while i < w and self.console.working:
                time.sleep(1)
                i += 1
            time.sleep(wait - w)
        self.verbose('B3 is down, stopping Cod7Http Plugin')
if __name__ == '__main__':
    # manual smoke test: drive the download loop against a dummy URL using
    # B3's fake console (no real game server required)
    from b3.fake import fakeConsole
    p = Cod7HttpPlugin(fakeConsole)
    p._url = "http://www.example.com"
    p.timeout = 5
    p.locallog ='test.log'
    p.processData()
|
from Core.models import BookDetail
from Core.models import Book
from Core.keys import API_KEY
from pattern.it import parsetree as it_parsetree
from pattern.en import parsetree as en_parsetree
from pattern.search import search
import requests
import json
from lxml import html
def remove_uncorrect_tokens(tokens):
    """Deduplicate *tokens*, drop entries of 3 characters or fewer, and
    return the survivors ordered longest-first."""
    unique_tokens = set(tokens)
    kept = [tok for tok in unique_tokens if len(tok) > 3]
    kept.sort(key=len, reverse=True)
    return kept
def get_ngrams(description, lang='it'):
    """
    Analyze description and get relevant ngrams using a POS tagger,
    looking for exact combinations of POS patterns.

    Keyword arguments:
    @param: description text to analyze
    @param: lang language of the text: 'it' (default) or 'en'
    """
    # Parse once with the tagger for the requested language.  The original
    # always ran the (expensive) Italian parser first and then discarded
    # that result when lang == "en".
    if lang == "en":
        s = en_parsetree(description, relations=True, lemmata=True)
    else:
        s = it_parsetree(description, relations=True, lemmata=True)
    matches = []
    ngrams = []
    # adjective-noun, noun-adjective and bare-noun chunks are all relevant
    for pattern in ("JJ NN", "NN JJ", "NN"):
        for match in search(pattern, s):
            matches.append(match.constituents())
    for match in matches:
        ngrams.append(" ".join([chunk.string for chunk in match]).encode("utf8"))
    return remove_uncorrect_tokens(ngrams)
def retrieve_additional_info(isbn):
    """
    Retrieve additional info about books not yet processed.

    Keyword arguments:
    @param: isbn the ISBN book code

    Returns a Book with populated details, or None when the Google Books
    API has no matching volume (or the volume has no title).  The original
    crashed in both cases (KeyError on a missing "items" key, and
    AttributeError on 'book' being None).
    """
    r = requests.get("https://www.googleapis.com/books/v1/volumes?q=isbn:%s&key=%s&country=IT" %(isbn, API_KEY))
    response = json.loads(r.text)
    if "items" not in response:
        # no volume matches this ISBN
        return None
    parsed_result = response["items"][0]
    volume_info = parsed_result["volumeInfo"]
    if "title" not in volume_info:
        # without a title we cannot build a Book instance
        return None
    book = Book(title=(volume_info["title"]).encode("utf8"))
    pres = {}
    if "authors" in volume_info:
        pres["authors"] = [author.encode("utf8") for author in volume_info["authors"]]
    if "categories" in volume_info:
        pres["tags"] = volume_info["categories"]
    if "imageLinks" in volume_info:
        pres["image_link"] = volume_info["imageLinks"]["thumbnail"]
    if "industryIdentifiers" in volume_info:
        pres["isbn"] = isbn
    # Always capture the description: the original only handled it (and the
    # derived parsed_tags) inside the industryIdentifiers branch, and
    # dropped a description that was already present in the first response.
    if "description" in volume_info:
        pres["description"] = (volume_info["description"]).encode("utf8")
    else:
        # short results omit the description; fetch the full volume record
        r = requests.get("%s?key=%s&country=IT" %(parsed_result["selfLink"], API_KEY))
        parsed_result = json.loads(r.text)
        volume_info = parsed_result["volumeInfo"]
        desc = volume_info.get("description")
        pres["description"] = desc.encode("utf8") if desc else ""
    lang = volume_info["language"]
    pres["parsed_tags"] = get_ngrams(pres["description"], lang=lang)
    book.details = BookDetail(**pres)
    return book
|
"""
sample program 1
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from appinstance import AppInstance, AppInstanceRunning
import time
def main():
    """
    Acquire the single-instance application lock, hold it briefly, and
    report when another instance already holds it.
    """
    try:
        with AppInstance("arg1", True):
            # message now matches the actual sleep below (it said "5 sec"
            # while the code slept for 2)
            print("sleep for 2 sec")
            time.sleep(2)
    except AppInstanceRunning:
        print("already running")


if __name__ == "__main__":
    main()
|
class Solution:
    # @param a list of integers
    # @return an integer
    def removeDuplicates(self, A):
        """Remove duplicates from the sorted list A in place and return
        the count of unique elements; A[:count] holds them afterwards."""
        if not A:
            return 0
        write = 0
        for value in A:
            if value != A[write]:
                write += 1
                A[write] = value
        return write + 1
|
from datetime import datetime
import dbentry
import os
import time
import random
import read_init as sensor
def get_temp():
    """Get temperature data.

    Placeholder implementation: yields a random integer in [0, 100]
    until the real sensor routine is wired in.
    """
    # TODO: Call sensor routine to get temperature
    return random.randint(0, 100)
def write_to_file(temp, date_obj):
    """ Write contents to a file.
    The file is named .sensor_info, and is located in the user's
    home directory.

    @param temp: numeric reading, zero-padded to 3 digits in the file
    @param date_obj: datetime of the reading (formatted mm/dd/YYYY-HH:MM)
    """
    temp_str = str(temp).zfill(3)
    date_str = date_obj.strftime('%m/%d/%Y-%H:%M')
    home_path = os.path.expanduser("~")
    # context manager guarantees the handle is closed; the original left
    # the file object open (leaked descriptor, possibly unflushed data)
    with open(home_path + "/.sensor_info", "a") as file_ptr:
        file_ptr.write(date_str + "," + temp_str + "\n")
def main():
    """ Call sub routines to return temperature data,
    other sensor data, and then call function to write
    the data to a file (database).

    A reading of -1 for either value marks a failed read.
    """
    # Try calling 5 times, with a 2 second gap, if a valid value
    # isn't returned
    for _ in range(5):
        temp, hum = sensor.get_sensor_val()
        # '!=' instead of 'is not': identity comparison against an int
        # literal only worked by accident of CPython small-int caching
        if temp != -1 and hum != -1:
            dbentry.write_values_to_db(temp, hum)
            break
        else:
            time.sleep(2)

if __name__ == '__main__':
    main()
|
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.contrib import auth
from django.template import Context
from social.layers.layers_manager import *
from social.rest.layers import create_layer, delete_layer, request_layer
from social.rest.notes import note_upload
from social.rest.photos import photo_upload
from social.rest.sounds import sound_upload
from social.rest.views import node_delete
from social.core.api_layer import get_layer
def init (request):
    """Render the backend login page with a greeting in the context."""
    context = Context({ 'mensaje' : 'hola', })
    page = render_to_response ( 'backend/login/index.html', context )
    return HttpResponse( page )
def login(request):
    """Authenticate a user from POSTed credentials.

    On success the session is logged in and the browser is redirected to
    the backend home page; on failure an error page is rendered.  GET
    requests simply show the login form.
    """
    if request.method == 'POST':
        password = request.POST.get('password', '')
        username = request.POST.get('username', '')
        user = auth.authenticate( username=username , password=password)
        if user is not None:
            auth.login(request, user)
            return HttpResponseRedirect('/backend/home')
        else:
            con = Context({ 'message' : "Error de autenticacion" , })
            # NOTE(review): 'backend/login/' looks like a directory, not a
            # template file -- probably 'backend/login/index.html' was meant.
            return render_to_response('backend/login/', con )
    html = render_to_response ('backend/login/index.html')
    return HttpResponse( html)
def home (request):
    """Render the backend home page; anonymous users go to the login page."""
    if not request.user.is_authenticated():
        # absolute URL: the previous relative 'backend/login/' resolved
        # against the current path and missed the login view
        return HttpResponseRedirect('/backend/login/')
    html = render_to_response ('backend/home.html')
    return HttpResponse( html)
def layers (request):
    """List the user's layers and handle creation of a new layer (POST)."""
    if not request.user.is_authenticated():
        # send anonymous users to the login page; the previous relative
        # redirect to 'backend/layers/' looped back onto this same view
        return HttpResponseRedirect('/backend/login/')
    msg = ""
    if request.method == 'POST':
        # create the layer; the page below re-lists everything afterwards
        create_layer(request)
    success, results = layers_list (request.user,True)
    con = Context({ 'url_create_layer': '/backend/layers/',
                    'results' : results ,
                    'msg' : msg })
    html = render_to_response ('backend/layers.html', con )
    return HttpResponse( html)
def get_layers_content (request, layer_id):
options = request.GET.copy()
options["user"] = request.user
options["layer_id"] = layer_id
options["latitude"]=0.0
options["longitude"]=0.0
options["radius"]=0.0
options["category"]="0"
options["elems"]=10000
success, response = layers_search(layer_id, "", options)
print response
print type (response[0].position)
con = Context({ 'url_create_layer': '/backend/layers/',
'list' : response,
'layer_id' : layer_id,
'msg' : "" })
html = render_to_response ('backend/node_list.html', con )
return HttpResponse( html)
def layers_delete(request):
    """Delete the layer named in POST['id'], then return to the layer list.

    Every path -- unauthenticated, GET, or after deletion -- ends with a
    redirect back to /backend/layers/.
    """
    if request.user.is_authenticated():
        if request.method == 'POST':
            delete_layer(request, int(request.POST["id"]))
    return HttpResponseRedirect('/backend/layers/')
def contents(request):
    """Render the content-upload page with the user's layers.

    Unauthenticated users are redirected to the login page.
    """
    if not request.user.is_authenticated():
        # Fix: was 'backend/contents/' -- a *relative* redirect back to this
        # same view (loop / broken path). Send to the login page instead.
        return HttpResponseRedirect('/backend/login/')
    success, results = layers_list(request.user, True)
    msg = ""
    con = Context({'results': results,
                   'msg': msg})
    html = render_to_response('backend/contents.html', con)
    return HttpResponse(html)
def list_nodes (request):
    # NOTE(review): BUG -- 'layer' and 'msg' are never defined in this scope,
    # so reaching the Context() line raises NameError. The function looks
    # unfinished; compare contents(), which builds 'results'/'msg' via
    # layers_list(). Needs the author's intent to fix properly.
    if not request.user.is_authenticated():
        # NOTE(review): relative self-redirect ('backend/contents/'); the
        # other views suggest the intended target is the login page -- confirm.
        return HttpResponseRedirect('backend/contents/')
    con = Context({ 'layer' : layer ,
                    'msg' : msg })
    html = render_to_response ('backend/contents.html', con )
    return HttpResponse(html)
def delete_node (request, layer_id, node_id):
result = node_delete(request,layer_id,node_id)
print result
return get_layers_content(request, layer_id)
def note_create (request):
if not request.user.is_authenticated():
return HttpResponseRedirect('backend/contents/')
res = note_upload(request, int(request.POST["layer"]) )
success, results = layers_list (request.user,True)
print res
msg=""
con = Context({ 'results' : results ,
'msg' : msg })
html = render_to_response ('backend/contents.html', con )
return HttpResponse(html)
def photo_create (request):
if not request.user.is_authenticated():
return HttpResponseRedirect('backend/contents/')
res = photo_upload(request, int(request.POST["layer"]) )
success, results = layers_list (request.user,True)
print res
msg=""
con = Context({ 'results' : results ,
'msg' : msg })
html = render_to_response ('backend/contents.html', con )
return HttpResponse(html)
def sound_create (request):
if not request.user.is_authenticated():
return HttpResponseRedirect('backend/contents/')
res = sound_upload(request, int(request.POST["layer"]) )
success, results = layers_list (request.user,True)
print res
msg=""
con = Context({ 'results' : results ,
'msg' : msg })
html = render_to_response ('backend/contents.html', con )
return HttpResponse(html)
|
"Find people who are not related to the selected person"
from gi.repository import Gtk
from gi.repository import GObject
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
ngettext = glocale.translation.ngettext # else "nearby" comments are ignored
from gramps.gen.const import URL_MANUAL_PAGE
from gramps.gen.errors import WindowActiveError
from gramps.gui.plug import tool
from gramps.gen.plug.report import utils
from gramps.gui.editors import EditPerson, EditFamily
from gramps.gui.managedwindow import ManagedWindow
from gramps.gui.utils import ProgressMeter
from gramps.gui.display import display_help
from gramps.gui.glade import Glade
from gramps.gen.lib import Tag
from gramps.gen.db import DbTxn
WIKI_HELP_PAGE = '%s_-_Tools' % URL_MANUAL_PAGE
WIKI_HELP_SEC = _('manual|Not_Related')
class NotRelated(tool.ActivePersonTool, ManagedWindow):
    """Tool window listing every person NOT related to the active person.

    Walks the family graph outward from the active person (spouses, parents,
    siblings, children) to build the set of related people, then lists the
    remainder of the database in a tree grouped by surname. Selected rows can
    be tagged (e.g. 'ToDo' / 'NotRelated') in one transaction.
    """

    def __init__(self, dbstate, user, options_class, name, callback=None):
        """Build the dialog, compute related/unrelated sets, populate the tree."""
        uistate = user.uistate

        tool.ActivePersonTool.__init__(self, dbstate, uistate, options_class,
                                       name)
        if self.fail:   # bug #2709 -- fail if we have no active person
            return

        person_handle = uistate.get_active('Person')
        person = dbstate.db.get_person_from_handle(person_handle)
        self.name = person.get_primary_name().get_regular_name()
        self.title = _('Not related to "%s"') % self.name
        ManagedWindow.__init__(self, uistate, [], self.__class__)
        self.dbstate = dbstate
        self.uistate = uistate
        self.db = dbstate.db

        topDialog = Glade()
        topDialog.connect_signals({
            "destroy_passed_object" : self.close,
            "on_help_clicked"       : self.on_help_clicked,
            "on_delete_event"       : self.close,
        })

        window = topDialog.toplevel
        title = topDialog.get_object("title")
        self.set_window(window, title, self.title)
        self.setup_configs('interface.notrelated', 450, 400)

        # combo of tag names the user may apply to selected rows
        self.tagcombo = topDialog.get_object("tagcombo")
        tagmodel = Gtk.ListStore(str)
        self.tagcombo.set_model(tagmodel)
        self.tagcombo.set_entry_text_column(0)
        tagmodel.append((_('ToDo'),))
        tagmodel.append((_('NotRelated'),))
        # disabled until at least one row is selected (see rowSelectionChanged)
        self.tagcombo.set_sensitive(False)

        self.tagapply = topDialog.get_object("tagapply")
        self.tagapply.set_sensitive(False)
        self.tagapply.connect('clicked', self.applyTagClicked)

        # start the progress indicator
        self.progress = ProgressMeter(self.title, _('Starting'),
                                      parent=self.uistate.window)

        # setup the columns
        self.model = Gtk.TreeStore(
            GObject.TYPE_STRING,    # 0==name
            GObject.TYPE_STRING,    # 1==person gid
            GObject.TYPE_STRING,    # 2==parents
            GObject.TYPE_STRING,    # 3==tags
            GObject.TYPE_STRING)    # 4==family gid (not shown to user)

        # note -- don't assign the model to the tree until it has been populated,
        # otherwise the screen updates are terribly slow while names are appended
        self.treeView = topDialog.get_object("treeview")

        col1 = Gtk.TreeViewColumn(_('Name'), Gtk.CellRendererText(), text=0)
        col2 = Gtk.TreeViewColumn(_('ID'), Gtk.CellRendererText(), text=1)
        col3 = Gtk.TreeViewColumn(_('Parents'), Gtk.CellRendererText(), text=2)
        col4 = Gtk.TreeViewColumn(_('Tags'), Gtk.CellRendererText(), text=3)

        col1.set_resizable(True)
        col2.set_resizable(True)
        col3.set_resizable(True)
        col4.set_resizable(True)

        col1.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        col2.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        col3.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)
        col4.set_sizing(Gtk.TreeViewColumnSizing.AUTOSIZE)

        col1.set_sort_column_id(0)
        col4.set_sort_column_id(3)

        self.treeView.append_column(col1)
        self.treeView.append_column(col2)
        self.treeView.append_column(col3)
        self.treeView.append_column(col4)

        self.treeSelection = self.treeView.get_selection()
        self.treeSelection.set_mode(Gtk.SelectionMode.MULTIPLE)
        # surname header rows must not be selectable, only person rows
        self.treeSelection.set_select_function(self.selectIsAllowed, None)
        self.treeSelection.connect('changed', self.rowSelectionChanged)
        self.treeView.connect('row-activated', self.rowActivated)

        # initialize a few variables we're going to need
        self.numberOfPeopleInDatabase = self.db.get_number_of_people()
        self.numberOfRelatedPeople = 0
        self.numberOfUnrelatedPeople = 0

        # create the sets used to track related and unrelated people
        self.handlesOfPeopleToBeProcessed = set()
        self.handlesOfPeopleAlreadyProcessed = set()
        self.handlesOfPeopleNotRelated = set()

        # build a set of all people related to the selected person
        self.handlesOfPeopleToBeProcessed.add(person.get_handle())
        self.findRelatedPeople()

        # now that we have our list of related people, find everyone
        # in the database who isn't on our list
        self.findUnrelatedPeople()

        # populate the treeview model with the names of unrelated people
        if self.numberOfUnrelatedPeople == 0:
            # feature request 2356: avoid genitive form
            title.set_text(_('Everyone in the database is related to %s') % self.name)
        else:
            self.populateModel()
            self.model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
            self.treeView.set_model(self.model)
            self.treeView.expand_all()

        # done searching through the database, so close the progress bar
        self.progress.close()

        self.show()

    def iterIsSeparator(self, model, iter):
        """Treat rows without a GID (the surname headers) as separators."""
        # return True only if the row is to be treated as a separator
        if self.model.get_value(iter, 1) == '':  # does the row have a GID?
            return True
        return False

    def selectIsAllowed(self, selection, model, path, isSelected, userData):
        """Allow selection only of leaf (person) rows, not surname headers."""
        # return True/False depending on if the row being selected is a leaf node
        iter = self.model.get_iter(path)
        if self.model.get_value(iter, 1) == '':  # does the row have a GID?
            return False
        return True

    def rowSelectionChanged(self, selection):
        """Enable the tag combo/button only while something is selected."""
        state = selection.count_selected_rows() > 0
        self.tagcombo.set_sensitive(state)
        self.tagapply.set_sensitive(state)

    def rowActivated(self, treeView, path, column):
        """On double-click, open the family editor (if any) or person editor."""
        # first we need to check that the row corresponds to a person
        iter = self.model.get_iter(path)
        personGid = self.model.get_value(iter, 1)
        familyGid = self.model.get_value(iter, 4)

        if familyGid != '':  # do we have a family?
            # get the parent family for this person
            family = self.db.get_family_from_gramps_id(familyGid)
            if family:
                try:
                    EditFamily(self.dbstate, self.uistate, [], family)
                except WindowActiveError:
                    # editor for this family is already open; ignore
                    pass
        elif personGid != '':  # do we have a person?
            # get the person that corresponds to this GID
            person = self.db.get_person_from_gramps_id(personGid)
            if person:
                try:
                    EditPerson(self.dbstate, self.uistate, [], person)
                except WindowActiveError:
                    # editor for this person is already open; ignore
                    pass

    def on_help_clicked(self, obj):
        """Display the relevant portion of Gramps manual"""
        display_help(WIKI_HELP_PAGE , WIKI_HELP_SEC)

    def applyTagClicked(self, button) :
        """Apply the chosen tag to every selected person in one transaction."""
        progress = None
        rows = self.treeSelection.count_selected_rows()
        tag_name = str(self.tagcombo.get_active_text())

        # start the db transaction
        with DbTxn("Tag not related", self.db) as transaction:
            tag = self.db.get_tag_from_name(tag_name)
            if not tag:
                # create the tag if it doesn't already exist
                tag = Tag()
                tag.set_name(tag_name)
                tag.set_priority(self.db.get_number_of_tags())
                tag_handle = self.db.add_tag(tag, transaction)
            else:
                tag_handle = tag.get_handle()

            # if more than 1 person is selected, use a progress indicator
            if rows > 1:
                progress = ProgressMeter(self.title, _('Starting'),
                                         parent=self.window)
                progress.set_pass(
                    # translators: leave all/any {...} untranslated
                    #TRANS: no singular form needed, as rows is always > 1
                    ngettext("Setting tag for {number_of} person",
                             "Setting tag for {number_of} people",
                             rows).format(number_of=rows),
                    rows)

            # iterate through all of the selected rows
            (model, paths) = self.treeSelection.get_selected_rows()

            for path in paths:
                if progress:
                    progress.step()

                # for the current row, get the GID and the person from the database
                iter = self.model.get_iter(path)
                personGid = self.model.get_value(iter, 1)
                person = self.db.get_person_from_gramps_id(personGid)

                # add the tag to the person
                person.add_tag(tag_handle)

                # save this change
                self.db.commit_person(person, transaction)

        # refresh the tags column
        # (detach the model first so the view isn't repainted per-row)
        self.treeView.set_model(None)
        for path in paths:
            iter = self.model.get_iter(path)
            personGid = self.model.get_value(iter, 1)
            person = self.db.get_person_from_gramps_id(personGid)
            self.model.set_value(iter, 3, self.get_tag_list(person))
        self.treeView.set_model(self.model)
        self.treeView.expand_all()

        if progress:
            progress.close()

    def findRelatedPeople(self):
        """Breadth-out walk of the family graph to collect all related handles."""
        self.progress.set_pass(
            # translators: leave all/any {...} untranslated
            #TRANS: No singular form is needed.
            ngettext("Finding relationships between {number_of} person",
                     "Finding relationships between {number_of} people",
                     self.numberOfPeopleInDatabase
                    ).format(number_of=self.numberOfPeopleInDatabase),
            self.numberOfPeopleInDatabase)

        # as long as we have people we haven't processed yet, keep looping
        while len(self.handlesOfPeopleToBeProcessed) > 0:
            handle = self.handlesOfPeopleToBeProcessed.pop()

            # see if we've already processed this person
            if handle in self.handlesOfPeopleAlreadyProcessed:
                continue

            person = self.db.get_person_from_handle(handle)

            # if we get here, then we're dealing with someone new
            self.progress.step()

            # remember that we've now seen this person
            self.handlesOfPeopleAlreadyProcessed.add(handle)

            # we have 4 things to do:  find (1) spouses, (2) parents, siblings(3), and (4) children

            # step 1 -- spouses
            for familyHandle in person.get_family_handle_list():
                family = self.db.get_family_from_handle(familyHandle)
                spouseHandle = utils.find_spouse(person, family)
                if spouseHandle and \
                  spouseHandle not in self.handlesOfPeopleAlreadyProcessed:
                    self.handlesOfPeopleToBeProcessed.add(spouseHandle)

            # step 2 -- parents
            for familyHandle in person.get_parent_family_handle_list():
                family = self.db.get_family_from_handle(familyHandle)
                fatherHandle = family.get_father_handle()
                motherHandle = family.get_mother_handle()
                if fatherHandle and \
                  fatherHandle not in self.handlesOfPeopleAlreadyProcessed:
                    self.handlesOfPeopleToBeProcessed.add(fatherHandle)
                if motherHandle and \
                  motherHandle not in self.handlesOfPeopleAlreadyProcessed:
                    self.handlesOfPeopleToBeProcessed.add(motherHandle)

            # step 3 -- siblings
            for familyHandle in person.get_parent_family_handle_list():
                family = self.db.get_family_from_handle(familyHandle)
                for childRef in family.get_child_ref_list():
                    childHandle = childRef.ref
                    if childHandle and \
                      childHandle not in self.handlesOfPeopleAlreadyProcessed:
                        self.handlesOfPeopleToBeProcessed.add(childHandle)

            # step 4 -- children
            for familyHandle in person.get_family_handle_list():
                family = self.db.get_family_from_handle(familyHandle)
                for childRef in family.get_child_ref_list():
                    childHandle = childRef.ref
                    if childHandle and \
                      childHandle not in self.handlesOfPeopleAlreadyProcessed:
                        self.handlesOfPeopleToBeProcessed.add(childHandle)

    def findUnrelatedPeople(self):
        """Collect every handle in the database that is not in the related set."""
        # update our numbers
        self.numberOfRelatedPeople = len(self.handlesOfPeopleAlreadyProcessed)
        self.numberOfUnrelatedPeople = (self.numberOfPeopleInDatabase -
                                        self.numberOfRelatedPeople)

        if self.numberOfUnrelatedPeople > 0:
            # we have at least 1 "unrelated" person to find

            self.progress.set_pass(
                # translators: leave all/any {...} untranslated
                ngettext("Looking for {number_of} person",
                         "Looking for {number_of} people",
                         self.numberOfUnrelatedPeople
                        ).format(number_of=self.numberOfUnrelatedPeople),
                self.numberOfPeopleInDatabase)

            # loop through everyone in the database
            for handle in self.db.iter_person_handles():
                self.progress.step()

                # if this person is related, then skip to the next one
                if handle in self.handlesOfPeopleAlreadyProcessed:
                    continue

                # if we get here, we have someone who is "not related"
                self.handlesOfPeopleNotRelated.add(handle)

    def populateModel(self):
        """Fill the tree model with unrelated people, grouped under surnames."""
        self.progress.set_pass(
            # translators: leave all/any {...} untranslated
            ngettext("Looking up the name of {number_of} person",
                     "Looking up the names of {number_of} people",
                     self.numberOfUnrelatedPeople
                    ).format(number_of=self.numberOfUnrelatedPeople),
            self.numberOfUnrelatedPeople)

        # loop through the entire list of unrelated people
        for handle in self.handlesOfPeopleNotRelated:
            self.progress.step()
            person = self.db.get_person_from_handle(handle)
            primaryname = person.get_primary_name()
            surname = primaryname.get_surname()
            name = primaryname.get_name()
            gid = person.get_gramps_id()

            # Retrieve the sorted tag list
            tag_list = self.get_tag_list(person)

            # find the names of the parents
            familygid = ''
            parentNames = ''
            parentFamilyHandle = person.get_main_parents_family_handle()
            if parentFamilyHandle:
                parentFamily = self.db.get_family_from_handle(parentFamilyHandle)
                familygid = parentFamily.get_gramps_id()
                fatherName = None
                motherName = None
                fatherHandle = parentFamily.get_father_handle()
                if fatherHandle:
                    father = self.db.get_person_from_handle(fatherHandle)
                    fatherName = father.get_primary_name().get_first_name()
                motherHandle = parentFamily.get_mother_handle()
                if motherHandle:
                    mother = self.db.get_person_from_handle(motherHandle)
                    motherName = mother.get_primary_name().get_first_name()

                # now that we have the names, come up with a label we can use
                if fatherName:
                    parentNames += fatherName
                if fatherName and motherName:
                    parentNames += ' & '
                if motherName:
                    parentNames += motherName

            # get the surname node (or create it if it doesn't exist)

            # start with the root
            iter = self.model.get_iter_first()

            # look for a node with a matching surname
            while iter:
                if self.model.get_value(iter, 0) == surname:
                    break;
                iter = self.model.iter_next(iter)

            # if we don't have a valid iter, then create a new top-level node
            if not iter:
                iter = self.model.append(None, [surname, '', '', '', ''])

            # finally, we now get to add this person to the model
            self.model.append(iter, [name, gid, parentNames, tag_list,
                                     familygid])

    def build_menu_names(self, obj):
        """Return the (menu, window) title pair required by ManagedWindow."""
        return (self.title, None)

    def get_tag_list(self, person):
        """
        Return a sorted list of tag names for the given person.
        """
        tags = []
        for handle in person.get_tag_list():
            tag = self.db.get_tag_from_handle(handle)
            tags.append(tag.get_name())
        tags.sort(key=glocale.sort_key)
        # TODO for Arabic, should the next line's comma be translated?
        return ', '.join(tags)
class NotRelatedOptions(tool.ToolOptions):
    """
    Defines options and provides handling interface.
    """

    def __init__(self, name, person_id=None):
        """ Initialize the options class """
        # This tool has no options of its own; delegate to the base class.
        tool.ToolOptions.__init__(self, name, person_id)
|
'''command to send changesets as (a series of) patch emails
The series is started off with a "[PATCH 0 of N]" introduction, which
describes the series as a whole.
Each patch email has a Subject line of "[PATCH M of N] ...", using the
first line of the changeset description as the subject text. The
message contains two or three body parts:
- The changeset description.
- [Optional] The result of running diffstat on the patch.
- The patch itself, as generated by :hg:`export`.
Each message refers to the first in the series using the In-Reply-To
and References headers, so they will show up as a sequence in threaded
mail and news readers, and in mail archives.
To configure other defaults, add a section like this to your
configuration file::
[email]
from = My Name <my@email>
to = recipient1, recipient2, ...
cc = cc1, cc2, ...
bcc = bcc1, bcc2, ...
reply-to = address1, address2, ...
Use ``[patchbomb]`` as configuration section name if you need to
override global ``[email]`` address settings.
Then you can use the :hg:`email` command to mail a series of
changesets as a patchbomb.
You can also either configure the method option in the email section
to be a sendmail compatible mailer or fill out the [smtp] section so
that the patchbomb extension can automatically send patchbombs
directly from the commandline. See the [email] and [smtp] sections in
hgrc(5) for details.
'''
import os, errno, socket, tempfile, cStringIO
import email
import email.Generator
import email.MIMEMultipart
from mercurial import cmdutil, commands, hg, mail, patch, util
from mercurial import scmutil
from mercurial.i18n import _
from mercurial.node import bin
# Command registration: @command populates cmdtable, which Mercurial reads
# when loading the extension.
cmdtable = {}
command = cmdutil.command(cmdtable)
# 'internal' marks this as an extension shipped with Mercurial itself
testedwith = 'internal'
def prompt(ui, prompt, default=None, rest=':'):
    """Ask the user a question via *ui*, showing *default* (if any) in brackets."""
    text = prompt
    if default:
        text = '%s [%s]' % (text, default)
    return ui.prompt(text + rest, default)
def introwanted(opts, number):
    '''is an introductory message apparently wanted?'''
    # A series of more than one patch always gets an intro; otherwise the
    # user must have asked for one (--intro) or supplied a description file.
    if number > 1:
        return True
    return opts.get('intro') or opts.get('desc')
def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
              patchname=None):
    """Build one email message for a single patch.

    patchlines is the patch as a list of text lines (hg export output);
    idx/total give this patch's 1-based position in the series. Returns a
    (message, subject, diffstat) tuple. Raises ValueError when neither a
    patch name nor a '# Node ID' line can be found.

    NOTE: mutates patchlines in place when --plain strips the hg header.
    """
    # Collect the changeset description (lines before the first diff) and
    # the node id from the '# Node ID' header line.
    desc = []
    node = None
    body = ''

    for line in patchlines:
        if line.startswith('#'):
            if line.startswith('# Node ID'):
                node = line.split()[-1]
            continue
        if line.startswith('diff -r') or line.startswith('diff --git'):
            break
        desc.append(line)

    if not patchname and not node:
        raise ValueError

    # With --attach (and no --body) the email body is just the description.
    if opts.get('attach') and not opts.get('body'):
        body = ('\n'.join(desc[1:]).strip() or
                'Patch subject is complete summary.')
        body += '\n\n\n'

    # --plain: strip the '# ...' hg header lines (and the blank line after)
    # from the patch text itself.
    if opts.get('plain'):
        while patchlines and patchlines[0].startswith('# '):
            patchlines.pop(0)
        if patchlines:
            patchlines.pop(0)
        while patchlines and not patchlines[0].strip():
            patchlines.pop(0)

    ds = patch.diffstat(patchlines, git=opts.get('git'))
    if opts.get('diffstat'):
        body += ds + '\n\n'

    # The patch goes in the body (default / --body) and/or as an attachment.
    addattachment = opts.get('attach') or opts.get('inline')
    if not addattachment or opts.get('body'):
        body += '\n'.join(patchlines)

    if addattachment:
        msg = email.MIMEMultipart.MIMEMultipart()
        if body:
            msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
                               opts.get('test'))
        binnode = bin(node)
        # if node is mq patch, it will have the patch file's name as a tag
        if not patchname:
            patchtags = [t for t in repo.nodetags(binnode)
                         if t.endswith('.patch') or t.endswith('.diff')]
            if patchtags:
                patchname = patchtags[0]
            elif total > 1:
                patchname = cmdutil.makefilename(repo, '%b-%n.patch',
                                                 binnode, seqno=idx,
                                                 total=total)
            else:
                patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
        disposition = 'inline'
        if opts.get('attach'):
            disposition = 'attachment'
        p['Content-Disposition'] = disposition + '; filename=' + patchname
        msg.attach(p)
    else:
        msg = mail.mimetextpatch(body, display=opts.get('test'))

    # Optional user-supplied flags go inside the [PATCH ...] prefix.
    flag = ' '.join(opts.get('flag'))
    if flag:
        flag = ' ' + flag

    # Subject: first description line, '[PATCH]' or '[PATCH i of N]' prefix.
    subj = desc[0].strip().rstrip('. ')
    if not numbered:
        subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
    else:
        tlen = len(str(total))
        subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
    msg['X-Mercurial-Node'] = node
    msg['X-Mercurial-Series-Index'] = '%i' % idx
    msg['X-Mercurial-Series-Total'] = '%i' % total
    return msg, subj, ds
# Email-related options shared by the `hg email` command (appended to its
# option table below). Each entry is a fancyopts tuple:
# (short flag, long flag, default value, help text).
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients'))]
@command('email',
[('g', 'git', None, _('use git extended diff format')),
('', 'plain', None, _('omit hg patch header')),
('o', 'outgoing', None,
_('send changes not found in the target repository')),
('b', 'bundle', None, _('send changes not in target as a binary bundle')),
('', 'bundlename', 'bundle',
_('name of the bundle attachment file'), _('NAME')),
('r', 'rev', [], _('a revision to send'), _('REV')),
('', 'force', None, _('run even when remote repository is unrelated '
'(with -b/--bundle)')),
('', 'base', [], _('a base changeset to specify instead of a destination '
'(with -b/--bundle)'), _('REV')),
('', 'intro', None, _('send an introduction email for a single patch')),
] + emailopts + commands.remoteopts,
_('hg email [OPTION]... [DEST]...'))
def patchbomb(ui, repo, *revs, **opts):
'''send changesets by email
By default, diffs are sent in the format generated by
:hg:`export`, one per message. The series starts with a "[PATCH 0
of N]" introduction, which describes the series as a whole.
Each patch email has a Subject line of "[PATCH M of N] ...", using
the first line of the changeset description as the subject text.
The message contains two or three parts. First, the changeset
description.
With the -d/--diffstat option, if the diffstat program is
installed, the result of running diffstat on the patch is inserted.
Finally, the patch itself, as generated by :hg:`export`.
With the -d/--diffstat or --confirm options, you will be presented
with a final summary of all messages and asked for confirmation before
the messages are sent.
By default the patch is included as text in the email body for
easy reviewing. Using the -a/--attach option will instead create
an attachment for the patch. With -i/--inline an inline attachment
will be created. You can include a patch both as text in the email
body and as a regular or an inline attachment by combining the
-a/--attach or -i/--inline with the --body option.
With -o/--outgoing, emails will be generated for patches not found
in the destination repository (or only those which are ancestors
of the specified revisions if any are provided)
With -b/--bundle, changesets are selected as for --outgoing, but a
single email containing a binary Mercurial bundle as an attachment
will be sent.
With -m/--mbox, instead of previewing each patchbomb message in a
pager or sending the messages directly, it will create a UNIX
mailbox file with the patch emails. This mailbox file can be
previewed with any mail user agent which supports UNIX mbox
files.
With -n/--test, all steps will run, but mail will not be sent.
You will be prompted for an email recipient address, a subject and
an introductory message describing the patches of your patchbomb.
Then when all is done, patchbomb messages are displayed. If the
PAGER environment variable is set, your pager will be fired up once
for each patchbomb message, so you can verify everything is alright.
In case email sending fails, you will find a backup of your series
introductory message in ``.hg/last-email.txt``.
Examples::
hg email -r 3000 # send patch 3000 only
hg email -r 3000 -r 3001 # send patches 3000 and 3001
hg email -r 3000:3005 # send patches 3000 through 3005
hg email 3000 # send patch 3000 (deprecated)
hg email -o # send all patches not in default
hg email -o DEST # send all patches not in DEST
hg email -o -r 3000 # send all ancestors of 3000 not in default
hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
hg email -b # send bundle of all patches not in default
hg email -b DEST # send bundle of all patches not in DEST
hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
hg email -o -m mbox && # generate an mbox file...
mutt -R -f mbox # ... and view it with mutt
hg email -o -m mbox && # generate an mbox file ...
formail -s sendmail \\ # ... and use formail to send from the mbox
-bm -t < mbox # ... using sendmail
Before using this command, you will need to enable email in your
hgrc. See the [email] section in hgrc(5) for details.
'''
_charsets = mail._charsets(ui)
bundle = opts.get('bundle')
date = opts.get('date')
mbox = opts.get('mbox')
outgoing = opts.get('outgoing')
rev = opts.get('rev')
# internal option used by pbranches
patches = opts.get('patches')
def getoutgoing(dest, revs):
'''Return the revisions present locally but not in dest'''
url = ui.expandpath(dest or 'default-push', dest or 'default')
url = hg.parseurl(url)[0]
ui.status(_('comparing with %s\n') % util.hidepassword(url))
revs = [r for r in scmutil.revrange(repo, revs) if r >= 0]
if not revs:
revs = [len(repo) - 1]
revs = repo.revs('outgoing(%s) and ::%ld', dest or '', revs)
if not revs:
ui.status(_("no changes found\n"))
return []
return [str(r) for r in revs]
def getpatches(revs):
prev = repo['.'].rev()
for r in scmutil.revrange(repo, revs):
if r == prev and (repo[None].files() or repo[None].deleted()):
ui.warn(_('warning: working directory has '
'uncommitted changes\n'))
output = cStringIO.StringIO()
cmdutil.export(repo, [r], fp=output,
opts=patch.diffopts(ui, opts))
yield output.getvalue().split('\n')
def getbundle(dest):
tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
tmpfn = os.path.join(tmpdir, 'bundle')
try:
commands.bundle(ui, repo, tmpfn, dest, **opts)
fp = open(tmpfn, 'rb')
data = fp.read()
fp.close()
return data
finally:
try:
os.unlink(tmpfn)
except OSError:
pass
os.rmdir(tmpdir)
if not (opts.get('test') or mbox):
# really sending
mail.validateconfig(ui)
if not (revs or rev or outgoing or bundle or patches):
raise util.Abort(_('specify at least one changeset with -r or -o'))
if outgoing and bundle:
raise util.Abort(_("--outgoing mode always on with --bundle;"
" do not re-specify --outgoing"))
if outgoing or bundle:
if len(revs) > 1:
raise util.Abort(_("too many destinations"))
dest = revs and revs[0] or None
revs = []
if rev:
if revs:
raise util.Abort(_('use only one form to specify the revision'))
revs = rev
if outgoing:
revs = getoutgoing(dest, rev)
if bundle:
opts['revs'] = revs
# start
if date:
start_time = util.parsedate(date)
else:
start_time = util.makedate()
def genmsgid(id):
return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
def getdescription(body, sender):
if opts.get('desc'):
body = open(opts.get('desc')).read()
else:
ui.write(_('\nWrite the introductory message for the '
'patch series.\n\n'))
body = ui.edit(body, sender)
# Save series description in case sendmail fails
msgfile = repo.opener('last-email.txt', 'wb')
msgfile.write(body)
msgfile.close()
return body
def getpatchmsgs(patches, patchnames=None):
msgs = []
ui.write(_('this patch series consists of %d patches.\n\n')
% len(patches))
# build the intro message, or skip it if the user declines
if introwanted(opts, len(patches)):
msg = makeintro(patches)
if msg:
msgs.append(msg)
# are we going to send more than one message?
numbered = len(msgs) + len(patches) > 1
# now generate the actual patch messages
name = None
for i, p in enumerate(patches):
if patchnames:
name = patchnames[i]
msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
len(patches), numbered, name)
msgs.append(msg)
return msgs
def makeintro(patches):
tlen = len(str(len(patches)))
flag = opts.get('flag') or ''
if flag:
flag = ' ' + ' '.join(flag)
prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
subj = (opts.get('subject') or
prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
if not subj:
return None # skip intro if the user doesn't bother
subj = prefix + ' ' + subj
body = ''
if opts.get('diffstat'):
# generate a cumulative diffstat of the whole patch series
diffstat = patch.diffstat(sum(patches, []))
body = '\n' + diffstat
else:
diffstat = None
body = getdescription(body, sender)
msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
msg['Subject'] = mail.headencode(ui, subj, _charsets,
opts.get('test'))
return (msg, subj, diffstat)
def getbundlemsgs(bundle):
subj = (opts.get('subject')
or prompt(ui, 'Subject:', 'A bundle for your repository'))
body = getdescription('', sender)
msg = email.MIMEMultipart.MIMEMultipart()
if body:
msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
datapart.set_payload(bundle)
bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
datapart.add_header('Content-Disposition', 'attachment',
filename=bundlename)
email.Encoders.encode_base64(datapart)
msg.attach(datapart)
msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
return [(msg, subj, None)]
sender = (opts.get('from') or ui.config('email', 'from') or
ui.config('patchbomb', 'from') or
prompt(ui, 'From', ui.username()))
if patches:
msgs = getpatchmsgs(patches, opts.get('patchnames'))
elif bundle:
msgs = getbundlemsgs(getbundle(dest))
else:
msgs = getpatchmsgs(list(getpatches(revs)))
    # Human-readable "Header: addresses" lines collected for the summary.
    showaddrs = []
    def getaddrs(header, ask=False, default=None):
        """Resolve an address header from CLI options, config, or a prompt.

        Returns the encoded address list, or `default` when nothing was
        provided anywhere.  Also records a display line in `showaddrs`.
        """
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey) or
                ui.config('patchbomb', configkey) or
                '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default
    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise util.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')
    # With --diffstat or --confirm, show a summary and ask before sending.
    if opts.get('diffstat') or opts.get('confirm'):
        ui.write(_('\nFinal summary:\n\n'))
        ui.write(('From: %s\n' % sender))
        for addr in showaddrs:
            ui.write('%s\n' % addr)
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj))
            if ds:
                ui.write(ds)
            ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'
                             '$$ &Yes $$ &No')):
            raise util.Abort(_('patchbomb canceled'))
        ui.write('\n')
    # Normalize --in-reply-to into a proper <msg-id> for threading.
    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'
    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    # SMTP connection is opened lazily on the first real send.
    sendmail = None
    firstpatch = None
    # Stamp threading/identity headers on each message, then either display
    # it (--test) or hand it to the mail transport.
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            # intro/bundle messages have no X-Mercurial-Node header
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']
        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
        # give each message a strictly increasing Date so clients sort stably
        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            # --test: display through $PAGER (or the ui) instead of sending
            ui.status(_('displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError, inst:
                # a closed pager (EPIPE) is not an error
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                # temporarily loosen cert checking for --insecure
                verifycert = ui.config('smtp', 'verifycert')
                if opts.get('insecure'):
                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
                try:
                    sendmail = mail.connect(ui, mbox=mbox)
                finally:
                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = email.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())
    ui.progress(_('writing'), None)
    ui.progress(_('sending'), None)
|
import sys, os
from corpus import *
import hdp
import cPickle
import random, time
from numpy import cumsum, sum
from itertools import izip
from optparse import OptionParser
from glob import glob
np = hdp.np
def parse_args():
    """Parse the HDP trainer's command line and return the options object."""
    parser = OptionParser()
    parser.set_defaults(T=300, K=20, D=-1, W=-1, eta=0.01, alpha=1.0, gamma=1.0,
                        max_time=100, max_iter=-1, var_converge=0.0001,
                        random_seed=999931111, corpus_name=None, data_path=None,
                        test_data_path=None, test_data_path_in_folds=None,
                        directory=None)
    # (flag, option type, help text); dest is always the flag name itself.
    option_specs = (
        ("T", "int", "top level truncation [300]"),
        ("K", "int", "second level truncation [20]"),
        ("D", "int", "number of documents [-1]"),
        ("W", "int", "size of vocabulary [-1]"),
        ("eta", "float", "the topic Dirichlet [0.01]"),
        ("alpha", "float", "alpha value [1.0]"),
        ("gamma", "float", "gamma value [1.0]"),
        ("max_time", "int", "max time to run training in seconds [100]"),
        ("max_iter", "int", "max iteration to run training [-1]"),
        ("var_converge", "float", "relative change on doc lower bound [0.0001]"),
        ("random_seed", "int", "the random seed [999931111]"),
        ("corpus_name", "string", "the corpus name: nature, nyt or wiki [None]"),
        ("data_path", "string", "training data path or pattern [None]"),
        ("test_data_path", "string", "testing data path [None]"),
        ("test_data_path_in_folds", "string",
         "testing data prefix for different folds [None]"),
        ("directory", "string", "output directory [None]"),
    )
    for flag, opt_type, help_text in option_specs:
        parser.add_option("--" + flag, type=opt_type, dest=flag, help=help_text)
    options, _ = parser.parse_args()
    return options
def run_hdp():
    """Train an online HDP topic model and log held-out predictions.

    Behaviour is driven entirely by the command line (see parse_args);
    logs, topic dumps and pickled models are written under
    <directory>/corpus-<corpus_name>/.
    """
    options = parse_args()
    random.seed(options.random_seed)
    # Read the training data.
    c_train_filename = options.data_path
    test_data_path = options.test_data_path
    c_test = read_data(test_data_path)
    c_test_word_count = sum([doc.total for doc in c_test.docs])
    # Optional per-fold test data; after sorting, filenames are assumed to
    # alternate (test, train) for each fold -- TODO confirm naming scheme.
    if options.test_data_path_in_folds is not None:
        test_data_path_in_folds = options.test_data_path_in_folds
        test_data_in_folds_filenames = glob(test_data_path_in_folds)
        test_data_in_folds_filenames.sort()
        num_folds = len(test_data_in_folds_filenames)/2
        test_data_train_filenames = []
        test_data_test_filenames = []
        for i in range(num_folds):
            test_data_train_filenames.append(test_data_in_folds_filenames[2*i+1])
            test_data_test_filenames.append(test_data_in_folds_filenames[2*i])
        c_test_train_folds = [read_data(filename) for filename in test_data_train_filenames]
        c_test_test_folds = [read_data(filename) for filename in test_data_test_filenames]
    result_directory = "%s/corpus-%s" % (options.directory, options.corpus_name)
    print "creating directory %s" % result_directory
    if not os.path.isdir(result_directory):
        os.makedirs(result_directory)
    # Record the options used for this run, for reproducibility.
    options_file = file("%s/options.dat" % result_directory, "w")
    for opt, value in options.__dict__.items():
        options_file.write(str(opt) + " " + str(value) + "\n")
    options_file.close()
    print "creating hdp instance."
    bhdp_hp = hdp.hdp_hyperparameter(options.alpha, options.alpha, options.gamma, options.gamma, False)
    bhdp = hdp.hdp(options.T, options.K, options.D, options.W, options.eta, bhdp_hp)
    #bhdp.seed_init(c_train)
    print "setting up counters and log files."
    iter = 0
    total_time = 0.0
    total_doc_count = 0
    likelihood = 0.0
    old_likelihood = 0.0
    converge = 1.0
    log_file = file("%s/log.dat" % result_directory, "w")
    log_file.write("iteration time doc.count likelihood\n")
    test_log_file = file("%s/test-log.dat" % result_directory, "w")
    test_log_file.write("iteration time doc.count score word.count score.split word.count.split\n")
    # Main training loop: bounded by --max_iter (if >= 0) and --max_time.
    while (options.max_iter == -1 or iter < options.max_iter) and total_time < options.max_time:
        t0 = time.clock()
        # Run one step iteration.
        likelihood = bhdp.em_on_large_data(c_train_filename, options.var_converge, fresh=(iter==0))
        if iter > 0:
            converge = (likelihood - old_likelihood)/abs(old_likelihood)
        old_likelihood = likelihood
        print "iter = %d, likelihood = %f, converge = %f" % (iter, likelihood, converge)
        if converge < 0:
            print "warning, likelihood is decreasing!"
        total_time += time.clock() - t0
        iter += 1 # increase the iter counter
        total_doc_count += options.D
        log_file.write("%d %d %d %.5f\n" % (iter, total_time, total_doc_count, likelihood))
        log_file.flush()
        # Checkpoint: dump the current topics and the pickled model.
        bhdp.save_topics('%s/doc_count-%d.topics' % (result_directory, total_doc_count))
        cPickle.dump(bhdp, file('%s/doc_count-%d.model' % (result_directory, total_doc_count), 'w'), -1)
        print "\tworking on predictions."
        (lda_alpha, lda_beta) = bhdp.hdp_to_lda()
        # prediction on the fixed test in folds
        print "\tworking on fixed test data."
        test_score = 0.0
        test_score_split = 0.0
        c_test_word_count_split = 0
        for doc in c_test.docs:
            (likelihood, gamma) = hdp.lda_e_step(doc, lda_alpha, lda_beta)
            test_score += likelihood
            (likelihood, count, gamma) = hdp.lda_e_step_split(doc, lda_alpha, lda_beta)
            test_score_split += likelihood
            c_test_word_count_split += count
        test_log_file.write("%d %d %d %.5f %d %.5f %d\n" % (iter, total_time,
            total_doc_count, test_score, c_test_word_count,
            test_score_split, c_test_word_count_split))
        test_log_file.flush()
        # prediction on the test set in the folds
        # print "\tworking on test data in folds."
        # test_folds_log_file = file("%s/doc_count-%d.test.folds" % (result_directory, total_doc_count), "w")
        # test_folds_log_file.write("fold doc.id word count score\n")
        # for i in range(num_folds):
        #     train_data = c_test_train_folds[i]
        #     test_data = c_test_test_folds[i]
        #     for (doc_id, train_doc, test_doc) in izip(range(train_data.num_docs), train_data.docs, test_data.docs):
        #         if test_doc.total > 0:
        #             (likelihood, gamma) = hdp.lda_e_step(train_doc, lda_alpha, lda_beta)
        #             theta = gamma/np.sum(gamma)
        #             lda_betad = lda_beta[:, test_doc.words]
        #             log_predicts = np.log(np.dot(theta, lda_betad))
        #             log_info = "\n".join(["%d %d %d %d %.5f" % (i, doc_id, word, word_count, f) for (word, word_count, f) in izip(test_doc.words, test_doc.counts, log_predicts)])
        #             test_folds_log_file.write(log_info + "\n")
        #     test_folds_log_file.close()
    log_file.close()
    print "Saving the final model and topics."
    bhdp.save_topics('%s/final.topics' % result_directory)
    cPickle.dump(bhdp, file('%s/final.model' % result_directory, 'w'), -1)
    # Final evaluation pass, same as the per-iteration one above.
    (lda_alpha, lda_beta) = bhdp.hdp_to_lda()
    # prediction on the fixed test in folds
    print "\tworking on fixed test data."
    test_score = 0.0
    test_score_split = 0.0
    c_test_word_count_split = 0
    for doc in c_test.docs:
        (likelihood, gamma) = hdp.lda_e_step(doc, lda_alpha, lda_beta)
        test_score += likelihood
        (likelihood, count, gamma) = hdp.lda_e_step_split(doc, lda_alpha, lda_beta)
        test_score_split += likelihood
        c_test_word_count_split += count
    test_log_file.write("%d %d %d %.5f %d %.5f %d\n" % (iter, total_time,
        total_doc_count, test_score, c_test_word_count,
        test_score_split, c_test_word_count_split))
    test_log_file.flush()
    test_log_file.close()
# Script entry point.
if __name__ == '__main__':
    run_hdp()
|
import pygame
from pygame.locals import *
import sys
from datetime import datetime
BLANCO = (255,255,255)
class Clock( ):
    """Simple mm:ss game clock driven by the system wall clock."""

    # Class-level state; fine for the single clock instance this game uses.
    sec_anterior = -1   # last system-clock second observed
    minutes = 0         # elapsed game minutes
    seconds = 0         # elapsed game seconds
    sec_toShow = -1
    starting = True     # True until the first tick is observed

    def get_min_sec(self, time):
        """Return the 'MM:SS' part of a 'YYYY-MM-DD HH:MM:SS' timestamp."""
        # Slice instead of concatenating characters 14..18 one by one.
        return time[14:19]

    def getTime(self):
        """Return the current local time formatted 'YYYY-MM-DD HH:MM:SS'."""
        return datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    def showTime(self, minToShow, secToShow, ventana):
        """Render minutes/seconds as zero-padded 'MM:SS' onto `ventana`."""
        timeToShow = '%02d:%02d' % (minToShow, secToShow)
        fuente = pygame.font.Font(None, 30)
        time = fuente.render(timeToShow, 1, BLANCO)
        pos = (30, 60)
        ventana.blit(time, pos)

    def mainClock(self, ventana, gameover, modoPregunta, modoPausa):
        """Advance and draw the clock.

        While a question or pause overlay is active, or after game over,
        the clock is drawn frozen; otherwise it advances one second for
        every change of the system clock's seconds digits.
        """
        currentSec = 0
        if modoPregunta or modoPausa:
            # Frozen display while paused / showing a question.
            self.showTime(self.minutes, self.seconds, ventana)
        elif not gameover:
            self.showTime(self.minutes, self.seconds, ventana)
            currentTime = self.get_min_sec(self.getTime())
            currentSec = int(currentTime[3:5])   # seconds digits of 'MM:SS'
            if self.starting:
                # First tick: just record the reference second.
                self.starting = False
                self.sec_anterior = currentSec
            # Each change in the system clock's seconds advances the game clock.
            if currentSec != self.sec_anterior:
                self.sec_anterior = currentSec
                if self.seconds < 59:
                    self.seconds += 1
                else:
                    self.minutes += 1
                    self.seconds = 0
        else:
            self.showTime(self.minutes, self.seconds, ventana)
|
import matplotlib
matplotlib.use('Agg')
from boids.flock import Flock
from mock import Mock, patch
from nose.tools import assert_equal, assert_almost_equal
import os
import yaml
def test_bad_boids_regression():
    """Regression: update_boids must reproduce the recorded 'after' state."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    boid_data = regression_data["before"]
    flock = Flock.from_data(boid_data)
    flock.update_boids()
    res = flock.data
    for after, calculated in zip(regression_data["after"], res):
        for after_value, calculated_value in zip(after, calculated):
            assert_almost_equal(after_value, calculated_value, delta=0.01)
def test_move_to_middle_regression():
    """Regression: move_to_middle must match the recorded 'middle' state."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    flock = Flock.from_data(regression_data['before'])
    flock.move_to_middle()
    res = flock.data
    for after, calculated in zip(regression_data["middle"], res):
        for after_value, calculated_value in zip(after, calculated):
            assert_almost_equal(after_value, calculated_value, delta=0.01)
def test_avoid_nearby_regression():
    """Regression: avoid_nearby_birds must match the recorded 'avoid' state."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    flock = Flock.from_data(regression_data['before'])
    flock.avoid_nearby_birds()
    res = flock.data
    for after, calculated in zip(regression_data["avoid"], res):
        for after_value, calculated_value in zip(after, calculated):
            assert_almost_equal(after_value, calculated_value, delta=0.01)
def test_match_speed_regression():
    """Regression: match_speed_to_nearby_birds must match 'match' state."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    flock = Flock.from_data(regression_data['before'])
    flock.match_speed_to_nearby_birds()
    res = flock.data
    for after, calculated in zip(regression_data["match"], res):
        for after_value, calculated_value in zip(after, calculated):
            assert_almost_equal(after_value, calculated_value, delta=0.01)
def test_from_data():
    """Flock.from_data must expose exactly the data it was built from."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        data = yaml.safe_load(fixture_file)
    flock = Flock.from_data(data["before"])
    imported = flock.data
    for real, got in zip(data["before"], imported):
        for real_value, got_value in zip(real, got):
            assert_equal(real_value, got_value)
def test_properties():
    """offset_tuple must zip the x/y columns; data must round-trip."""
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'fixture.yml')
    # Close the file handle and avoid executing arbitrary YAML tags.
    with open(fixture_path, 'r') as fixture_file:
        data = yaml.safe_load(fixture_file)
    flock = Flock.from_data(data["after"])
    offset = flock.offset_tuple
    true_offset = list(zip(data['after'][0], data['after'][1])) #Caused python3 test to fail
    got_data = flock.data
    for after, got in zip(data["after"], got_data):
        for after_value, got_value in zip(after, got):
            assert_almost_equal(after_value, got_value, delta=0.01)
    for n, item in enumerate(offset):
        assert item[0] == true_offset[n][0]
        assert item[1] == true_offset[n][1]
def test_conf_loader():
    """Every config value must appear as a Flock attribute, for both the
    default and a non-default configuration."""
    default_path = os.path.join(os.path.dirname(__file__), '..', 'config.yml')
    # Close the file handles and avoid executing arbitrary YAML tags.
    with open(default_path, 'r') as conf_file:
        default_conf = yaml.safe_load(conf_file)
    flock = Flock()
    for key, item in default_conf.items():
        for sub_key in item.keys():
            assert getattr(flock, sub_key) == item[sub_key]
    fixture_path = os.path.join(os.path.dirname(__file__),
                                'Fixtures', 'config.yml')
    with open(fixture_path, 'r') as conf_file:
        non_default_conf = yaml.safe_load(conf_file)
    flock = Flock(conf=non_default_conf)
    for key, item in non_default_conf.items():
        for sub_key in item.keys():
            assert getattr(flock, sub_key) == item[sub_key]
@patch('numpy.random.uniform',return_value=[1.])
def test_random_gen(mock_uniform):
    # Flock() should draw x, y, x-velocity and y-velocity for 50 boids
    # from the documented uniform ranges (one call per quantity).
    f = Flock()
    assert mock_uniform.call_count == 4
    mock_uniform.assert_any_call(-450.,50.,50.)
    mock_uniform.assert_any_call(300.,600.,50.)
    mock_uniform.assert_any_call(0.,10.,50.)
    mock_uniform.assert_any_call(-20.,20.,50.)
@patch('matplotlib.animation.FuncAnimation')
def test_gen_animation(mock_funcanim):
    # gen_animation should build a 50-frame animation at 50ms intervals
    # over the flock's figure, using Flock.animate as the step callback.
    f = Flock()
    f.gen_animation()
    mock_funcanim.assert_called_with(f.figure,f.animate,frames=50,interval=50)
@patch('matplotlib.collections.PathCollection.set_offsets')
@patch('boids.Flock.update_boids')
def test_animate(mock_update,mock_scatter):
    # One animation step: update the model, then push new scatter offsets.
    f = Flock()
    f.animate(Mock())
    assert mock_update.called
    mock_scatter.assert_called_with(f.offset_tuple)
|
"""
This file is part of AWE
Copyright (C) 2012- University of Notre Dame
This software is distributed under the GNU General Public License.
See the file COPYING for details.
"""
import traceback
import os
class TypeException(Exception):
    """Raised when a value's type does not match the declared expectation."""
class _typecheck(object):
    """Decorator factory enforcing runtime type checks on call arguments.

    Declare expectations with `check(...)`, then apply the instance as a
    decorator.  Mismatches raise TypeException with a formatted stack trace.
    """

    def __init__(self, method=True):
        # When wrapping a method, positional slot 0 is `self` and gets a
        # None placeholder in the expected-types list.
        self.method = method

    def check(self, *args, **kws):
        """Record expected positional/keyword types; returns self (fluent)."""
        if self.method:
            self.args = [None] + list(args)
        else:
            self.args = args
        self.kws = kws
        return self

    def typecheck(self, value, expected, name='', arg=-1):
        """Raise TypeException unless type(value) is exactly `expected`."""
        typ = type(value)
        if typ is not expected:
            # Python-3 compatible raise (the old `raise X, msg` is py2-only).
            raise TypeException('%s expected: %s, but got: %s'
                                % (name or 'param %s' % arg, expected, typ))

    def __call__(self, fn):
        def wrapped(*args, **kws):
            try:
                i = -1
                for v, t in zip(args, self.args):
                    i += 1
                    # NOTE(review): this skips *every* positional check in
                    # method mode, not just `self` -- confirm intent.
                    if self.method: continue
                    self.typecheck(v, t, arg=i)
                del i
                for n, e in self.kws.items():
                    if n in kws:
                        self.typecheck(kws[n], e, name=n)
            except TypeException as ex:
                # Prepend an indented stack trace so the failure shows where
                # the badly-typed call came from.
                stack = traceback.extract_stack()
                stack = ''.join(traceback.format_list(stack[:-1]))
                stack = '\n\t'.join(('\t' + stack).split('\n'))
                raise TypeException('%s:\n\n%s\n\t%s' % (fn, stack, ex))
            return fn(*args, **kws)
        # __name__/__doc__ work on both Python 2.6+ and 3 (func_name is py2-only).
        wrapped.__name__ = fn.__name__
        wrapped.__doc__ = fn.__doc__
        return wrapped
class returns(object):
    """Decorator asserting the wrapped function returns `expected`'s type.

    A mismatching result raises TypeError with a formatted stack trace.
    """

    def __init__(self, expected):
        self._expected = expected

    @property
    def expected(self):
        # The declared result type.
        return self._expected

    def typecheck(self, value):
        """Return True iff `value`'s exact type matches the expectation."""
        typ = type(value)
        return typ is self.expected

    def __call__(self, fn):
        def wrapped(*args, **kws):
            result = fn(*args, **kws)
            if self.typecheck(result):
                return result
            else:
                # Prepend an indented stack trace for context.
                stack = traceback.extract_stack()
                stack = ''.join(traceback.format_list(stack[:-1]))
                stack = '\n\t'.join(('\t' + stack).split('\n'))
                # Python-3 compatible raise (the old `raise X, msg` is py2-only).
                raise TypeError('Result of %s(*%s, **%s) should be %s but is %s' %
                                (fn, args, kws, self.expected, type(result)))
        # __name__/__doc__ work on both Python 2.6+ and 3 (func_name is py2-only).
        wrapped.__name__ = fn.__name__
        wrapped.__doc__ = '%s -> %s\n\n%s' % (fn.__name__, self.expected, fn.__doc__ or '')
        return wrapped
def typecheck(*args, **kws):
    """Build a method-style argument type checker (skips the `self` slot)."""
    return _typecheck(method=True).check(*args, **kws)
def typecheckfn(*args, **kws):
    """Build a plain-function argument type checker (checks every slot)."""
    return _typecheck(method=False).check(*args, **kws)
def deprecated(fn):
    """Decorator that prints a warning each time `fn` is invoked."""
    def wrapped(*args, **kws):
        # print(...) with a single argument is valid on Python 2 and 3;
        # fn.__name__ replaces the py2-only fn.func_name.
        print('WARNING: call to deprecated function: %s' % fn.__name__)
        return fn(*args, **kws)
    wrapped.__name__ = fn.__name__
    wrapped.__doc__ = fn.__doc__
    return wrapped
def checkpicklable(d):
    """Warn about dict values that define __slots__ but not __getstate__
    (such objects do not pickle their slot data), then recurse into
    d.__dict__ when present.
    """
    for v in d.values():
        try:
            slots = v.__slots__
            hasslots = True
        except AttributeError:
            hasslots = False
        try:
            getstate = v.__getstate__
            # BUG FIX: this previously set `hasslots = True`, leaving
            # `hasgetstate` unbound and the warning below unreachable.
            hasgetstate = True
        except AttributeError:
            hasgetstate = False
        if hasslots and not hasgetstate:
            print('%s has slots but not __getstate__' % (type(v),))
    try:
        d2 = d.__dict__
        checkpicklable(d2)
    except AttributeError:
        pass
def abspath(p):
    """Expand ~ and $VARS in `p`, then return its absolute, normalized form."""
    expanded = os.path.expandvars(p)
    expanded = os.path.expanduser(expanded)
    return os.path.abspath(expanded)
def makedirs_parent(p):
    """Ensure the parent directory of path `p` exists, creating it if needed."""
    parent = os.path.dirname(abspath(p))
    if not os.path.exists(parent):
        os.makedirs(parent)
|
"""
hecuba.manager
~~~~~~~~~~~~
Manager CLI
:copyright: (c) 2016 by Hugo Cisneiros.
:license: GPLv2, see LICENSE for more details.
"""
from . import hecuba, db, auth, models, log
from flask.ext.script import Manager, Command, prompt_pass, prompt_choices, prompt_bool
from flask.ext.migrate import Migrate, MigrateCommand
from alembic.config import Config as AlembicConfig
import sys
class RandomPassword(Command):
    """
    Prompts the user for a password to be hashed and shown on screen
    """
    def run(self):
        """
        Prompts the user for a password to be hashed and shown on screen
        """
        try:
            password_one = prompt_pass("Enter a password")
            password_two = prompt_pass("Confirm the password")
        except KeyboardInterrupt:
            # Ctrl-C during either prompt aborts the command.
            print "Generation cancelled."
            sys.exit(1)
        if password_one == password_two:
            print "Generated hash: " + auth.generate_password(password_one)
        else:
            # NOTE(review): retries recursively; repeated mismatches deepen
            # the call stack (harmless for interactive use).
            print "Passwords don't match."
            self.run()
class RunDevServer(Command):
    """
    Runs a local development server using hecuba.run (Werkzeug web-server)
    """
    def run(self):
        """
        Runs a local development server using hecuba.run (Werkzeug web-server)
        """
        # All server parameters come from the Flask app configuration.
        hecuba.run(
            host=hecuba.config['SERVER_LISTEN'],
            port=hecuba.config['SERVER_PORT'],
            debug=hecuba.config['DEBUG'],
            use_reloader=hecuba.config['USE_RELOADER']
        )
class ChangeClusterPassword(Command):
    """
    Prompts the user to Changes a cluster password
    """
    def run(self):
        """
        Prompts the user to Changes a cluster password
        """
        # Offer every known cluster name as a choice.
        clusters = []
        for c in models.Cluster.query.all():
            clusters.append(c.name)
        try:
            cluster = prompt_choices("Which cluster?", clusters, resolve=str)
            # The literal string "None" doubles as the "no password" sentinel
            # throughout this flow.
            password_old = prompt_pass("Enter the current cluster password", "None")
            c = models.Cluster.query.get(cluster)
            # no password provided
            if (password_old == "None"):
                # if cluster has a password, obviously deny the request
                if (c.secret is not None):
                    print "Cluster password is wrong."
                    sys.exit(1)
            # NOTE(review): verify_password is consulted even when the cluster
            # has no password set -- confirm it accepts that case.
            if (auth.verify_password(cluster, password_old)):
                password_one = prompt_pass("Enter the new password", "None")
                password_two = prompt_pass("Confirm the new password", "None")
                if password_one == password_two:
                    # if password is None, we will remove the password, not change it
                    if (password_one == "None" and password_two == "None"):
                        if (prompt_bool("Do you want to remove the password?", True)):
                            c.secret = None
                            action = "removed"
                        else:
                            print "Password change cancelled."
                            sys.exit(1)
                    else:
                        c.secret = auth.generate_password(password_one)
                        action = "changed"
                    try:
                        db.session.commit()
                        log.info("Password for cluster %s was %s." % (cluster, action))
                    except Exception as ex:
                        log.debug(ex)
                        print "Error while changing the cluster password."
                        sys.exit(1)
                else:
                    print "Passwords don't match."
                    sys.exit(1)
            else:
                print "Cluster password is wrong."
                sys.exit(1)
        except KeyboardInterrupt:
            print "Password change cancelled."
            sys.exit(1)
# Wire up the Flask-Script manager and the Flask-Migrate integration.
manager = Manager(hecuba)
migrate = Migrate(hecuba, db)
# Instantiate the CLI commands defined above.
GenPasswordCommand = RandomPassword()
RunDevServerCommand = RunDevServer()
ChangeClusterPasswordCommand = ChangeClusterPassword()
# Register commands under their CLI names.
manager.add_command('db', MigrateCommand)
manager.add_command('gen-password', GenPasswordCommand)
manager.add_command('change-cluster-password', ChangeClusterPasswordCommand)
manager.add_command('runserver', RunDevServerCommand)
@migrate.configure
def configure_alembica(config):
    """
    Dynamically configure alembic to use custom migrations directory
    """
    # Point Alembic at the migrations shipped inside the hecuba package.
    config.set_main_option("script_location", "hecuba:migrations")
    return config
def run():
    """
    Runs the main CLI command
    """
    # Build an Alembic config up front so migration commands work.
    alembic_cfg = AlembicConfig()
    configure_alembica(alembic_cfg)
    manager.run()
|
"""Rozwiązanie zadania 203."""
import sys
import re
# Optional CLI arguments: the word to search for and the context width.
if len(sys.argv) >= 3:
    WORD = str(sys.argv[1])
    WORD_LEN = len(WORD)
    MAX_N = int(sys.argv[2])+1
else:
    MAX_N = 15
    WORD = "have"
    WORD_LEN = len(WORD)

# Print every occurrence of WORD with up to MAX_N characters of context on
# each side (KWIC-style concordance), one "left match right" line each.
for line in sys.stdin:
    a = [m.start() for m in re.finditer(WORD, line)]
    for i in a:
        # max(0, ...) prevents a negative slice start from silently dropping
        # the left context when the match sits near the start of the line;
        # also fixed the stray '+' typo in the right-context slice.
        print(" ".join((
            line[max(0, i-MAX_N):i],
            line[i:i+WORD_LEN],
            line[i+WORD_LEN:i+WORD_LEN+MAX_N]
        )))
|
"""
/***************************************************************************
RuGeocoder
A QGIS plugin
Geocode your csv files to shp
-------------------
begin : 2012-02-20
copyright : (C) 2012 by Nikulin Evgeniy
email : nikulin.e at gmail
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import QColor
from qgis.gui import QgsRubberBand
from qgis.core import QGis, QgsRectangle, QgsCoordinateReferenceSystem, QgsCoordinateTransform
class RubberBandResultRenderer():
    """Draws geocoding result points on the map canvas as a rubber band."""

    def __init__(self, iface):
        self.iface = iface
        # Point-type rubber band drawn on top of the map canvas.
        self.rb = QgsRubberBand(self.iface.mapCanvas(), QGis.Point)
        self.rb.setColor(QColor('magenta'))
        self.rb.setIconSize(12)
        # Geocoder results arrive in WGS84 (EPSG:4326); the destination CRS
        # is set per call in transform_point.
        self.srs_wgs84 = QgsCoordinateReferenceSystem(4326)
        self.transformation = QgsCoordinateTransform(self.srs_wgs84, self.srs_wgs84)

    def show_point(self, point, center=False):
        """Add a WGS84 point to the rubber band, optionally centering on it."""
        #check srs
        if self.need_transform():
            point = self.transform_point(point)
        self.rb.addPoint(point)
        if center:
            self.center_to_point(point)

    def clear(self):
        """Remove all points from the rubber band."""
        self.rb.reset(QGis.Point)

    def need_transform(self):
        """Return True when the canvas CRS differs from WGS84 (EPSG:4326)."""
        return self.iface.mapCanvas().mapRenderer().destinationCrs().postgisSrid() != 4326

    def transform_point(self, point):
        """Reproject `point` from WGS84 into the canvas CRS (None on failure)."""
        dest_srs_id = self.iface.mapCanvas().mapRenderer().destinationCrs().srsid()
        self.transformation.setDestCRSID(dest_srs_id)
        try:
            return self.transformation.transform(point)
        except:
            # NOTE(review): bare except hides the actual transform error;
            # consider narrowing it and surfacing a user-visible message.
            print 'Error on transform!' # DEBUG! need message???
            return

    def center_to_point(self, point):
        """Recenter the canvas around `point`.

        NOTE(review): QgsRectangle.scale(1, point) scales the extent by a
        factor of 1, which looks like a size no-op -- confirm this actually
        recenters the view as intended.
        """
        canvas = self.iface.mapCanvas()
        new_extent = QgsRectangle(canvas.extent())
        new_extent.scale(1, point)
        canvas.setExtent(new_extent)
        canvas.refresh()
|
import pandas as pd
from pandas import DataFrame
# Load per-school graduation outcomes, indexed by school name.
df = pd.read_csv('Graduation_Outcomes_by_SchoolLevel_Gender_2005-2011.csv', index_col = 'School Name')
# Keep only schools whose total cohort exceeds five students.
df2 = df[df['Total Cohort Num'] > 5]
# NOTE(review): selects columns labelled by the integers 1..7 --
# presumably positional; verify the CSV really has integer column labels.
df3 = df2[range(1,8)]
print df3.head()
|
import sys
from app.models import (
CommonColumns,
Owner,
User,
LotUser,
Lot,
StreamLot,
Stream,
Hashtag,
TweetHashtag,
Mention,
TweetMention,
URL,
TweetURL,
Media,
TweetMedia,
Tweet,
app,
db
)
# Rebuild the database schema from scratch: drop every table, then
# recreate them all from the imported model definitions.
db.drop_all(app=app)
db.create_all(app=app)
|
"""Thread-safe Least Recently Used (LRU) cache for storing tiles."""
from __future__ import with_statement
from threading import RLock, Thread
from collections import deque
import time
class TileCache(object):
    """TileCache objects are used for caching tiles in memory.

    Tiles can be accessed in much that same way as `dict` objects:
    `tilecache[tile_id]` holds the tile identified by the given `tile_id`.

    None tiles and tiles whose id has 0 in position 1 are "immortal": they
    do not count towards the stored-tile limit and are never discarded.

    Constructor: TileCache(int, int)
    """

    def __init__(self, maxsize=256, maxage=60):
        """Create a new TileCache object.

        The maximum number of tiles to store is set by `maxsize`. There will be
        no limit if `maxsize` <= 0.

        The maximum age of the tiles (in seconds) allowed before they are
        discarded is set by `maxage`. There will be no limit if `maxage` <= 0.

        None tiles and (0,0,0) tiles do not count towards the number of stored
        tiles and will therefore not be automatically discarded.
        """
        self.__maxsize = maxsize
        self.__maxage = maxage
        self.__d = {}                   # tile_id -> tile
        self.__atime = {}               # tile_id -> last access time (seconds)
        self.__anum = {}                # tile_id -> number of accesses so far
        self.__maxaccesses = {}         # tile_id -> access count causing expiry
        self.__discard_queue = deque()  # mortal tile_ids, least recently used first
        self.__num_tiles = 0            # number of mortal tiles stored
        self.__lock = RLock()
        # Background daemon thread that evicts tiles older than maxage.
        self.__periodic_clean_thread = Thread(target=self.__periodic_clean)
        self.__periodic_clean_thread.setDaemon(True)
        self.__periodic_clean_thread.start()

    def insert(self, tile_id, tile, maxaccesses=0):
        """Insert the `tile` with the given `tile_id` into the cache.

        If `maxaccesses` <= 0, then the behaviour is the same as
        `tilecache[tile_id]=tile`. Otherwise the tile is set to expire after it
        has been accessed `maxaccesses` times.

        insert(tuple<string,int,int,int>, object, int) -> None
        """
        with self.__lock:
            self[tile_id] = tile
            if maxaccesses > 0:
                self.__maxaccesses[tile_id] = maxaccesses

    def __mortal(self, tile_id, tile):
        """Returns a bool indicating whether the given tile is mortal.

        The tile will never be removed from the cache if it is immortal.
        None tiles and (0,0,0) tiles are the only ones considered immortal.

        __mortal(tuple<string,int,int,int>, object) -> bool
        """
        return tile is not None and tile_id[1] != 0

    def __getitem__(self, tile_id):
        with self.__lock:
            if tile_id in self.__d:
                tile = self.__d[tile_id]
                if self.__mortal(tile_id, tile):
                    ## move this tile to the back of the discard queue
                    self.__discard_queue.remove(tile_id)
                    self.__discard_queue.append(tile_id)
                    self.__atime[tile_id] = int(time.time())
                    self.__anum[tile_id] = self.__anum.get(tile_id, 0) + 1
                    if tile_id in self.__maxaccesses and \
                       self.__anum[tile_id] >= self.__maxaccesses[tile_id]:
                        ## tile has expired
                        del self[tile_id]
                return tile
            else:
                # include the key for a more useful error
                raise KeyError(tile_id)

    def __periodic_clean(self):
        """Periodically remove old tiles based on maxage.

        __periodic_clean() -> infinite loop
        """
        while True:
            ## make sure the age of tiles never exceeds 4/3 maxage
            time.sleep(self.__maxage/3)
            with self.__lock:
                while self.__maxage > 0 and self.__discard_queue and \
                      time.time() - self.__atime[self.__discard_queue[0]] \
                      > self.__maxage:
                    tile_id = self.__discard_queue[0]
                    del self[tile_id]

    def __clean(self):
        """Remove the least recently used tiles based on maxsize.

        __clean() -> None
        """
        with self.__lock:
            while self.__maxsize > 0 and self.__num_tiles > self.__maxsize:
                tile_id = self.__discard_queue[0]
                del self[tile_id]

    def __setitem__(self, tile_id, tile):
        with self.__lock:
            if tile_id in self:
                if tile is None:
                    ## don't replace an existing tile with a None tile
                    return
                else:
                    del self[tile_id]
            self.__d[tile_id] = tile
            if self.__mortal(tile_id, tile):
                self.__discard_queue.append(tile_id)
                self.__atime[tile_id] = int(time.time())
                self.__num_tiles += 1
                self.__clean()
            elif tile_id not in self.__d:
                self.__d[tile_id] = None

    def __delitem__(self, tile_id):
        with self.__lock:
            if self.__mortal(tile_id, self.__d[tile_id]):
                self.__discard_queue.remove(tile_id)
                del self.__atime[tile_id]
                self.__num_tiles -= 1
            if tile_id in self.__maxaccesses:
                del self.__maxaccesses[tile_id]
            del self.__d[tile_id]

    def __contains__(self, tile_id):
        with self.__lock:
            return tile_id in self.__d

    def purge(self):
        """Purge all tiles from the cache.

        purge() -> None
        """
        # BUG FIX: hold the lock -- purge previously raced with the periodic
        # cleaner and concurrent inserts, which may be iterating or mutating
        # the very structures being replaced here.
        with self.__lock:
            self.__d = {}
            self.__atime = {}
            self.__anum = {}
            self.__maxaccesses = {}
            self.__discard_queue = deque()
            self.__num_tiles = 0
|
import pickle
import os
import os.path
import re
import shutil
import sys
import tempfile
import threading
import urllib
import urlparse
import quodlibet
import quodlibet.config
import quodlibet.formats
import quodlibet.library
from qlsync import *
from qlsync.shifters import ShifterError
def ascify(s):
    """Convert Unicode string to ASCII, discarding out-of-bounds characters."""
    return s.encode('ascii', errors='ignore')
class Device(object):
    """Access to files on the device."""

    def __init__(self, name, musicdir, shifter, flatten = False):
        self.name = name            # display name of the device
        self.musicdir = musicdir    # root music directory on the device
        self.shifter = shifter      # transport used to move/read files
        self.flatten = flatten      # whether to flatten directory structure
        self.clear_non_persistent()

    def clear_non_persistent(self):
        """Clear non-persistent settings."""
        self.playlist_files = {} # indexed by playlist_name, of set of music files
        self.all_songs = set()

    def __str__(self):
        return "Device(name=" + self.name + ",musicdir=" + self.musicdir + ",flatten=" + str(self.flatten) + \
            ",shifter=" + str(self.shifter) + ")"

    def playlist_dir(self):
        """Directory on the device where qlsync keeps its playlist files."""
        return os.path.join(self.musicdir, "qlsync")

    def musicfile_playlist_path(self, musicfile):
        """Return a relative path to music from playlist."""
        return os.path.join("..", musicfile) # because playlists are put into qlsync subdir

    def playlist_file(self, playlist_name):
        """Relative path of the .qls file describing `playlist_name`."""
        # We don't use a playlist file extension, as Android seems to
        # keep these playlists around after we have deleted them.
        # If you want actual playlists on your device, copy them by
        # hand - or use a future version of this program ;-)
        return os.path.join("qlsync", playlist_name + ".qls")

    def musicfile_actual_path(self, musicfile_in_playlist):
        """Given a musicfile in a device playlist, return the actual pathname. The opposite of musicfile_playlist_path."""
        return os.path.normpath(os.path.join("qlsync", musicfile_in_playlist))

    def scan(self):
        """Get device storage space and all the playlists from the device, by looking in the playlist dir for qls files."""
        self.clear_non_persistent()
        self.shifter.open()
        usage = self.shifter.get_storage_space(self.musicdir)
        if self.shifter.path_exists(self.playlist_dir()):
            for f in self.shifter.ls(self.playlist_dir()):
                m = re.match(r'^(.*)\.qls', f)
                if m:
                    playlist_file = m.group(0)
                    playlist_name = m.group(1)
                    playlist_contents = self.shifter.readlines(os.path.join(self.playlist_dir(), playlist_file))
                    self.playlist_files[playlist_name] = set(playlist_contents)
                    self.all_songs.update(self.playlist_files[playlist_name])
        self.shifter.close()
        return usage

    # TODO remove obsolete:
    # def playlist_name(self, playlistfile):
    #     m = re.match(r'^(.*)\.m3u', f)
    #     if m:
    #         return m.group(1)
    #     else:
    #         return None

    def flush(self):
        """Clean up and flush out changes."""
        self.shifter.flush()
        self.shifter.close()
class Settings(object):
    """Persistent application settings: the device list and current selection.

    Settings are pickled to *configFile* (~/.qlsync by default). Observers may
    register callbacks to be notified of device-list / current-device changes.
    """

    def __init__(self, configFile = os.path.expanduser("~/.qlsync")):
        self.configFile = configFile
        self.devices = []               # list of Device
        self.currentDeviceIndex = None  # index into devices, or None
        self.load()

    def load(self):
        """Load settings from configFile; a broken file is renamed aside."""
        if os.path.exists(self.configFile):
            try:
                # binary mode: pickle data is not text
                f = open(self.configFile, 'rb')
                settings = pickle.load(f)
                currentDeviceIndex = pickle.load(f)
                # all OK, so store them
                self.devices = settings
                self.currentDeviceIndex = currentDeviceIndex
                f.close()
            except Exception as e:
                oldConfigFile = "%s.old" % self.configFile
                print("load settings failed: %s, renaming %s as %s" % (str(e), self.configFile, oldConfigFile))
                os.rename(self.configFile, oldConfigFile)
        self.clear_non_persistent()

    def save(self):
        """Persist devices and selection, owner-readable only."""
        self.clear_non_persistent()
        try:
            f = open(self.configFile, 'wb')
            os.fchmod(f.fileno(), 0o600) # might have an FTP password here
            pickle.dump(self.devices, f)
            pickle.dump(self.currentDeviceIndex, f)
            f.close()
        except IOError as e:
            print("save settings failed: " + str(e))

    def clear_non_persistent(self):
        """Clear non-persistent settings."""
        for d in self.devices:
            d.clear_non_persistent()

    def store_device(self, device, deviceIndex):
        """Add (deviceIndex is None) or replace a device, select it, persist."""
        device_list_changed = False
        if deviceIndex is None:
            self.devices.append(device)
            deviceIndex = len(self.devices) - 1
            device_list_changed = True
        else:
            self.devices[deviceIndex] = device
        self.currentDeviceIndex = deviceIndex
        self.save()
        if device_list_changed:
            self.notify_device_list_changed()
        self.notify_current_device_changed()

    def delete_device(self, deviceIndex):
        """Remove a device, keeping currentDeviceIndex consistent."""
        del self.devices[deviceIndex]
        if self.currentDeviceIndex is None:
            pass
        elif len(self.devices) == 0:
            # BUG FIX: was '==' (a no-op comparison), which left a dangling
            # index pointing into the now-empty device list
            self.currentDeviceIndex = None
        elif self.currentDeviceIndex == deviceIndex:
            self.currentDeviceIndex = 0
        elif self.currentDeviceIndex > deviceIndex:
            self.currentDeviceIndex -= 1
        self.save()
        self.notify_current_device_changed()
        self.notify_device_list_changed()

    def watch_current_device(self, callback):
        self.current_device_callback = callback

    def notify_current_device_changed(self):
        if hasattr(self, 'current_device_callback'):
            self.current_device_callback()

    def watch_device_list(self, callback):
        self.device_list_callback = callback

    def notify_device_list_changed(self):
        if hasattr(self, 'device_list_callback'):
            self.device_list_callback()
def flattened_path(path):
    """Flatten a path, i.e. squash all directory names into a single filename.

    Components are joined with " - ", outermost directory first.
    """
    remaining, name = os.path.split(path)
    while remaining not in ("", "/"):
        remaining, component = os.path.split(remaining)
        name = component + " - " + name
    return name
class M3UFile(object):
    """An m3u format playlist, written one entry per line to *filename*."""

    def __init__(self, filename, library):
        self.f = open(filename, "w")
        self.library = library

    def append(self, musicFile):
        # one playlist entry per line
        self.f.write("%s\n" % musicFile)

    def close(self):
        self.f.close()
def tracknumber_as_int(song):
    """Return the integer track number of *song*, accepting 'n' or 'n/total'."""
    raw = song['tracknumber']
    # the part before any '/' is the track number itself
    return int(raw.split('/')[0])
class RefinedAlbum:
    """An album attributed to a single artist, songs ordered by track number.

    *tracks* maps 1-based track numbers to songs and must be consecutive.
    """

    def __init__(self, album_name, artist, tracks):
        self.artist = artist
        self.name = album_name
        self.songs = [tracks[number] for number in range(1, len(tracks) + 1)]
def has_all_tracks(dict, min = 2):
    """True when the keys are exactly 1..N with at least `min` tracks.

    NOTE(review): the parameters shadow the builtins ``dict`` and ``min``;
    renaming them would be cleaner but could break keyword callers.
    """
    track_numbers = sorted(dict.keys())
    expected = list(range(1, len(track_numbers) + 1))
    return track_numbers == expected and len(dict) >= min
def refine_album(composite_album):
    """Split a composite album into per-artist albums plus various-artists sets.

    Quodlibet may roll several albums sharing a name (e.g. "Greatest Hits")
    into one; separate them by (album, artist) and consecutive track numbers.
    Returns a list of RefinedAlbum. A badly tagged album (missing artist/album/
    tracknumber tags) is skipped with a message on stderr; incomplete albums
    are reported but not returned.
    """
    refined_albums = []
    component_albums = {}   # album_name -> artist -> track number -> song
    bad_album = False
    artist = None
    album_name = None
    track = None
    for song in composite_album.songs:
        try:
            artist = song['artist']
            album_name = song['album']
            track = tracknumber_as_int(song)
        except KeyError:
            bad_album = True
        if not bad_album:
            # 'in' instead of the deprecated dict.has_key()
            if album_name not in component_albums:
                component_albums[album_name] = {}
            if artist not in component_albums[album_name]:
                component_albums[album_name][artist] = {}
            component_albums[album_name][artist][track] = song
    if bad_album:
        component_albums = {}
        sys.stderr.write("Badly tagged album: %s - %s\n" % (str(artist), str(album_name)))
    for album_name in list(component_albums.keys()):
        various_artists = {}
        # iterate a snapshot: complete artists are popped from the dict below
        for artist in list(component_albums[album_name].keys()):
            # if we've got consecutive tracks, then make it a standalone album
            if has_all_tracks(component_albums[album_name][artist]):
                refined_albums.append(RefinedAlbum(album_name, artist, component_albums[album_name].pop(artist)))
            else:
                # collect into various artists
                various_artists.update(component_albums[album_name][artist])
        # anything remaining may be a various artists album
        if len(various_artists) == 0:
            pass
        elif has_all_tracks(various_artists):
            refined_albums.append(RefinedAlbum(album_name, "Various", various_artists))
        else:
            # find an artist for the incomplete album, just for the report
            artist = None
            for track in range(1, 20):
                try:
                    artist = various_artists[track]['artist']
                except KeyError:
                    pass
                if artist is not None:
                    break
            if artist is None:
                artist = "Unknown"
            sys.stderr.write("Incomplete album: %s - %s - %s\n" % (artist, album_name, [track for track in sorted(various_artists.keys())]))
    return refined_albums
def album_name_and_artist(album):
    """Return (album_name, artist); artist becomes "Various" if artists differ.

    Raises KeyError when songs disagree on the album name (should not happen)
    or when a song lacks the 'album'/'artist' tags.
    """
    artist = None
    album_name = None
    for song in album.songs:
        name = song['album']
        performer = song['artist']
        if album_name is None:
            album_name = name
        elif name != album_name:
            # all songs of one album should share the same name
            raise KeyError
        if artist is None:
            artist = performer
        elif performer != artist:
            artist = "Various"
    return album_name, artist
class Library(object):
    """Access to music files and playlists in the local quodlibet library."""

    def __init__(self):
        quodlibet.config.init(quodlibet.const.CONFIG)
        quodlibet.formats.init()
        # quodlibet stores its scan roots as a colon-separated list
        scanSettings = quodlibet.config.get("settings", "scan")
        self.musicdirs = scanSettings.split(":")
        self.playlist_dir = os.path.expanduser("~/.quodlibet/playlists")
        self.quodlibet_library = quodlibet.library.init(quodlibet.const.LIBRARY)

    def playlists(self):
        """Sorted list of playlist file names (quodlibet-encoded)."""
        return sorted(os.listdir(self.playlist_dir))

    def relativise(self, abspath):
        """Return the path relative to a library root, or None."""
        relpath = None
        matchlen = 0
        for ldir in self.musicdirs:
            # want longest match, so we pick correctly between say
            # /path/to/mp3 and /path/to/mp3-extra
            if abspath.startswith(ldir) and (relpath is None or len(ldir) > matchlen):
                matchlen = len(ldir)
                relpath = abspath[matchlen:].lstrip("/")
        return relpath

    def playlist_files(self, playlist):
        """Iterator for files in a playlist, which yields in pairs, (relpath, abspath)."""
        pf = open(os.path.join(self.playlist_dir, playlist))
        for line in pf:
            abspath = line.rstrip("\n")
            relpath = self.relativise(abspath)
            if relpath is not None:
                yield(relpath, abspath)
        pf.close()

    def album_playlist(self, prefix, album_artist, album_name):
        """Path for an auto-generated per-album playlist file."""
        playlist_file = "%s%s - %s" % (prefix, ascify(album_artist), ascify(album_name))
        return os.path.join(self.playlist_dir, urllib.quote(playlist_file, safe = "-"))

    def create_album_playlists(self, prefix = ""):
        """Create a playlist for each album in the library."""
        for key in self.quodlibet_library.albums.keys():
            album = self.quodlibet_library.albums[key]
            for refined_album in refine_album(album):
                with open(self.album_playlist(prefix, refined_album.artist, refined_album.name), 'w') as playlist:
                    for song in refined_album.songs:
                        playlist.write("%s\n" % song['~filename'])
class Syncer(object):
    """Playlist synchronizer.
    Queue up file copies and deletions, ensuring not to delete files which are required.
    Files are not copied if they are already in the device playlist.
    """
    def __init__(self):
        self.library = Library()
        self.tmpdir = tempfile.mkdtemp()    # holds generated .m3u files
        self.scribe = None                  # background Scribe while a sync runs
        self.scan_library()
        self.device_storage = (None, None)  # unknown

    def scan_library(self):
        """Scan for quodlibet playlists."""
        # Keep Quodlibet's encoded playlist names, as trying to store
        # decoded names is troublesome.
        self.playlists = self.library.playlists()
        self.playlist_names = []
        self.playlist_index_by_name = {}
        self.playlists_on_device = [False] * len(self.playlists) # array of boolean
        i = 0
        for playlist in self.playlists:
            playlist_name = urlparse.unquote(playlist)
            self.playlist_names.append(playlist_name)
            # NOTE: keyed by the *encoded* name, matching device .qls names
            self.playlist_index_by_name[playlist] = i
            i += 1
        self.notify_playlists_changed()

    def scan_device(self, device):
        """Scan what is on the device."""
        usage = device.scan()
        for playlist_name in device.playlist_files.keys():
            # 'in' instead of the deprecated dict.has_key()
            if playlist_name in self.playlist_index_by_name:
                i = self.playlist_index_by_name[playlist_name]
                self.playlists_on_device[i] = True
        self.notify_playlists_on_device_changed()
        self.notify_device_storage_changed(usage)

    def get_device_storage(self):
        return self.device_storage

    def sync_device(self, device, playlists_wanted, label_callback, progress_callback):
        """Sync playlists, adding and deleting so that only playlists_wanted are present."""
        print("writing data to device (do not unplug) ...")
        self.scribe = Scribe(device, label_callback, progress_callback)
        for i in range(len(self.playlists)):
            if playlists_wanted[i]:
                # wanted playlist, ensure it and all songs are on device
                self.create_and_queue_playlist(self.playlists[i],
                                               self.playlists[i],
                                               device,
                                               self.scribe)
            elif self.playlists[i] in device.playlist_files.keys():
                # unwanted playlist on device, so delete
                self.delete_playlist(self.playlists[i], device, self.scribe)
        self.scribe.start()

    def cancel_sync(self):
        if self.scribe is not None:
            self.scribe.cancel()

    def sync_device_completed(self, progress, device):
        """Join the scribe thread and refresh our view of the device."""
        print("waiting for scribe")
        self.scribe.join()
        self.scribe = None
        # update local state to reflect what we did. This is non optimal, but works:
        self.scan_device(device)
        print("%d%% done, you may safely remove your device" % int(progress * 100))

    def cleanup(self):
        shutil.rmtree(self.tmpdir)

    def watch_playlists(self, callback):
        self.playlists_callback = callback

    def notify_playlists_changed(self):
        if hasattr(self, 'playlists_callback'):
            self.playlists_callback()

    def watch_playlists_on_device(self, callback):
        self.playlists_on_device_callback = callback

    def notify_playlists_on_device_changed(self):
        if hasattr(self, 'playlists_on_device_callback'):
            self.playlists_on_device_callback()

    def watch_device_storage(self, callback):
        self.device_storage_callback = callback

    def notify_device_storage_changed(self, usage):
        if hasattr(self, 'device_storage_callback'):
            self.device_storage_callback(usage)

    def create_and_queue_playlist(self, playlist, playlist_name, device, scribe):
        """Copy over any songs which have changed."""
        new_playlist = playlist_name not in device.playlist_files.keys()
        playlist_files = []
        # queue copies for new files
        for relpath,abspath in self.library.playlist_files(playlist):
            if device.flatten:
                devicepath = flattened_path(relpath)
            else:
                devicepath = relpath
            devicepath_in_playlist = device.musicfile_playlist_path(devicepath)
            playlist_files.append(devicepath_in_playlist)
            if devicepath_in_playlist not in device.all_songs:
                scribe.queue_copy(abspath, devicepath)
        # determine whether we need to copy playlist itself, and queue deletions for obsolete files
        if new_playlist:
            need_to_copy_playlist = True
        else:
            playlist_files_set = set(playlist_files)
            for musicfile in (device.playlist_files[playlist_name] - playlist_files_set):
                scribe.queue_delete(device.musicfile_actual_path(musicfile))
            need_to_copy_playlist = (playlist_files_set != device.playlist_files[playlist_name])
        # copy playlist file if required
        if need_to_copy_playlist:
            m3uFile = os.path.join(self.tmpdir, playlist + ".m3u")
            f = open(m3uFile, "w")
            f.write("\n".join(playlist_files))
            f.close()  # BUG FIX: was 'f.close' (attribute access, never called)
            scribe.queue_copy_playlist(m3uFile, device.playlist_file(playlist_name))

    def delete_playlist(self, playlist_name, device, scribe):
        """Queue deletion of a playlist's songs, then the playlist itself."""
        for musicfile in device.playlist_files[playlist_name]:
            scribe.queue_delete(device.musicfile_actual_path(musicfile))
        scribe.queue_delete_playlist(device.playlist_file(playlist_name))
class Scribe(threading.Thread):
    """Deletes then copies data onto the device in a background thread. Playlists are copied/deleted after their music files."""
    def __init__(self, device, label_callback, progress_callback):
        # label_callback(text) and progress_callback(fraction[, complete])
        # report status back to the caller (GUI).
        super(Scribe, self).__init__()
        self.device = device
        self.label_callback = label_callback
        self.progress_callback = progress_callback
        self.actions = [] # list of (src,dst), with src = None for deletions
        self.copies = [] # list of (src,dst)
        self.deletions = [] # list of dst
        self.deleteRequired = {} # indexed by dst, of deleteRequired
        self.n_copies = 0
        self.n_deletions = 0
        self.cancel_event = threading.Event()
    def cancel(self):
        # May be called from another thread; run() polls this event.
        self.cancel_event.set()
    def queue_copy(self, src, dst):
        # Queuing a copy also vetoes any later queue_delete() of the same dst.
        print("queue_copy(%s, %s)" % (src, dst))
        self.n_copies += 1
        self.copies.append((src,dst))
        self.deleteRequired[dst] = False
    def queue_copy_playlist(self, src, dst):
        # NOTE(review): unlike queue_copy this does not veto deletions of dst;
        # deletions run before copies, so the playlist is recreated anyway.
        print("queue_copy_playlist(%s, %s)" % (src, dst))
        self.n_copies += 1
        self.copies.append((src,dst))
    def queue_delete(self, dst):
        print("queue_delete(%s)" % dst)
        # only schedule the deletion if no queued copy has claimed dst
        if not self.deleteRequired.has_key(dst):
            self.deleteRequired[dst] = True
            self.n_deletions += 1
            self.deletions.append(dst)
    def queue_delete_playlist(self, dst):
        print("queue_delete_playlist(%s)" % dst)
        if not self.deleteRequired.has_key(dst):
            self.deleteRequired[dst] = True
            self.n_deletions += 1
            self.deletions.append(dst)
    def check_cancelled(self):
        # Non-blocking poll of the cancel event; sticky once observed.
        # NOTE(review): self.cancelled is first assigned in run(), so calling
        # this before the thread starts would raise AttributeError.
        if not self.cancelled:
            self.cancelled = self.cancel_event.wait(0)
        return self.cancelled
    def run(self):
        """Copy all wanted files, and delete unwanted."""
        self.cancelled = False
        progress = 0.0
        # let GUI know what we're up to
        self.label_callback(str(self.n_copies) + " files to copy and " + str(self.n_deletions) + " to delete")
        # go for it
        self.device.shifter.open()
        dstRoot = self.device.musicdir
        delDirs = {}  # directories we deleted from; candidates for removal
        i_copy = 0
        i_delete = 0
        # deletions
        for dst in self.deletions:
            if self.check_cancelled():
                break
            dstFile = os.path.join(dstRoot, dst)
            dstDir = os.path.dirname(dstFile)
            # skip entries whose deletion was vetoed by a queued copy
            if self.deleteRequired.has_key(dst) and self.deleteRequired[dst]:
                i_delete += 1
                print "removefile", i_delete, "of", self.n_deletions, ": ", dstFile
                delDirs[dstDir] = True
                try:
                    self.device.shifter.removefile(dstFile)
                except ShifterError as e:
                    # we don't care if this fails
                    print "ignoring error", e
                    pass
                # progress counts deletions first, then copies
                progress = i_delete * 1.0 / (self.n_copies + self.n_deletions)
                self.progress_callback(progress)
        # prune any directories the deletions emptied
        for dstDir in delDirs.keys():
            if self.check_cancelled():
                break
            self.device.shifter.removedir_if_empty(dstDir)
        # copies (music files first, playlists queued after them)
        for src,dst in self.copies:
            if self.check_cancelled():
                break
            dstFile = os.path.join(dstRoot, dst)
            dstDir = os.path.dirname(dstFile)
            if src is not None:
                i_copy += 1
                print "uploadfile", i_copy, "of", self.n_copies, ": ", dstFile
                self.device.shifter.makedirs(dstDir)
                self.device.shifter.uploadfile(src, dstFile)
                progress = (self.n_deletions + i_copy) * 1.0 / (self.n_copies + self.n_deletions)
                self.progress_callback(progress)
        if not self.cancelled:
            progress = 1
        self.device.flush()
        self.progress_callback(progress, True) # complete
|
"""
WSGI config for questproj project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# Must be set before get_wsgi_application() imports the Django settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "questproj.settings")
application = get_wsgi_application()
# Wrap the WSGI app with WhiteNoise so it serves static files itself.
# NOTE(review): whitenoise.django.DjangoWhiteNoise is removed in newer
# WhiteNoise releases -- confirm the pinned version before upgrading.
application = DjangoWhiteNoise(application)
|
import udj
class OwnerLibTestCases(udj.testhelpers.tests06.testclasses.LibTestCases):
    """Run the shared LibTestCases suite authenticated as the library owner."""
    # NOTE(review): relies on `import udj` exposing udj.testhelpers.tests06;
    # confirm the package imports its submodules.
    username='kurtis'
    userpass='testkurtis'
class AdminLibTestCases(udj.testhelpers.tests06.testclasses.LibTestCases):
    """Run the shared LibTestCases suite authenticated as an admin user."""
    username='lucas'
    userpass='testlucas'
|
from django.contrib import admin
from ds.models import DService
class DServiceAdmin(admin.ModelAdmin):
    """Django admin configuration for DService: columns, filters and search."""
    list_display = ('DS_Type', 'DS_TiersDemandeur', 'DS_Sujet', 'statut')
    list_filter = ('DS_Type', 'DS_TiersDemandeur', 'DS_Sujet', 'statut')
    search_fields = ['DS_Type', 'DS_TiersDemandeur', 'DS_Sujet' ]

# Register the model with its customized admin class.
admin.site.register(DService, DServiceAdmin)
|
from src import EventManager, ModuleManager, utils
# Message tag used by bots following the inspircd.org/bot convention.
TAG = utils.irc.MessageTag(None, "inspircd.org/bot")

class Module(ModuleManager.BaseModule):
    """Drop messages tagged as coming from other bots."""
    @utils.hook("received.message.private")
    @utils.hook("received.message.channel")
    @utils.kwarg("priority", EventManager.PRIORITY_HIGH)
    def message(self, event):
        # High priority so the event is eaten before other handlers see it.
        if TAG.present(event["tags"]):
            event.eat()
|
'''
@author : quanticio44
@contact : quanticio44@gmail.com
@license : See with Quanticio44
@summary : Restore operation
@since : 22/08/2014
'''
import os
import zipfile
import tarfile
import tempfile
import subprocess
import core.common.tools
import core.common.backupstoredbfile
class restoreFileSystem(object):
    ''' restoreFileSystem class definition

    Restores a whole archive, or individual entries selected by id, from a
    zip/tar/gzip/bz2 backup archive into a target folder. '''
    verbose = False
    archive = None
    target = None
    CompressionType = 'zip'

    def __init__(self, archive, target, verbose):
        ''' Constructor
        @param archive: Path of archive file (format zip, tar, gzip, bz2 (automatic research)
        @param target: Target folder (created if missing)
        @param verbose: Verbosity mode '''
        self.verbose = verbose
        if self.verbose:
            print("Execute restoreFileSystem with this option :\narchive=%s\ntarget=%s" % (archive, target))
        self.CompressionType = core.common.tools.getArchiveCompressionType(archive)
        if os.path.isfile(target):
            raise IOError("Restore impossible : The target path is a file, a folder is required")
        elif not os.path.isdir(target):
            os.mkdir(target)
        self.archive = archive
        self.target = target

    def _tarReadMode(self):
        ''' Return the tarfile read mode ('r', 'r:gz' or 'r:bz2') matching the
        detected compression type. Factors out logic previously duplicated in
        restoreAll, restoreById and __getTemporaryBackupStoreFile. '''
        mode = 'r'
        if self.CompressionType == 'gzip':
            mode += ':gz'
        elif self.CompressionType == 'bz2':
            mode += ':bz2'
        return mode

    def restoreAll(self):
        ''' Restore all archive '''
        if self.verbose:
            print("Execute restoreFileSystem.restoreAll")
        if self.CompressionType == 'zip':
            with zipfile.ZipFile(self.archive, 'r') as currentzip:
                currentzip.extractall(self.target)
        elif self.CompressionType in ('tar', 'gzip', 'bz2'):
            with tarfile.open(self.archive, self._tarReadMode()) as currenttar:
                currenttar.extractall(self.target)

    def restoreById(self, ids):
        ''' Restore a list ids (comma-separated string of numeric ids) '''
        if self.verbose:
            print("Execute restoreFileSystem.restoreById with this option :\nids=%s" % str(ids))
        # resolve the requested ids via the archive's embedded index file
        backupStoreFile = self.__getTemporaryBackupStoreFile()
        obj_backupstoredbfile = core.common.backupstoredbfile.BackupStoreDbFile(backupStoreFile)
        obj_backupstoredbfile.open()
        objList = []
        ids = ids.split(",")
        for Id in ids:
            obj = obj_backupstoredbfile.getFSObjById(int(Id.strip()), WithParent=True)
            if obj is not None and obj.isFolder():
                # a folder expands to its whole subtree
                objList.extend(obj_backupstoredbfile.getSubTree(int(Id.strip()), WithParent=True))
            else:
                objList.append(obj)
        obj_backupstoredbfile.close()
        os.remove(backupStoreFile)
        os.rmdir(os.path.dirname(backupStoreFile))
        lstReleaseId = []
        if self.CompressionType == 'zip':
            with zipfile.ZipFile(self.archive, 'r') as currentzip:
                for i, obj in enumerate(objList):
                    if obj is not None:
                        # Filtering between the same id
                        if obj.id not in lstReleaseId:
                            currentzip.extract(obj.path + os.sep + obj.name, self.target)
                            lstReleaseId.append(obj.id)
                    else:
                        print("Warning: id(%s) is unknown. The file will not be restored." % ids[i])
        elif self.CompressionType in ('tar', 'gzip', 'bz2'):
            with tarfile.open(self.archive, self._tarReadMode()) as currenttar:
                for i, obj in enumerate(objList):
                    if obj is not None:
                        # Filtering between the same id
                        if obj.id not in lstReleaseId:
                            currenttar.extract('.' + obj.relativePath + '/' + obj.name, self.target)
                            lstReleaseId.append(obj.id)
                    else:
                        print("Warning: id(%s) is unknown. The file will not be restored." % ids[i])

    def __getTemporaryBackupStoreFile(self):
        ''' Get a temporary backupStoreFile (.backupstore) '''
        tmp = tempfile.gettempdir() + os.sep + os.path.basename(self.archive)
        if self.CompressionType == 'zip':
            obj_zipfile = zipfile.ZipFile(self.archive, 'r')
            obj_zipfile.extract('.backupstore', tmp)
            obj_zipfile.close()
        elif self.CompressionType in ('tar', 'gzip', 'bz2'):
            currenttar = tarfile.open(self.archive, self._tarReadMode())
            currenttar.extract('./.backupstore', tmp)
            currenttar.close()
        else:
            raise IOError("Research impossible : The archive path is not a compatible archive")
        return tmp + os.sep + '.backupstore'
class dataBase(object):
    ''' Restore database class definition

    The restore runs entirely in the constructor; call getResult() afterwards
    to learn whether the external tool exited successfully. '''
    def __init__(self, dataBase, restoreFile, tools='pg_restore', host='localhost', port='5432', user='postgres', password=None, dumptype='std', verbose=True):
        ''' Constructor
        @param dataBase: String type for database name
        @param target: Target file
        @param tools: Tool to use (ex: pg_dump)
        @param host: String type for host server of database (localhost by default)
        @param port: String type for port instance of database (5432 by default)
        @param user: String type for database user (postgres by default)
        @param password: String type for database password (empty by default (trust authen))
        @param dumptype: std (bin) or sql
        @param verbose: Verbosity mode '''
        # NOTE(review): the commands below are built by string interpolation
        # and run with shell=True, and the password is passed on the command
        # line -- unsafe with untrusted values; consider a list argv and
        # PGPASSWORD/.pgpass instead.
        self.result = False
        dumptype = dumptype.lower()
        if not dumptype:
            dumptype = 'std'
        if password == '':
            password = None
        # pick the verbosity flag understood by the selected tool
        if not verbose and dumptype == 'sql':
            trace = '--quiet'
        elif verbose and dumptype != 'sql':
            trace = '--verbose'
        else:
            trace = ''
        if dumptype == 'sql':
            # plain-format restore (psql-style): -w uses trust/.pgpass auth
            if password is None:
                result = subprocess.call('"%s" -h %s -p %s -U %s -w -F plain %s -f "%s"' %(tools, host, port, user, trace, restoreFile),stderr=subprocess.STDOUT, shell=True)
            else:
                result = subprocess.call('"%s" -h %s -p %s -U %s -W %s -F plain %s -f "%s"' %(tools, host, port, user, password, trace, restoreFile), stderr=subprocess.STDOUT, shell=True)
        else:
            # custom (binary) format restore via pg_restore
            if password is None:
                result = subprocess.call('"%s" -h %s -p %s -U %s -w -F custom %s -d %s "%s"' %(tools, host, port, user, trace, dataBase, restoreFile),stderr=subprocess.STDOUT, shell=True)
            else:
                result = subprocess.call('"%s" -h %s -p %s -U %s -W %s -F custom %s -d %s "%s"' %(tools, host, port, user, password, trace, dataBase, restoreFile), stderr=subprocess.STDOUT, shell=True)
        # exit code 0 means the external tool reported success
        if result == 0:
            self.result = True
            if verbose:
                print "The %s database was imported from : %s" % (dataBase, restoreFile)
        elif verbose:
            print "Fail to import %s database from : %s" % (dataBase, restoreFile)
    def getResult(self):
        ''' Getting result of restoration operation '''
        return self.result
|
"""
RED Plugin
Copyright (C) 2014-2015 Matthias Bolte <matthias@tinkerforge.com>
program_info_delphi.py: Program Delphi Info Widget
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from brickv.plugin_system.plugins.red.program_info import ProgramInfo
from brickv.plugin_system.plugins.red.program_utils import Constants
from brickv.plugin_system.plugins.red.ui_program_info_delphi import Ui_ProgramInfoDelphi
from brickv.plugin_system.plugins.red.program_info_delphi_compile import ProgramInfoDelphiCompile
class ProgramInfoDelphi(ProgramInfo, Ui_ProgramInfoDelphi):
    """Info widget for a Delphi/Lazarus program: shows its custom options and
    offers a compile action when compile-from-source is enabled."""
    def __init__(self, context):
        ProgramInfo.__init__(self, context)
        self.setupUi(self)
        self.compile_dialog = None  # open ProgramInfoDelphiCompile dialog, if any
        self.check_show_advanced_options.stateChanged.connect(self.update_ui_state)
    # overrides ProgramInfo.update_ui_state
    def update_ui_state(self):
        """Refresh label texts/visibility from the program's custom options."""
        show_advanced_options = self.check_show_advanced_options.isChecked()
        # start mode
        start_mode_api_name = self.program.cast_custom_option_value('delphi.start_mode', str, '<unknown>')
        start_mode = Constants.get_delphi_start_mode(start_mode_api_name)
        start_mode_executable = start_mode == Constants.DELPHI_START_MODE_EXECUTABLE
        self.label_start_mode.setText(Constants.delphi_start_mode_display_names[start_mode])
        # executable (only meaningful in executable start mode)
        self.label_executable_title.setVisible(start_mode_executable)
        self.label_executable.setVisible(start_mode_executable)
        self.label_executable.setText(self.program.cast_custom_option_value('delphi.executable', str, ''))
        # compile from source
        compile_from_source = self.program.cast_custom_option_value('delphi.compile_from_source', bool, False)
        if compile_from_source:
            self.label_compile_from_source.setText('Enabled')
        else:
            self.label_compile_from_source.setText('Disabled')
        # build system (fpcmake vs lazbuild decides which option rows show)
        build_system_api_name = self.program.cast_custom_option_value('delphi.build_system', str, '<unknown>')
        build_system = Constants.get_delphi_build_system(build_system_api_name)
        build_system_fpcmake = build_system == Constants.DELPHI_BUILD_SYSTEM_FPCMAKE
        build_system_lazbuild = build_system == Constants.DELPHI_BUILD_SYSTEM_LAZBUILD
        self.label_build_system_title.setVisible(compile_from_source)
        self.label_build_system.setVisible(compile_from_source)
        self.label_build_system.setText(Constants.delphi_build_system_display_names[build_system])
        # working directory (advanced)
        self.label_working_directory_title.setVisible(show_advanced_options)
        self.label_working_directory.setVisible(show_advanced_options)
        self.label_working_directory.setText(self.program.working_directory)
        # make options (advanced, fpcmake only)
        self.label_make_options_title.setVisible(compile_from_source and show_advanced_options and build_system_fpcmake)
        self.label_make_options.setVisible(compile_from_source and show_advanced_options and build_system_fpcmake)
        self.label_make_options.setText('\n'.join(self.program.cast_custom_option_value_list('delphi.make_options', str, [])))
        # lazbuild options (advanced, lazbuild only)
        self.label_lazbuild_options_title.setVisible(compile_from_source and show_advanced_options and build_system_lazbuild)
        self.label_lazbuild_options.setVisible(compile_from_source and show_advanced_options and build_system_lazbuild)
        self.label_lazbuild_options.setText('\n'.join(self.program.cast_custom_option_value_list('delphi.lazbuild_options', str, [])))
    # overrides ProgramInfo.close_all_dialogs
    def close_all_dialogs(self):
        if self.compile_dialog != None:
            self.compile_dialog.close()
    # overrides ProgramInfo.get_language_action
    def get_language_action(self):
        # offer a Compile action only when compile-from-source is enabled
        if self.program.cast_custom_option_value('delphi.compile_from_source', bool, False):
            return self.compile_from_source, 'Compile'
        else:
            return ProgramInfo.get_language_action(self)
    def compile_from_source(self):
        """Show the modal compile dialog for this program."""
        if not self.program.cast_custom_option_value('delphi.compile_from_source', bool, False):
            return
        self.compile_dialog = ProgramInfoDelphiCompile(self, self.script_manager, self.program)
        self.compile_dialog.exec_()
        self.compile_dialog = None
|
import os
import glob
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
import pisi.context as ctx
import pisi.actionsapi
import pisi.actionsapi.get as get
from pisi.actionsapi.shelltools import system
from pisi.actionsapi.shelltools import can_access_file
from pisi.actionsapi.shelltools import export
from pisi.actionsapi.shelltools import unlink
class ConfigureError(pisi.actionsapi.Error):
    """Raised when the perl configure step fails; also logs via ctx.ui."""
    def __init__(self, value=''):
        pisi.actionsapi.Error.__init__(self, value)
        self.value = value
        ctx.ui.error(value)
class MakeError(pisi.actionsapi.Error):
    """Raised when the perl build step fails; also logs via ctx.ui."""
    def __init__(self, value=''):
        pisi.actionsapi.Error.__init__(self, value)
        self.value = value
        ctx.ui.error(value)
class InstallError(pisi.actionsapi.Error):
    """Raised when the perl install step fails; also logs via ctx.ui."""
    def __init__(self, value=''):
        pisi.actionsapi.Error.__init__(self, value)
        self.value = value
        ctx.ui.error(value)
def configure(parameters = ''):
    '''configure source with given parameters.

    Prefers Module::Build (Build.PL) over ExtUtils::MakeMaker (Makefile.PL);
    raises ConfigureError on failure.'''
    export('PERL_MM_USE_DEFAULT', '1')
    if can_access_file('Build.PL'):
        if system('perl Build.PL installdirs=vendor destdir=%s' % get.installDIR()):
            # call-style raise: valid in both Python 2 and 3
            raise ConfigureError(_('Configure failed.'))
    else:
        if system('perl Makefile.PL %s PREFIX=/usr INSTALLDIRS=vendor DESTDIR=%s' % (parameters, get.installDIR())):
            raise ConfigureError(_('Configure failed.'))
def make(parameters = ''):
    '''make source with given parameters.

    Uses make when a Makefile exists, otherwise Module::Build's perl Build;
    raises MakeError on failure.'''
    if can_access_file('Makefile'):
        if system('make %s' % parameters):
            # call-style raise: valid in both Python 2 and 3
            raise MakeError(_('Make failed.'))
    else:
        if system('perl Build %s' % parameters):
            raise MakeError(_('perl build failed.'))
def install(parameters = 'install'):
    '''install source with given parameters.

    Raises InstallError on failure and cleans up .packlist files afterwards.'''
    if can_access_file('Makefile'):
        if system('make %s' % parameters):
            raise InstallError(_('Make failed.'))
    else:
        if system('perl Build install'):
            # BUG FIX: previously raised MakeError here; an install failure
            # should raise InstallError like the branch above.
            raise InstallError(_('perl install failed.'))
    removePacklist()
def removePacklist():
    ''' cleans .packlist file from perl packages '''
    # vendor_perl auto/ tree inside the install image for the current perl
    path = '%s/%s' % (get.installDIR(), "/usr/lib/perl5/vendor_perl/%s/%s-linux-thread-multi/auto/" % (get.curPERL(), get.HOST().split("-")[0]))
    for root, dirs, files in os.walk(path):
        for entry in files:
            if entry != ".packlist":
                continue
            full = '%s/%s' % (root, entry)
            if can_access_file(full):
                unlink(full)
|
from nose.tools import eq_ as eq
import os
from cStringIO import StringIO
from gitosis import init, repository, run_hook
from gitosis.config import GitosisRawConfigParser as RawConfigParser
from gitosis.test.util import maketemp, readFile
def test_post_update_simple():
    """End-to-end check of run_hook.post_update: after committing a config to
    the admin repo, the hook regenerates descriptions, project/repo lists,
    git-daemon export flags and the SSH authorized_keys file."""
    tmp = maketemp()
    # a repositories root with an admin repo plus two member repos
    repos = os.path.join(tmp, 'repositories')
    os.mkdir(repos)
    admin_repository = os.path.join(repos, 'gitosis-admin.git')
    pubkey = (
        'ssh-rsa '
        +'0123456789ABCDEFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
        +'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
        +'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
        +'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA= fakeuser@fakehost')
    user = 'theadmin'
    # gitosis config pointing all generated output at temp directories
    cfg = RawConfigParser()
    cfg.add_section('gitosis')
    cfg.set('gitosis', 'repositories', repos)
    generated = os.path.join(tmp, 'generated')
    os.mkdir(generated)
    cfg.set('gitosis', 'generate-files-in', generated)
    ssh = os.path.join(tmp, 'ssh')
    os.mkdir(ssh)
    cfg.set(
        'gitosis',
        'ssh-authorized-keys-path',
        os.path.join(ssh, 'authorized_keys'),
        )
    init.init_admin_repository(
        git_dir=admin_repository,
        pubkey=pubkey,
        user=user,
        config=cfg,
        )
    repository.init(path=os.path.join(repos, 'forweb.git'))
    repository.init(path=os.path.join(repos, 'fordaemon.git'))
    # commit a gitosis.conf enabling gitweb for forweb and daemon for
    # fordaemon, plus a key for jdoe
    repository.fast_import(
        git_dir=admin_repository,
        committer='John Doe <jdoe@example.com>',
        commit_msg="""\
stuff
""",
        parent='refs/heads/master^0',
        files=[
            ('gitosis.conf', """\
[gitosis]
[group gitosis-admin]
members = theadmin
writable = gitosis-admin
[repo fordaemon]
daemon = yes
[repo forweb]
gitweb = yes
owner = John Doe
description = blah blah
"""),
            ('keydir/jdoe.pub',
             'ssh-rsa '
             +'0123456789ABCDEFBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
             +'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
             +'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB'
             +'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB= jdoe@host.example.com'),
            ],
        )
    # run the hook under test
    run_hook.post_update(
        cfg=cfg,
        git_dir=admin_repository,
        )
    # the gitweb description should be written into the repo
    got = readFile(os.path.join(repos, 'forweb.git', 'description'))
    eq(got, 'blah blah\n')
    # both list files are generated; projects.list URL-encodes the owner
    got = os.listdir(generated)
    got.sort()
    eq(got, ['projects.list', 'repos.list'])
    got = readFile(os.path.join(generated, 'projects.list'))
    eq(
        got,
        """\
forweb.git John+Doe
""",
        )
    # daemon = yes creates the git-daemon-export-ok marker
    got = os.listdir(os.path.join(repos, 'fordaemon.git'))
    assert 'git-daemon-export-ok' in got, \
        "git-daemon-export-ok not created: %r" % got
    # jdoe's key ends up in authorized_keys with the forced gitosis command
    got = os.listdir(ssh)
    eq(got, ['authorized_keys'])
    got = readFile(os.path.join(ssh, 'authorized_keys')).splitlines(True)
    assert 'command="gitosis-serve jdoe",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty ssh-rsa 0123456789ABCDEFBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB= jdoe@host.example.com\n' in got, \
        "SSH authorized_keys line for jdoe not found: %r" % got
|
'''
PEace
=======
A Python library for reading Portable Executable files.
Simple to use, simple to read.
PE = PEace('path_to_pe')
PE.Sections contains an array of all the PE Sections with their fields and values
PE.getSectionByName('.section') returns the section given by the name argument
PE.ImportModules contains an array listing all the imported functions
PE.ExportModules same as above, for exports
Also includes a function to read Null Terminated Byte Strings from files (C style)
string = PE.readNTTS(file_offset)
Reads bytes until null byte is encountered.
Written by Ruben Ventura [tr3w]
the.tr3w at gmail dot com
(@tr3w_)
'''
import struct
class PEace(object):
    """Minimal Portable Executable (PE) file parser.

    On construction, reads and caches the DOS header, PE header, optional
    header, data directory, section table and the import/export name lists
    of the file at *path*.  All offsets follow the PE/COFF layout.
    """
    def __init__(self, path):
        try:
            self.f = open(path, 'rb')
        except IOError:
            # NOTE(review): "%s" % IOError formats the exception *class*,
            # not the caught error message -- confirm intended
            raise Exception("[!] Cannot open file: %s" % IOError)
        self.DOSHeader = self.getDOSHeader()
        self.PEHeader = self.getPEHeader()
        self.ImageOptionalHeader = self.getImageOptionalHeader()
        self.ImageDataDirectory = self.getImageDataDirectory()
        self.Sections = self.getSections()
        self.ImportModules = self.getImports()
        self.ExportModules = self.getExports()
        # print "Finished setting up for %s" % path
    def getDOSHeader(self):
        """Read the 0x40-byte DOS header and verify the 'MZ' magic."""
        DOSHeader = self.f.read(0x40)
        if DOSHeader[:2] != 'MZ':
            raise Exception("[!] Invalid DOS header.")
        return DOSHeader
    def getPEHeader(self):
        """Seek to e_lfanew (offset 0x3c) and read signature + file header."""
        self.PEHeaderOffset = unpackL(self.DOSHeader[0x3c:])
        self.f.seek(self.PEHeaderOffset)
        # read Signature and IMAGE_FILE_HEADER (0x14)
        PEHeader = self.f.read(0x18)
        if PEHeader[:4] != "PE\0\0":
            raise Exception("[!] Invalid PE header.")
        if PEHeader[4:6] == '\x4c\x01':
            pass#print '# 32-bit executable'
        elif PEHeader[4:6] == '\x00\x02':
            pass#print '# 64-bit executable'
        else:
            pass# ' not a i386+ image'
        return PEHeader
    def getImageOptionalHeader(self):
        """Read SizeOfOptionalHeader bytes following the file header."""
        self.f.seek(self.PEHeaderOffset + len(self.PEHeader))
        SizeOfOptionalHeader = unpackH(self.PEHeader[0x14:0x16])
        ImageOptionalHeader = self.f.read(SizeOfOptionalHeader)
        return ImageOptionalHeader
    def getImageDataDirectory(self):
        """Return the raw 0x80-byte data-directory slice of the optional header."""
        # ImageDataDirectory (the last entry of the OptionalHeader) has 16 entries of 8 bytes each (0x80)
        # NOTE(review): NumberOfRvas is kept as raw bytes (never unpacked) and unused
        NumberOfRvas = self.ImageOptionalHeader[len(self.ImageOptionalHeader) - 0x84 : len(self.ImageOptionalHeader) - 0x80]
        ImageDataDirectory = self.ImageOptionalHeader[len(self.ImageOptionalHeader) - 0x80 : len(self.ImageOptionalHeader)]
        #for i in xrange(0, NumberOfRvas):
        return ImageDataDirectory
    def getSections(self):
        """Parse the section table into a list of per-section field dicts."""
        NumberOfSections = unpackH(self.PEHeader[6:8])
        Sections = []
        f = self.f
        # section table starts right after the optional header
        f.seek(self.PEHeaderOffset + len(self.PEHeader) + len(self.ImageOptionalHeader))
        for i in xrange(0, NumberOfSections):
            Sections.append({'Name' : f.read(8),
                             'PhysicalAddress' : unpackL(f.read(4)),
                             'VirtualAddress' : unpackL(f.read(4)),
                             'SizeOfRawData' : unpackL(f.read(4)),
                             'PointerToRawData' : unpackL(f.read(4)),
                             'PointerToRelocations': unpackL(f.read(4)),
                             'PointerToLinenumbers': unpackL(f.read(4)),
                             'NumberOfRelocations' : unpackH(f.read(2)),
                             'NumberOfLineNumbers' : unpackH(f.read(2)),
                             'Characteristics' : unpackL(f.read(4))
                             })
        return Sections
    def getSectionByName(self, Name):
        """Return the first section whose 8-byte name contains *Name*, else 0."""
        for section in self.Sections:
            if Name in section['Name']:
                return section
        return 0
    def getExports(self):
        """Return the list of exported function names, or 0 if no .edata section."""
        f = self.f
        ExportSection = self.getSectionByName('.edata')
        if not ExportSection: return 0
        ExportOffset = ExportSection['PointerToRawData']
        ImageExportOffset = unpackL(self.ImageDataDirectory[:4])
        ExportSize = unpackL(self.ImageDataDirectory[4:8])
        f.seek(ExportOffset)
        ImageExportDirectory = f.read(ExportSize)
        NumberOfFunctions = unpackL(ImageExportDirectory[0x14:0x18])
        NumberOfNames = unpackL(ImageExportDirectory[0x18:0x1c])
        if NumberOfFunctions != NumberOfNames:
            #WTF?
            print 'functions != names'
        # RVAs are converted to file offsets: rva - section RVA base + raw offset
        AddressOfFunctions = unpackL(ImageExportDirectory[0x1c:0x20]) - ImageExportOffset + ExportOffset
        AddressOfNames = unpackL(ImageExportDirectory[0x20:0x24]) - ImageExportOffset + ExportOffset
        f.seek(AddressOfNames)
        ExportNamePointers = f.read(NumberOfNames * 4)
        ExportNamesPointers = [unpackL(ExportNamePointers[i:i+4]) - ImageExportOffset + ExportOffset for i in range(0, len(ExportNamePointers), 4)]
        # reuse the variable to accumulate resolved name strings
        ExportNamePointers = []
        i = 0
        for pointer in ExportNamesPointers:
            ExportNamePointers.append(self.readNTBS(pointer))
        return ExportNamePointers
    def getImports(self):
        """Return "module : function" strings for all imports, or 0 if no .idata."""
        ImportSection = self.getSectionByName('.idata')
        if not ImportSection: return 0
        ImportOffset = ImportSection['PointerToRawData']
        ImageImportOffset = unpackL(self.ImageDataDirectory[8:0xc])
        ImportSize = unpackL(self.ImageDataDirectory[0xc:0x10])
        ImageImportDirectory = []
        f = self.f
        f.seek(ImportOffset)
        # each IMAGE_IMPORT_DESCRIPTOR is 0x14 bytes
        for i in xrange(0x14, ImportSize, 0x14):
            ImageImportDirectory.append(f.read(0x14))
        ImportModules = []
        for module in ImageImportDirectory:
            if module == '\x00' * 0x14: # extra validation cuz sometimes ImportSize says another thing
                break
            module_name = unpackL(module[0xc:0x10]) - ImageImportOffset + ImportOffset
            FirstThunk = unpackL(module[:4])
            offset_to_pointers = ImportOffset + FirstThunk - ImageImportOffset
            pointers = []
            f.seek(offset_to_pointers)
            while 1:
                pointer = unpackL(f.read(4))
                if not pointer:
                    break
                pointers.append(pointer - ImageImportOffset + ImportOffset)
            for p in pointers:
                # p + 2 skips the 2-byte hint word before the function name
                ImportModules.append("%s : %s" % (self.readNTBS(module_name), self.readNTBS(p + 2)))
        return ImportModules
    def readNTBS(self, p):
        """Read a null-terminated byte string at file offset *p* (terminator stripped)."""
        s = ''
        self.f.seek(p)
        while 1:
            s += self.f.read(1)
            if s[-1] == "\x00":
                return s[:-1]
def unpackL(s):
    """Decode *s* as a little-endian unsigned 32-bit integer."""
    (value,) = struct.unpack("<L", s)
    return value
def unpackH(s):
    """Decode *s* as a little-endian unsigned 16-bit integer."""
    (value,) = struct.unpack("<H", s)
    return value
|
__author__ = 'Dario'
import threading
import subprocess
import time
import sys
import io
import os
import logging
import socket
import traceback
import psutil
import ExternalProfitServer
import SwitcherData
from SwitcherData import SwitcherData as SD
from console.switcher import HTMLBuilder
from errorReports import ErrorReport as err
import requests
MIN_TIME_THREAD_PROBED = 120  # min seconds between CPU samples before a freeze check is meaningful
CPU_TIME = 0  # index of the per-pid CPU-time dict in the tuple returned by getCPUUsages
TIME_PROBED = 1  # index of the sample timestamp in that same tuple
LOOP_SLEEP_TIME = 5  # seconds slept per iteration of the inner wait loop
MINER_CRASHED = "crashes"
MINER_FREEZED = "freezes"
MINER_CRASHED_OR_FREEZED = "crashes or freezes"
LOCAL_START = "LOCAL_START"
LOCAL_STOP = "LOCAL_STOP"
EXTERNAL_SYNC = True  # when True, profit data is synced with the external profit server
URL = 'http://localhost:8081/ExternalProfitDGB'
TIMEOUT = 10  # seconds, HTTP timeout for the external profit request
class SwitchingThread (threading.Thread):
    """Worker thread that runs the profit-switching mining loop.

    Repeatedly fetches profitability data via SwitcherData, decides whether
    to switch algorithms or (re)start/kill miner processes, and optionally
    synchronizes start/stop decisions with an external profit server.
    Written for Python 2 (print statements, ``except E, e`` syntax).
    """
    def __init__(self, name, counter, console, rebooting, resume):
        """Set up thread state; *console* is the GUI callback object."""
        super(SwitchingThread, self).__init__()
        # NOTE(review): on Python 3.4+ this would shadow Thread._stop (a
        # method); safe on the Python 2 interpreter this file targets
        self._stop = threading.Event()
        self.configChangedFlag = False
        self.rebooting = rebooting
        self.resume = resume
        self.activeMiner = None
        self.switcherData = None
        self.console = console
        # NOTE(review): Thread.__init__ was already invoked via super() above
        threading.Thread.__init__(self)
        self.name = name
        self.counter = counter
        self.line = str()
        self.lines = []
    def setActiveConfigFile(self, activeConfigFile):
        """Remember the config file the mining loop should read."""
        self.activeConfigFile = activeConfigFile
    def run(self):
        """Thread entry point: delegate to the main mining loop."""
        #print "Starting thread " + self.name + " PID = " + str(self.ident) + " - counter = " + str(self.counter)
        self.mine_threaded(self)
    def mine_threaded(self, thread):
        """Main loop: fetch data, switch/restart/kill miners until stopped."""
        if self.rebooting:
            time.sleep(30)
        self.console.onMiningProcessStarted()
        self.switcherData = SD(self.console, thread.activeConfigFile)
        if EXTERNAL_SYNC:
            ExternalProfitServer.start(self.switcherData)
        if self.resume or self.rebooting:
            self.switcherData.loadData()
        errors = 0
        #Init vars
        self.mainMode = None
        switchtext = None
        self.cpu1 = None
        self.cpu2 = None
        self.cpuF1 = None
        self.cpuF2 = None
        prevScriptPath = None
        scriptPath = None
        prevSwitchtext = None
        globalStopped = True
        externalStopped = False
        wasStopped = False
        stopReason = None
        maxMinerFails = False
        loopMinerStatus = None
        self.external_profit_total = None
        while True:
            try:
                dataError = self.switcherData.fetchData(thread.activeConfigFile)
                self.mainMode = self.switcherData.config_json["mainMode"]
                threadStopped = self.isStopped()
                if dataError:
                    # data fetch failed: log, then retry after a short sleep
                    nowP = time.strftime("%H:%M:%S", time.localtime(time.time()))
                    hcF = HTMLBuilder.hashColorF1["FAIL"]
                    hcB = HTMLBuilder.hashColorB1["FAIL"]
                    self.switcherData.pl(nowP + " > " + dataError + (" " * 109), hcF, hcB)
                    if threadStopped:
                        break
                    else:
                        loopMinerStatus = self.waitLoop(self.switcherData.config_json["sleepSHORT"], globalStopped, externalStopped, self.switcherData, dataError=True)
                        continue
                # New Algo found to switch to!
                if self.switcherData.isSwitchToNewAlgo(threadStopped):
                    prevSwitchtext = switchtext
                    scriptPath = self.switcherData.getScriptPath()
                    switchtext = "> " + self.switcherData.maxAlgo
                    restart = not threadStopped
                    status = "SWITCH"
                    errors = 0
                # Still same Algo, check if the miner is running OK
                else:
                    switchtext = " " + self.switcherData.currentAlgo
                    self.cpu2 = self.getCPUUsages(self.switcherData.getMiner())
                    #if not globalStopped or self.mainMode == "simple":
                    stopReason = loopMinerStatus if loopMinerStatus else self.minerStopped(self.cpu1, self.cpu2, self.switcherData.getMiner(), self.switcherData.config_json)
                    restart = ( not globalStopped or self.mainMode == "simple" ) and ( stopReason in (MINER_CRASHED, MINER_FREEZED, LOCAL_START) )
                    self.cpu1 = self.cpu2
                    if restart and stopReason in ( MINER_CRASHED, MINER_FREEZED ):
                        switchtext = "x " + self.switcherData.currentAlgo
                        status = "FAIL"
                        errors += 1
                        if errors >= self.switcherData.config_json["maxErrors"]:
                            status = "MAX_FAIL"
                            prevSwitchtext = switchtext
                            maxMinerFails = True
                    else:
                        switchtext = ". " + self.switcherData.currentAlgo
                        status = "OK"
                        errors = 0
                self.switcherData.initRound(status)
                externalStopped = self.isExternalStopped(externalStopped, loopMinerStatus)
                wasStopped = self.switcherData.wasStopped
                globalStopped = self.switcherData.globalStopped = self.switcherData.globalStopped or externalStopped
                prevScriptPath = scriptPath
                if globalStopped:
                    self.kill()
                    if status != "SWITCH":
                        status = "OK"
                    switchtext = "S " + self.switcherData.currentAlgo
                if self.checkRestart(prevScriptPath, scriptPath, restart, maxMinerFails, globalStopped, wasStopped):
                    sleepTime = self.switcherData.config_json["sleepLONG"]
                    t1 = time.time()
                    if not self.switcherData.config_json["debug"]:
                        if self.mainMode == "advanced":
                            self.kill()
                        retCode = self.startMiners(scriptPath, self.switcherData.maxAlgo, status == "SWITCH")
                        #retCode = subprocess.Popen('cd /d "' + workingDirectory.encode(sys.getfilesystemencoding()) + '" && start cmd /c "' + scriptPath.encode(sys.getfilesystemencoding()) + '"', shell=True)
                        #subprocess.call('cd /d "' + unicode(workingDirectory) + '" && start cmd /c "' + unicode(scriptPath) + '"', shell=True)
                        #subprocess.call('cd /d "' + workingDirectory + '" && start cmd /c "' + scriptPath + '"', shell=True)
                        if retCode is None:
                            #switcherData.pl()
                            #switcherData.pl("Please, select a mining device first!: " + scriptPath, HTMLBuilder.COLOR_RED)
                            #question = "Please, select a mining device in the lower panel to start mining."
                            #dlg = GMD.GenericMessageDialog(self.console, question, "Unable to start your mining session...", wx.OK)
                            #dlg.ShowModal()
                            #dlg.Destroy()
                            self.switcherData.pl()
                            self.switcherData.pl("Failed to start your miner(s): " + scriptPath, HTMLBuilder.COLOR_RED)
                            breakAt = "No mining device set"
                            self.stop(True)
                            break
                        if not retCode:
                            self.switcherData.pl()
                            self.switcherData.pl("Failed to start your miner(s): " + scriptPath, HTMLBuilder.COLOR_RED)
                            breakAt = "failed miner start"
                            self.stop(True)
                            break
                    if self.waitForMinerToStart(self.switcherData.getMiner(), self.switcherData.config_json["rampUptime"]):
                        self.cpu1 = self.getCPUUsages(self.switcherData.getMiner())
                        self.activeMiner = self.switcherData.getMiner()
                        restartTime = time.time() - t1
                else:
                    sleepTime = self.switcherData.config_json["sleepSHORT"]
                timeStopped = 0 if status != "FAIL" else LOOP_SLEEP_TIME if stopReason == MINER_CRASHED else MIN_TIME_THREAD_PROBED / 2.0
                self.switcherData.executeRound(status, timeStopped, maxMinerFails, self.resume, prevSwitchtext, switchtext, self.external_profit_total)
                if self.isStopped():
                    breakAt = "after prints, thread stopped"
                    self.stop(self.mainMode == "advanced")
                    break
                if self.checkMaxFails(status, stopReason, self.switcherData):
                    breakAt = "after prints, max fails"
                    self.stop(True)
                    break
                loopMinerStatus = self.waitLoop(sleepTime, globalStopped, externalStopped, self.switcherData)
                self.switcherData.loadConfig(thread.activeConfigFile)
            except Exception as ex:
                self.switcherData.pl()
                self.switcherData.pl("Unexpected error.", HTMLBuilder.COLOR_RED)
                self.switcherData.pl()
                for line in traceback.format_exc().split('\n'):
                    self.switcherData.pl(line, HTMLBuilder.COLOR_RED)
                self.printTraceback("Unexpected error")
                err.ErrorReport().sendReport(self.console, traceback.format_exc())
                break
        self.switcherData.end()
        self.console.onMiningProcessStopped()
    def checkRestart(self, prevScriptPath, scriptPath, restart, maxMinerFails, globalStopped, wasStopped):
        """Return True when the miner process should be (re)launched."""
        return ( restart and not maxMinerFails and not globalStopped ) or \
               ( wasStopped and not globalStopped ) or \
               self.scriptChanged(prevScriptPath, scriptPath, restart, globalStopped)
    def isExternalStopped(self, externalStopped, loopMinerStatus):
        """Update the external-stop flag from the last wait-loop verdict."""
        if LOCAL_START == loopMinerStatus:
            return False
        if LOCAL_STOP == loopMinerStatus:
            return True
        return externalStopped
    def scriptChanged(self, prevScriptPath, scriptPath, restart, globalStopped):
        """True when the launch script path changed while otherwise idle."""
        return not restart and not globalStopped and ( prevScriptPath and scriptPath and (prevScriptPath != scriptPath) )
    def waitLoop(self, sleepTime, globalStopped, externalStopped, switcherData, dataError=False):
        """Sleep up to *sleepTime* seconds in LOOP_SLEEP_TIME steps.

        While sleeping, watch for a stop request, miner crash/freeze, a
        config change, and (if EXTERNAL_SYNC) a local-vs-external profit
        flip.  Returns a MINER_*/LOCAL_* status string or None.
        """
        self.cpuF1 = self.cpu1
        t_initSleep = time.time()
        while (time.time() < (t_initSleep + sleepTime)) and not self.configChangedFlag:
            if self.isStopped():
                return None
            if not dataError:
                ret = self.checkMinersInLoop(globalStopped, switcherData)
                if ret:
                    return ret
            if EXTERNAL_SYNC:
                external_profit = 0
                try:
                    content = requests.request('get', URL, timeout=TIMEOUT).content
                    external_profit = float(content.split(';')[0])
                    self.external_profit_total = int(float(content.split(';')[1]))
                except:
                    # external server unreachable: treat external profit as 0
                    pass
                local_profit = switcherData.getProfit()
                if externalStopped and local_profit > external_profit:
                    return LOCAL_START
                if not externalStopped and local_profit < external_profit:
                    return LOCAL_STOP
                #print 'external_profit = ' % external_profit
            time.sleep(LOOP_SLEEP_TIME)
        self.configChangedFlag = False
    def checkMinersInLoop(self, globalStopped, switcherData):
        """Sample the miner during the wait loop; return MINER_* on failure."""
        if self.mainMode == "advanced":
            if not globalStopped and switcherData.config_json["monitor"]:
                try:
                    self.cpuF2 = self.getCPUUsages(switcherData.getMiner())
                    if self.minerCrashed(self.cpu1, self.cpuF2, switcherData.getMiner(), switcherData.config_json):
                        return MINER_CRASHED
                    if (self.cpuF2[TIME_PROBED] - self.cpuF1[TIME_PROBED]) > MIN_TIME_THREAD_PROBED:
                        if self.minerFreezed(self.cpuF1, self.cpuF2, switcherData.getMiner(), switcherData.config_json):
                            return MINER_FREEZED
                        else:
                            self.cpuF1 = self.cpuF2
                except Exception, ex:
                    logging.exception("Loop error")
        else:
            return None if self.console.frame_myr.checkMinerCrashed() else MINER_CRASHED
    def startMiners(self, scriptPath, maxAlgo, switch):
        """Launch the miner: a shell script in advanced mode, GUI call otherwise."""
        retCode = None
        if "advanced" == self.mainMode:
            workingDirectory = scriptPath[0:scriptPath.rfind("\\")]
            # Windows-style launch of the per-algo batch script
            retCode = subprocess.call('cd /d "' + workingDirectory.encode(sys.getfilesystemencoding()) + '" && start cmd /c "' + scriptPath.encode(sys.getfilesystemencoding()) + '"', shell=True)
            return not retCode
        else:
            retCode = self.console.frame_myr.executeAlgo(maxAlgo, switch)
            return retCode
    def configChanged(self):
        """Signal the wait loop that the configuration file changed."""
        self.configChangedFlag = True
    def checkMaxFails(self, status, stopReason, switcherData):
        """On MAX_FAIL: stop the thread, optionally reboot; return True if stopped."""
        if status == "MAX_FAIL":
            if switcherData.config_json["reboot"] and (switcherData.config_json["rebootIf"] == MINER_CRASHED_OR_FREEZED or switcherData.config_json["rebootIf"] == stopReason):
                switcherData.pl()
                self.prepareReboot()
                switcherData.pl(str(switcherData.config_json["maxErrors"]) + " back to back miner " + stopReason + " ...rebooting!", HTMLBuilder.COLOR_RED)
                subprocess.call('shutdown /r')
                self.stop(True)
            else:
                switcherData.pl()
                switcherData.pl(str(switcherData.config_json["maxErrors"]) + " back to back miner " + stopReason, HTMLBuilder.COLOR_RED)
                self.stop(True)
            switcherData.log()
            return True
        return False
    def getCPUUsages(self, miner):
        """Return ({pid: user CPU time}, sample timestamp) for *miner* processes.

        Returns the bare empty dict (not a tuple) when *miner* is falsy.
        """
        cpu = {}
        timeCPUProbed = None
        if not miner:
            return cpu
        for proc in psutil.process_iter():
            try:
                proc.name()
            except:
                # process vanished or access denied: skip it
                continue
            if miner in proc.name():
                # NOTE(review): get_cpu_times() is the legacy psutil API
                cpu[proc.pid] = proc.get_cpu_times().user
                timeCPUProbed = time.time()
        return (cpu, timeCPUProbed)
    #def checkSwitchingThreadStopped(self):
    #    return self.isStopped()
    def waitForMinerToStart(self, miner, ramp_up_time):
        """Wait for the miner process to appear, then sleep ramp_up_time.

        NOTE(review): `i` is never incremented, so the 60-iteration cap
        never engages and the return value is always True; also
        len(self.cpu1) measures the (dict, timestamp) tuple, not the
        process count -- confirm intended behavior before relying on it.
        """
        if not miner:
            return False
        numThreadsPrev = 0
        i=0
        while not numThreadsPrev and i < 60:
            time.sleep(1)
            self.cpu1 = self.getCPUUsages(miner)
            numThreadsPrev = len(self.cpu1)
        time.sleep(ramp_up_time)
        return not(i == 60)
    # Returns None if the miner is running, or MINER_DIED or MINER_FREEZED
    def minerStopped(self, cpu1, cpu2, miner, config_json):
        """Classify the miner state from two CPU samples (advanced mode)."""
        if self.mainMode == "advanced":
            crashed = self.minerCrashed(cpu1, cpu2, miner, config_json)
            if crashed:
                return crashed
            else:
                return self.minerFreezed(cpu1, cpu2, miner, config_json)
        else:
            return None if self.console.frame_myr.checkMinerCrashed() else MINER_CRASHED
    # Returns None if the miner is running, or MINER_CRASHED
    def minerCrashed(self, cpu1, cpu2, miner, config_json):
        """Detect a crash: fewer (or zero) miner processes than before."""
        if not miner:
            return None
        if config_json["debug"] or not config_json["monitor"]:
            return None
        if not cpu2:
            return None
        if (not cpu2[CPU_TIME] or len(cpu2[CPU_TIME]) == 0):
            return MINER_CRASHED
        if (not cpu1 or not cpu1[CPU_TIME] or len(cpu1[CPU_TIME]) == 0):
            return None
        if not cpu2[CPU_TIME] or len(cpu2[CPU_TIME]) == 0:
            return MINER_CRASHED
        # Same number of instances runing?
        numThreadsPrev = len(cpu1[CPU_TIME])
        numThreadsNew = len(cpu2[CPU_TIME])
        if numThreadsNew < numThreadsPrev:
            return MINER_CRASHED
        return None
    # Returns None if the miner is running, or MINER_FREEZED
    def minerFreezed(self, cpu1, cpu2, miner, config_json):
        """Detect a freeze: no CPU time accrued between two distant samples."""
        if not miner:
            return None
        if config_json["debug"] or not config_json["monitor"]:
            return None
        if not cpu1 or not cpu2:
            return None
        if (cpu2[TIME_PROBED] - cpu1[TIME_PROBED]) < MIN_TIME_THREAD_PROBED:
            return None
        if (not cpu1[CPU_TIME] or len(cpu1[CPU_TIME]) == 0):
            return None
        # Any CPU usage?
        for pid in cpu1[CPU_TIME]:
            try:
                if cpu1[CPU_TIME][pid] == cpu2[CPU_TIME][pid]:
                    return MINER_FREEZED
            except KeyError:
                # pid disappeared between samples: not a freeze
                return None
        return None
    def printTraceback(self, text):
        """Print a timestamped message followed by the current traceback."""
        print time.strftime(SwitcherData.DATE_FORMAT_PATTERN, time.localtime()) + " - " + text
        print traceback.format_exc()
        #traceback.print_stack()
    # Returns MINER_CRASHED or MINER_FREEZED if the miner is not running, or the CPU usage/timestamp if it is
    def minerStoppedFinal(self, cpu1, miner, config_json):
        """Take a fresh CPU sample and classify the miner state with it."""
        cpu2 = self.getCPUUsages(miner)
        minerStatus = self.minerStopped(cpu1, cpu2, miner, config_json)
        if minerStatus in (MINER_CRASHED, MINER_FREEZED):
            return minerStatus
        else:
            return cpu2
    # Returns True if not enough time has passed between CPU usage samples to check if the miner is stopped
    #def belowTimeBetweenCPUSamplesThreshold(self, self.cpu1, self.cpu2):
    #    #init_ref_time = self.cpu1[TIME_PROBED] if ref_time is None else ref_time
    #    time_between_probes = self.cpu2[TIME_PROBED] - self.cpu1[TIME_PROBED]
    #
    #    return (time_between_probes) < MIN_TIME_THREAD_PROBED
    def prepareReboot(self):
        """Mark a reboot in progress and install a startup shortcut (Windows).

        NOTE(review): the "reboot" file handle is never closed explicitly.
        """
        io.open("reboot", 'wt').write("reboot=" + unicode(time.time()))
        if os.name == "nt":
            import win32com.client
            ws = win32com.client.Dispatch("wscript.shell")
            scut = ws.CreateShortcut(os.getenv('APPDATA') + '\\Microsoft\Windows\Start Menu\Programs\Startup\\myriadSwitcher.lnk')
            #self.htmlBuilder.pl(os.getcwd())
            scut.TargetPath = '"' + (os.getcwd() + '\\MyriadSwitcherGUI.exe"')
            scut.WorkingDirectory = os.getcwd()
            scut.Save()
        if os.name == "posix":
            pass
    def kill(self):
        """Kill the active miner (advanced mode) or ask the GUI to stop miners."""
        if self.mainMode == "advanced":
            self.killMiner(self.activeMiner) if self.activeMiner else self.killMiners()
        else:
            self.console.frame_myr.stopMiners()
    def killMiners(self):
        """Kill every known miner executable."""
        for miner in SwitcherData.MINER_CHOICES:
            self.killMiner(miner)
    def killMiner(self, miner):
        """Kill every running process whose name contains *miner*."""
        for proc in psutil.process_iter():
            try:
                proc.name()
            except:
                # process vanished or access denied: skip it
                continue
            if miner in proc.name():
                proc.kill()
    def stop(self, kill_miners=False, terminate=False):
        """Request the thread to stop; optionally kill miners and wait for the
        external profit server to shut down.  Sets a process-wide socket
        timeout as a side effect."""
        #self.htmlBuilder.pl()
        #self.htmlBuilder.pl("Stopping... ")
        socket.setdefaulttimeout(5)
        if kill_miners:
            try:
                #if self.mainMode == "advanced":
                self.kill()
            except:
                print "Failed to kill miners"
        if EXTERNAL_SYNC:
            ExternalProfitServer.force_stop()
        if terminate:
            st_count = 0
            while ExternalProfitServer.STARTED and st_count < 60:
                #httpd.setSwitcherData(switcherdata_p)
                time.sleep(1)
                st_count += 1
        #self.console.parent.onMiningProcessStopped()
        self._stop.set()
    def isStopped(self):
        """True once stop() has been called."""
        return self._stop.isSet()
|
import pipes
import os
import string
import unittest
from test.support import TESTFN, run_unittest, unlink, reap_children
# The pipes module drives /bin/sh pipelines, so these tests are POSIX-only.
if os.name != 'posix':
    raise unittest.SkipTest('pipes module only works on posix')
TESTFN2 = TESTFN + "2"  # second scratch file, used by the copy tests
# Shell command that upper-cases its input; used by most pipeline tests.
s_command = 'tr %s %s' % (string.ascii_lowercase, string.ascii_uppercase)
class SimplePipeTests(unittest.TestCase):
    """Tests for the pipes.Template shell-pipeline helper."""
    def tearDown(self):
        # remove the scratch files any test may have created
        for f in (TESTFN, TESTFN2):
            unlink(f)
    def testSimplePipe1(self):
        """Write through a STDIN_STDOUT step and check the transformed file."""
        t = pipes.Template()
        t.append(s_command, pipes.STDIN_STDOUT)
        f = t.open(TESTFN, 'w')
        f.write('hello world #1')
        f.close()
        with open(TESTFN) as f:
            self.assertEqual(f.read(), 'HELLO WORLD #1')
    def testSimplePipe2(self):
        """Copy a file through a FILEIN_FILEOUT step."""
        with open(TESTFN, 'w') as f:
            f.write('hello world #2')
        t = pipes.Template()
        t.append(s_command + ' < $IN > $OUT', pipes.FILEIN_FILEOUT)
        t.copy(TESTFN, TESTFN2)
        with open(TESTFN2) as f:
            self.assertEqual(f.read(), 'HELLO WORLD #2')
    def testSimplePipe3(self):
        """Read through a FILEIN_STDOUT step."""
        with open(TESTFN, 'w') as f:
            f.write('hello world #2')
        t = pipes.Template()
        t.append(s_command + ' < $IN', pipes.FILEIN_STDOUT)
        f = t.open(TESTFN, 'r')
        try:
            self.assertEqual(f.read(), 'HELLO WORLD #2')
        finally:
            f.close()
    def testEmptyPipeline1(self):
        # copy through empty pipe
        d = 'empty pipeline test COPY'
        with open(TESTFN, 'w') as f:
            f.write(d)
        with open(TESTFN2, 'w') as f:
            f.write('')
        t=pipes.Template()
        t.copy(TESTFN, TESTFN2)
        with open(TESTFN2) as f:
            self.assertEqual(f.read(), d)
    def testEmptyPipeline2(self):
        # read through empty pipe
        d = 'empty pipeline test READ'
        with open(TESTFN, 'w') as f:
            f.write(d)
        t=pipes.Template()
        f = t.open(TESTFN, 'r')
        try:
            self.assertEqual(f.read(), d)
        finally:
            f.close()
    def testEmptyPipeline3(self):
        # write through empty pipe
        d = 'empty pipeline test WRITE'
        t = pipes.Template()
        with t.open(TESTFN, 'w') as f:
            f.write(d)
        with open(TESTFN) as f:
            self.assertEqual(f.read(), d)
    def testQuoting(self):
        """pipes.quote must leave safe strings alone and escape unsafe ones."""
        safeunquoted = string.ascii_letters + string.digits + '@%_-+=:,./'
        unsafe = '"`$\\!'
        self.assertEqual(pipes.quote(''), "''")
        self.assertEqual(pipes.quote(safeunquoted), safeunquoted)
        self.assertEqual(pipes.quote('test file name'), "'test file name'")
        for u in unsafe:
            self.assertEqual(pipes.quote('test%sname' % u),
                             "'test%sname'" % u)
        for u in unsafe:
            self.assertEqual(pipes.quote("test%s'name'" % u),
                             "'test%s'\"'\"'name'\"'\"''" % u)
    def testRepr(self):
        """repr() should list the template's steps."""
        t = pipes.Template()
        self.assertEqual(repr(t), "<Template instance, steps=[]>")
        t.append('tr a-z A-Z', pipes.STDIN_STDOUT)
        self.assertEqual(repr(t),
                         "<Template instance, steps=[('tr a-z A-Z', '--')]>")
    def testSetDebug(self):
        """debug() toggles the debugging flag."""
        t = pipes.Template()
        t.debug(False)
        self.assertEqual(t.debugging, False)
        t.debug(True)
        self.assertEqual(t.debugging, True)
    def testReadOpenSink(self):
        # check calling open('r') on a pipe ending with
        # a sink raises ValueError
        t = pipes.Template()
        t.append('boguscmd', pipes.SINK)
        self.assertRaises(ValueError, t.open, 'bogusfile', 'r')
    def testWriteOpenSource(self):
        # check calling open('w') on a pipe ending with
        # a source raises ValueError
        t = pipes.Template()
        t.prepend('boguscmd', pipes.SOURCE)
        self.assertRaises(ValueError, t.open, 'bogusfile', 'w')
    def testBadAppendOptions(self):
        """append() must reject invalid commands, kinds and placeholders."""
        t = pipes.Template()
        # try a non-string command
        self.assertRaises(TypeError, t.append, 7, pipes.STDIN_STDOUT)
        # try a type that isn't recognized
        self.assertRaises(ValueError, t.append, 'boguscmd', 'xx')
        # shouldn't be able to append a source
        self.assertRaises(ValueError, t.append, 'boguscmd', pipes.SOURCE)
        # check appending two sinks
        t = pipes.Template()
        t.append('boguscmd', pipes.SINK)
        self.assertRaises(ValueError, t.append, 'boguscmd', pipes.SINK)
        # command needing file input but with no $IN
        t = pipes.Template()
        self.assertRaises(ValueError, t.append, 'boguscmd $OUT',
                           pipes.FILEIN_FILEOUT)
        t = pipes.Template()
        self.assertRaises(ValueError, t.append, 'boguscmd',
                           pipes.FILEIN_STDOUT)
        # command needing file output but with no $OUT
        t = pipes.Template()
        self.assertRaises(ValueError, t.append, 'boguscmd $IN',
                           pipes.FILEIN_FILEOUT)
        t = pipes.Template()
        self.assertRaises(ValueError, t.append, 'boguscmd',
                           pipes.STDIN_FILEOUT)
    def testBadPrependOptions(self):
        """prepend() must reject invalid commands, kinds and placeholders."""
        t = pipes.Template()
        # try a non-string command
        self.assertRaises(TypeError, t.prepend, 7, pipes.STDIN_STDOUT)
        # try a type that isn't recognized
        self.assertRaises(ValueError, t.prepend, 'tr a-z A-Z', 'xx')
        # shouldn't be able to prepend a sink
        self.assertRaises(ValueError, t.prepend, 'boguscmd', pipes.SINK)
        # check prepending two sources
        t = pipes.Template()
        t.prepend('boguscmd', pipes.SOURCE)
        self.assertRaises(ValueError, t.prepend, 'boguscmd', pipes.SOURCE)
        # command needing file input but with no $IN
        t = pipes.Template()
        self.assertRaises(ValueError, t.prepend, 'boguscmd $OUT',
                           pipes.FILEIN_FILEOUT)
        t = pipes.Template()
        self.assertRaises(ValueError, t.prepend, 'boguscmd',
                           pipes.FILEIN_STDOUT)
        # command needing file output but with no $OUT
        t = pipes.Template()
        self.assertRaises(ValueError, t.prepend, 'boguscmd $IN',
                           pipes.FILEIN_FILEOUT)
        t = pipes.Template()
        self.assertRaises(ValueError, t.prepend, 'boguscmd',
                           pipes.STDIN_FILEOUT)
    def testBadOpenMode(self):
        """open() accepts only 'r' and 'w'."""
        t = pipes.Template()
        self.assertRaises(ValueError, t.open, 'bogusfile', 'x')
    def testClone(self):
        """clone() returns an independent copy with equal steps and flags."""
        t = pipes.Template()
        t.append('tr a-z A-Z', pipes.STDIN_STDOUT)
        u = t.clone()
        self.assertNotEqual(id(t), id(u))
        self.assertEqual(t.steps, u.steps)
        self.assertNotEqual(id(t.steps), id(u.steps))
        self.assertEqual(t.debugging, u.debugging)
def test_main():
    """Run the pipes test suite, then reap any leftover child processes."""
    run_unittest(SimplePipeTests)
    reap_children()
if __name__ == "__main__":
    test_main()
|
import subprocess, os, urllib.request
from bs4 import BeautifulSoup
from Bio import motifs
from Bio.Alphabet.IUPAC import unambiguous_dna
from Bio.Seq import Seq
import numpy as np
def read_bed(infile, region_chrom, region_start, region_end):
    """Read intervals on *region_chrom* from a BED file.

    Returns a list of [start, end] pairs.  Intervals that do not overlap
    [region_start, region_end] are skipped; pass None for both bounds to
    disable the region filter.  Lines with fewer than three fields or
    non-numeric coordinates are ignored.
    """
    intervals = []
    # Compare against None explicitly: 0 is a legal coordinate and must not
    # be mistaken for "no bound" (the old truthiness test got this wrong).
    if region_start is not None: region_start = int(region_start)
    if region_end is not None: region_end = int(region_end)
    with open(infile, "r") as inf:
        for line in inf:
            fields = line.rstrip().split("\t")
            if len(fields) < 3: continue
            chrom = fields[0].replace('chr', '')
            if chrom != region_chrom: continue
            try:
                [start, end] = [int(i) for i in fields[1:3]]
            except ValueError:
                # non-numeric coordinates: skip this line
                continue
            # pass None as region start/end to skip this filter
            if (region_start is not None and region_end is not None) and \
                    (end <= region_start or region_end <= start):
                continue
            intervals.append([start, end])
    return intervals
def overlap(interval_list, qry_interval):
    """Return True if *qry_interval* [start, end] overlaps any interval in
    *interval_list* (closed intervals; touching endpoints count).

    Returns as soon as an overlap is found instead of scanning the whole
    list with a flag, as the original did.
    """
    for interval in interval_list:
        if qry_interval[1] < interval[0]: continue
        if interval[1] < qry_interval[0]: continue
        # some leeway could be left here one day ...
        return True
    return False
def read_binding_intervals(data_dir, agonist_file, vehicle_file, chrom, region_start, region_end):
    """Return agonist ChIP binding intervals on *chrom*, excluding any that
    overlap a vehicle-only (control) interval.

    If *vehicle_file* is falsy the agonist intervals are returned unfiltered.
    """
    # agonist
    infile = "{}/{}".format(data_dir, agonist_file)
    agonist_binding_intervals = read_bed(infile, chrom, region_start, region_end)
    if not vehicle_file: return agonist_binding_intervals
    # if we have control file, subtract regions that popped up
    # with vehicle only:
    infile = "{}/{}".format(data_dir, vehicle_file)
    vehicle_binding_intervals = read_bed(infile, chrom, region_start, region_end)
    # Build a new list rather than calling remove() while iterating the same
    # list: that skipped the element following each removal, so consecutive
    # vehicle-overlapping intervals could survive the subtraction.
    return [interval for interval in agonist_binding_intervals
            if not overlap(vehicle_binding_intervals, interval)]
def read_pfm(jaspar_motifs_file, tf_name):
    """Return the JASPAR motif named *tf_name*, or None when not present."""
    with open(jaspar_motifs_file) as handle:
        named = (m for m in motifs.parse(handle, "jaspar") if m.name == tf_name)
        return next(named, None)
def read_or_download_sequence(chipseq_regions_dir, assembly, chromosome, tf, start, end):
    """Return the genomic sequence for the region, from a local cache file
    when one exists, otherwise downloading it from UCSC and caching it."""
    seqfile = "{}/{}_{}_{}_{}_{}.txt".format(chipseq_regions_dir, tf, assembly, chromosome, start, end)
    if os.path.exists(seqfile) and os.path.getsize(seqfile) > 0:
        # cache hit: reuse the previously stored sequence
        with open(seqfile, "r") as cached:
            seq = cached.read()
    else:
        # cache miss: fetch from UCSC, store with newlines stripped
        seq = ucsc_fragment_sequence(assembly, chromosome, start, end)
        with open(seqfile, "w") as cached:
            cached.write(seq.replace("\n", ""))
    return seq
def ucsc_fragment_sequence(assembly, chrom, start, end):
    """Fetch the DNA sequence for chrom:start,end from the UCSC DAS server.

    Returns the sequence with surrounding whitespace and newlines removed,
    or None when the response could not be parsed.
    """
    if not 'chr' in chrom: chrom = 'chr'+chrom
    das_request = "http://genome.ucsc.edu/cgi-bin/das/{}/".format(assembly)
    das_request += "dna?segment={}:{},{}".format(chrom, start, end)
    response = urllib.request.urlopen(das_request)
    html = response.read()
    soup = BeautifulSoup(html, 'html.parser')
    if not soup: return None
    # the DAS reply wraps the sequence in a <dna> element
    return soup.find('dna').string.strip().replace("\n", "")
def ucsc_gene_coords(gene_name, ucsc_gene_regions_dir):
cmd = "grep -i %s %s/*" % (gene_name, ucsc_gene_regions_dir)
ret = subprocess.check_output(cmd, shell=True).decode('utf-8').rstrip()
if not ret or len(ret) == 0:
print("no entry for %s found in %s " % (gene_name, ucsc_gene_regions_dir))
exit()
lines = []
for line in ret.split("\n"):
fields = line.split("\t")
[infile, refseq_id] = fields[0].split(":")
lines.append(fields[1:])
if len(lines) == 0:
print("no entry for %s found in %s " % (gene_name, ucsc_gene_regions_dir))
return None, None
if len(lines) == 2:
print("more than one entry found for %s found in %s " % (gene_name, ucsc_gene_regions_dir))
return None, None
# we assume certain format in the file name, containing the chromosome number: e.g. chr18.csv
chromosome = infile.split("/")[-1].replace(".csv", "")
[name, strand, txStart, txEnd] = lines[0]
return chromosome, strand, [int(txStart), int(txEnd)]
def parse_alignment(maf_file):
    """Parse MAF 's' lines into per-assembly coordinates and sequences.

    Returns (assemblies, address, seq) where *assemblies* preserves the
    order of first appearance, *address* maps assembly -> [chrom, from, to,
    strand] in forward-strand coordinates, and *seq* maps assembly to the
    concatenated (gapped) aligned sequence across all blocks.
    """
    infile = open(maf_file, "r")
    assemblies = []
    address = {}
    seq = {}
    for line in infile:
        if line[0] != 's': continue
        fields = line.rstrip().split()
        if len(fields)!=7: continue
        [src, start, size, strand, src_size, sequence] = fields[1:7]
        [start, size, src_size] = [int(i) for i in [start, size, src_size]]
        # src is "<assembly>.<chrom>"; chrom itself may contain dots
        fields = src.split(".")
        assembly = fields[0]
        chrom = ".".join(fields[1:]).replace("_","")
        if strand=='+':
            rfrom = start
            rto = start + size -1
        else:
            # minus-strand starts are relative to the reverse complement
            rto = src_size - start #+ 1 no "+1" bcs in ucsc we count from 0
            rfrom = rto - size + 1
        if not assembly in assemblies:
            assemblies.append(assembly)
            address[assembly] = [chrom, rfrom, rto, strand]
            seq[assembly] = sequence
        else:
            # extend the known region and append this block's sequence
            if strand=="+":
                address[assembly][2] = rto
            else:
                address[assembly][1] = rfrom
            seq[assembly] += sequence
    infile.close()
    return assemblies, address, seq
def get_alignment(species, assembly, chrom, region_from, region_to, scratch):
    """Extract the multiple alignment overlapping chrom:region_from-region_to.

    Writes a one-line BED file into *scratch*, runs UCSC's mafsInRegion
    against the pre-downloaded MAF for *chrom*, parses the extracted MAF
    and returns [assemblies, address, seq] (see parse_alignment).  The
    temporary BED/MAF files are removed afterwards.
    """
    # from https://hgdownload.soe.ucsc.edu/admin/exe/linux.x86_6
    maf_region_extraction_tool = "/usr/bin/mafsInRegion"
    # mafs come from here http://hgdownload.cse.ucsc.edu/downloads.html
    # http://hgdownload.cse.ucsc.edu/goldenPath/hg19/multiz100way/
    # or whichever species or assembly appropriate
    if not 'chr' in chrom: chrom = "chr"+chrom
    maf_file = "/storage/databases/ucsc/mafs/{}/{}/{}.maf".format(species, assembly, chrom)
    for dep in [maf_region_extraction_tool, maf_file, scratch]:
        if not os.path.exists(dep):
            print(dep, "not found")
            exit()
    if not os.path.isdir(scratch):
        # fixed: previously printed the builtin `dir`, not the offending path
        print(scratch, "is not directory")
        exit()
    bed_in = "{}/{}.bed".format(scratch, os.getpid())
    maf_out = "{}/{}.maf".format(scratch, os.getpid())
    with open(bed_in,"w") as outf:
        # it looks like mafsInRegion counts from 0 and takes the region_to as a non-inclusive upper limit
        outf.write("{} {} {}\n".format(chrom, region_from-1, region_to))
    cmd = "{} {} {} {}".format(maf_region_extraction_tool, bed_in, maf_out, maf_file)
    subprocess.call(cmd, shell=True)
    assemblies, address, seq = parse_alignment(maf_out)
    os.remove(maf_out)
    os.remove(bed_in)
    return [assemblies, address, seq]
def remove_all_gaps(almt):
    """Delete alignment columns that are gaps ('-') in every sequence.

    *almt* maps name -> aligned sequence and is modified in place.  Only
    positions up to the shortest sequence's length are considered, so
    trailing excess in longer sequences is dropped as well.
    """
    width = min(len(s) for s in almt.values())
    # a column is removable when no sequence has a non-gap character there
    keep = [any(s[i] != '-' for s in almt.values()) for i in range(width)]
    for name, s in almt.items():
        almt[name] = ''.join(s[i] for i in range(width) if keep[i])
if __name__ == "__main__":
    # print(ucsc_fragment_sequence('mm10',8, 57805369, 57805386))
    #print(ucsc_gene_coords('Hand2', "/storage/databases/ucsc/gene_ranges/mouse/mm9"))
    ##############################
    # careful parsing maf:
    # from https://genome.ucsc.edu/FAQ/FAQformat.html#format5
    # The "s" lines together with the "a" lines define a multiple alignment; the fields
    # are src | start | size | strand | srcSize | sequence
    # The start of the aligning region in the source sequence is a zero-based number.
    # If the strand field is "-" then this is the start relative to the reverse-complemented source sequence.
    # That is - it should be subtracted from the size column
    # https://raw.githubusercontent.com/bxlab/bx-python/master/scripts/maf_to_fasta.py
    # does not take this into account
    # /usr/bin/maf_parse -s 58814511 -e 58814525 /storage/databases/ucsc/mafs/mouse/mm9/chr8.maf
    # a score=938862.000000
    # s mm9.chr8                58814510 15 + 131738871 G-AT-GC-ATTTTGTCTT
    # s cavPor2.scaffold_280174    91854 13 +     98733 A-AT-GA-ATTACATC--
    # q cavPor2.scaffold_280174                         9-99-99-99999999--
    # s rn4.chr16               52832882 15 -  90238779 G-AT-AC-CTTATGTCTT
    # q rn4.chr16                                       9-99-99-9999999999
    # s hg18.chr4               15657414 15 - 191273063 T-AT-GA-ATTCTGTCTT
    # s panTro2.chr4            15951551 15 - 194897272 T-AT-GA-ACTCTGTCTT
    # incidentially we discover that mafsInRegion (http://hgdownload.soe.ucsc.edu/admin/exe/linux.x86_64/mafsInRegion)
    # is significantly faster
    # fixed: get_alignment takes exactly six arguments; the stray seventh
    # (output-file) argument previously passed here raised TypeError
    get_alignment('mouse', 'mm10', 'chr8', 57805369, 57805386, '/home/ivana/scratch')
|
class RationalNumber:
    """Toy rational number demonstrating Python operator overloading.

    NOTE: the arithmetic operators mutate ``self`` component-wise and
    return None (they are demonstrations, not real rational arithmetic),
    and the comparison operators look at numerators only.
    """

    def __init__(self, numerator, denominator):
        # values are stored as given; no normalisation or gcd reduction
        self.numerator = numerator
        self.denominator = denominator

    def __str__(self):
        return "Numerator: " + str(self.numerator) + " Denominator: " + str(self.denominator)

    def __add__(self, other):
        print("Add in invoked")
        self.numerator = self.numerator + other.numerator
        # bugfix: the original added other.numerator to the denominator
        self.denominator = self.denominator + other.denominator

    def __sub__(self, other):
        print("Sub in invoked")
        self.numerator = self.numerator - other.numerator
        self.denominator = self.denominator - other.denominator

    def __mul__(self, other):
        print("Mul in invoked")
        self.numerator = self.numerator * other.numerator
        self.denominator = self.denominator * other.denominator

    def __div__(self, other):
        print("Div in invoked")
        self.numerator = self.numerator / other.numerator
        self.denominator = self.denominator / other.denominator

    # Python 3 spells the '/' hook __truediv__; alias it so the class
    # behaves the same under both interpreter generations.
    __truediv__ = __div__

    def __lt__(self, other):
        print("Less-Than in invoked")
        # deliberately compares numerators only, like the original demo
        return self.numerator < other.numerator

    def __eq__(self, other):
        print("Equal in invoked")
        return self.numerator == other.numerator

    def __del__(self):
        print("Del is invoked")
        self.numerator = None
        self.denominator = None
# Demonstration driver: exercise each overloaded operator.  The operators
# mutate their left operand in place, so r1 changes after every operation.
# print() with a single argument behaves identically under Python 2 and 3.
r1 = RationalNumber(10, 5)
r2 = RationalNumber(20, 10)
print(r1)
print(r2)
r1 + r2
print(r1)
print(r2)
r1 - r2
print(r1)
print(r2)
r1 * r2
print(r1)
print(r2)
r1 / r2
print(r1)
print(r2)
# __del__ is invoked explicitly purely to demonstrate the hook; it will run
# again when the objects are eventually garbage collected.
r1.__del__()
r2.__del__()
print(r1)
print(r2)
r3 = RationalNumber(10, 20)
r4 = RationalNumber(20, 40)
# no __gt__ is defined, so r3 > r4 is answered via the reflected __lt__
if r3 > r4:
    print("R3 is greater")
else:
    print("R4 is greater")
if r3 == r4:
    print("R3 & R4 are equal")
else:
    print("R3 & R4 are not equal")
|
from __future__ import absolute_import
import errno
import msvcrt
import os
import re
import stat
import sys
import tempfile
import time
from typing import IO, Optional
import bindings
from edenscmnative import osutil
from . import encoding, error, pycompat, win32, winutil
from .i18n import _
try:
# pyre-fixme[21]: Could not find `_winreg`.
import _winreg as winreg
winreg.CloseKey
except ImportError:
import winreg
# Thin aliases: re-export the win32/winutil/bindings implementations under
# the platform-neutral names the rest of the codebase expects.
getfstype = bindings.fs.fstype
executablepath = win32.executablepath
getmaxrss = win32.getmaxmemoryusage
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
nlinks = win32.nlinks
oslink = win32.oslink
samedevice = win32.samedevice
samefile = win32.samefile
setsignalhandler = win32.setsignalhandler
split = os.path.split
testpid = win32.testpid
unlink = win32.unlink
checkosfilename = winutil.checkwinfilename
# fixed value exposed for interface parity with the posix module
umask = 0o022
class mixedfilemodewrapper(object):
    """Wraps a file handle when it is opened in read/write mode.

    fopen() and fdopen() on Windows have a specific-to-Windows requirement
    that files opened with mode r+, w+, or a+ make a call to a file positioning
    function when switching between reads and writes. Without this extra call,
    Python will raise a not very intuitive "IOError: [Errno 0] Error."

    This class wraps posixfile instances when the file is opened in read/write
    mode and automatically adds checks or inserts appropriate file positioning
    calls when necessary.
    """

    # markers recording which kind of operation was performed last
    OPNONE = 0
    OPREAD = 1
    OPWRITE = 2

    def __init__(self, fp):
        # bypass our own __setattr__, which would otherwise forward to fp
        object.__setattr__(self, r"_fp", fp)
        object.__setattr__(self, r"_lastop", 0)

    def __enter__(self):
        return self._fp.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._fp.__exit__(exc_type, exc_val, exc_tb)

    def __getattr__(self, name):
        return getattr(self._fp, name)

    def __setattr__(self, name, value):
        return self._fp.__setattr__(name, value)

    def _noopseek(self):
        # no-op positioning call that satisfies the Windows CRT requirement
        self._fp.seek(0, os.SEEK_CUR)

    def seek(self, *args, **kwargs):
        object.__setattr__(self, r"_lastop", self.OPNONE)
        return self._fp.seek(*args, **kwargs)

    def write(self, d):
        if self._lastop == self.OPREAD:
            self._noopseek()
        object.__setattr__(self, r"_lastop", self.OPWRITE)
        return self._fp.write(d)

    def writelines(self, *args, **kwargs):
        if self._lastop == self.OPREAD:
            # bugfix: this used to call the misspelled self._noopeseek(),
            # raising AttributeError on the first read->writelines switch
            self._noopseek()
        object.__setattr__(self, r"_lastop", self.OPWRITE)
        return self._fp.writelines(*args, **kwargs)

    def read(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()
        object.__setattr__(self, r"_lastop", self.OPREAD)
        return self._fp.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()
        object.__setattr__(self, r"_lastop", self.OPREAD)
        return self._fp.readline(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()
        object.__setattr__(self, r"_lastop", self.OPREAD)
        return self._fp.readlines(*args, **kwargs)
class fdproxy(object):
    """Wraps osutil.posixfile() to override the name attribute to reflect the
    underlying file name.
    """

    def __init__(self, name, fp):
        self.name = name
        self._fp = fp

    def __getattr__(self, attr):
        # anything not defined on the proxy is served by the wrapped file
        return getattr(self._fp, attr)

    def __iter__(self):
        return iter(self._fp)

    def __enter__(self):
        self._fp.__enter__()
        # hand back the proxy itself (not the raw file) so the overridden
        # name remains visible inside `with` blocks
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self._fp.__exit__(exc_type, exc_value, traceback)
def posixfile(name: str, mode: str = "r", buffering: int = -1) -> "IO":
    """Open a file with even more POSIX-like semantics.

    Wraps osutil.posixfile(); on Python 3 an fdproxy makes ``.name``
    report the path instead of the raw descriptor, and _fixseek() applies
    the Windows append/read-write positioning fix-ups.  A WindowsError is
    translated into a friendlier IOError that carries the file name.
    """
    try:
        fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
        # PyFile_FromFd() ignores the name, and seems to report fp.name as the
        # underlying file descriptor.
        if sys.version_info[0] >= 3:
            fp = fdproxy(name, fp)
        return _fixseek(fp, mode)
    # pyre-fixme[10]: Name `WindowsError` is used but not defined.
    except WindowsError as err:
        # convert to a friendlier exception
        raise IOError(err.errno, "%s: %s" % (name, encoding.strtolocal(err.strerror)))
def fdopen(fd, mode="r", bufsize=-1, **kwargs):
    """os.fdopen() replacement that also applies the Windows seek fix-ups."""
    fp = os.fdopen(fd, mode, bufsize, **kwargs)
    return _fixseek(fp, mode)
def _fixseek(fp, mode):
    """Fix seek related issues for files with read+write mode on Windows,
    by wrapping it in mixedfilemodewrapper.
    """
    # The position when opening in append mode is implementation defined, so
    # make it consistent with other platforms, which position at EOF.
    if "a" in mode:
        fp.seek(0, os.SEEK_END)
    if "+" in mode:
        return mixedfilemodewrapper(fp)
    return fp
# fast native directory listing; yields (name, kind, stat) tuples
listdir = osutil.listdir
class winstdout(object):
    """stdout on windows misbehaves if sent through a pipe"""
    def __init__(self, fp):
        self.fp = fp
    def __getattr__(self, key):
        # delegate everything we do not override to the real stream
        return getattr(self.fp, key)
    def close(self):
        try:
            self.fp.close()
        except IOError:
            pass
    def write(self, s):
        try:
            # This is workaround for "Not enough space" error on
            # writing large size of data to console.
            limit = 16000
            l = len(s)
            start = 0
            # softspace: legacy Python 2 print-statement bookkeeping
            self.softspace = 0
            while start < l:
                end = start + limit
                self.fp.write(s[start:end])
                start = end
        except IOError as inst:
            # only an IOError with errno 0 is translated to a broken pipe;
            # any other errno is a real error and is re-raised
            if inst.errno != 0:
                raise
            self.close()
            raise IOError(errno.EPIPE, "Broken pipe")
    def flush(self):
        try:
            return self.fp.flush()
        except IOError as inst:
            if inst.errno != errno.EINVAL:
                raise
            raise IOError(errno.EPIPE, "Broken pipe")
def _is_win_9x():
    """return true if run on windows 95, 98 or me."""
    try:
        # index 3 is the platform field; 1 identifies the 9x family
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-Windows interpreter: fall back to sniffing COMSPEC
        return "command" in encoding.environ.get("comspec", "")
def openhardlinks():
    # hard links are only trusted outside the Windows 9x family
    return not _is_win_9x()
def parsepatchoutput(output_line):
    """Extract the file name from a "patching file ..." line of patch(1)."""
    # skip the 14-character literal prefix "patching file "
    filename = output_line[14:]
    if filename[0] == "`":
        # GNU patch may quote the name as `...'; drop the wrapping quotes
        filename = filename[1:-1]
    return filename
def sshargs(sshcmd, host, user, port):
    """Build argument list for ssh or Plink.

    Returns a single shell-quoted argument string; raises error.Abort when
    the host/user combination would be parsed as an option (leading - or /).
    """
    # plink spells the port option -P while OpenSSH uses -p; conditional
    # expressions replace the error-prone `and/or` ternary idiom
    pflag = "-P" if "plink" in sshcmd.lower() else "-p"
    args = "%s@%s" % (user, host) if user else host
    if args.startswith("-") or args.startswith("/"):
        raise error.Abort(
            _("illegal ssh hostname or username starting with - or /: %s") % args
        )
    args = shellquote(args)
    if port:
        args = "%s %s %s" % (pflag, shellquote(port), args)
    return args
def setflags(f: str, l: bool, x: bool) -> None:
    # no-op: symlink/executable permission flags are not applied here
    pass
def copymode(src, dst, mode=None):
    # no-op: there are no POSIX permission bits to copy on this platform
    pass
def checkexec(path: str) -> bool:
    # executable-bit support is always reported as unavailable
    return False
def checklink(path: str) -> bool:
    # symlink support is always reported as unavailable
    return False
def setbinary(fd):
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    fno = getattr(fd, "fileno", None)
    if fno is not None and fno() >= 0:
        msvcrt.setmode(fno(), os.O_BINARY)
def pconvert(path):
    """Convert an OS-native path into a /-separated one."""
    return path.replace(pycompat.ossep, "/")
def localpath(path):
    """Convert a /-separated path into the native Windows form."""
    return path.replace("/", "\\")
def normpath(path):
    """Normalize a path and return it with / separators."""
    return pconvert(os.path.normpath(path))
def normcase(path):
    return encoding.upper(path) # NTFS compares via upper()
# case-folding direction used by normcase (upper-casing, like NTFS)
normcasespec = encoding.normcasespecs.upper
normcasefallback = encoding.upperfallback
def samestat(s1, s2):
    # stat results carry no usable inode identity here, so never claim sameness
    return False
# lazily-compiled regexes used by shellquote()
_quotere = None
_needsshellquote = None
def shellquote(s):
    r"""
    >>> shellquote(br'C:\Users\xyz')
    '"C:\\Users\\xyz"'
    >>> shellquote(br'C:\Users\xyz/mixed')
    '"C:\\Users\\xyz/mixed"'
    >>> # Would be safe not to quote too, since it is all double backslashes
    >>> shellquote(br'C:\\Users\\xyz')
    '"C:\\\\Users\\\\xyz"'
    >>> # But this must be quoted
    >>> shellquote(br'C:\\Users\\xyz/abc')
    '"C:\\\\Users\\\\xyz/abc"'
    """
    global _quotere
    if _quotere is None:
        _quotere = re.compile(r'(\\*)("|\\$)')
    global _needsshellquote
    if _needsshellquote is None:
        # ":" is also treated as "safe character", because it is used as a part
        # of path name on Windows. "\" is also part of a path name, but isn't
        # safe because shlex.split() (kind of) treats it as an escape char and
        # drops it. It will leave the next character, even if it is another
        # "\".
        _needsshellquote = re.compile(r"[^a-zA-Z0-9._:/-]").search
    if s and not _needsshellquote(s) and not _quotere.search(s):
        # "s" shouldn't have to be quoted
        return s
    # double any backslash run preceding a quote (or string end) and
    # escape the quote itself, then wrap the whole string in quotes
    return '"%s"' % _quotere.sub(r"\1\1\\\2", s)
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # Python 2.7.1 and newer perform this extra level of quoting themselves
    if sys.version_info >= (2, 7, 1):
        return cmd
    return '"%s"' % cmd
def popen(command, mode="r"):
    """os.popen() wrapper that keeps stdout usable.

    Work around "popen spawned process may not write to stdout
    under windows" - http://bugs.python.org/issue1366
    """
    # discard stderr so it cannot interleave with the captured stdout
    command += " 2> %s" % os.devnull
    return os.popen(quotecommand(command), mode)
def explainexit(code):
    """Return (message, exitcode) describing a process exit status."""
    return _("exited with status %d") % code, code
def isowner(st):
    # ownership checks are not performed here; every file counts as ours
    return True
def findexe(command):
    """Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned."""
    pathext = encoding.environ.get("PATHEXT", ".COM;.EXE;.BAT;.CMD")
    pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
    if os.path.splitext(command)[1].lower() in pathexts:
        # the command already carries a recognized extension; search as-is
        pathexts = [""]
    def findexisting(pathcommand):
        "Will append extension (if needed) and return existing file"
        for ext in pathexts:
            executable = pathcommand + ext
            if os.path.exists(executable):
                return executable
        return None
    if pycompat.ossep in command:
        # explicit path given - do not consult PATH
        return findexisting(command)
    for path in encoding.environ.get("PATH", "").split(pycompat.ospathsep):
        executable = findexisting(os.path.join(path, command))
        if executable is not None:
            return executable
    # last resort: expand ~ and environment variables in the command itself
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
# file kinds statfiles() reports on; anything else yields None
_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
    """Stat each file in files. Yield each stat, or None if a file
    does not exist or has a type we don't care about.

    Cluster and cache stat per directory to minimize number of OS stat calls."""
    dircache = {} # dirname -> filename -> status | None if file does not exist
    getkind = stat.S_IFMT
    for nf in files:
        nf = normcase(nf)
        dir, base = os.path.split(nf)
        if not dir:
            dir = "."
        cache = dircache.get(dir, None)
        if cache is None:
            try:
                # one listdir per directory replaces a stat call per file
                dmap = dict(
                    [
                        (normcase(n), s)
                        for n, k, s in listdir(dir, True)
                        if getkind(s.st_mode) in _wantedkinds
                    ]
                )
            except OSError as err:
                # Python >= 2.5 returns ENOENT and adds winerror field
                # EINVAL is raised if dir is not a directory.
                if err.errno not in (errno.ENOENT, errno.EINVAL, errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dir, dmap)
        yield cache.get(base, None)
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user.
    Always None here: uid-to-name resolution is not implemented."""
    return None
def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group.
    Always None here: gid-to-name resolution is not implemented."""
    return None
def removedirs(name: str) -> None:
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    # refuse to touch a non-empty leaf directory
    if listdir(name):
        return
    os.rmdir(name)
    head, tail = os.path.split(name)
    if not tail:
        head, tail = os.path.split(head)
    # walk upwards, pruning now-empty parents until one is non-empty
    # or can no longer be removed
    while head and tail:
        try:
            if listdir(head):
                return
            os.rmdir(head)
        except (ValueError, OSError):
            break
        head, tail = os.path.split(head)
def rename(src: str, dst: str) -> None:
    """Atomically rename file src to dst, replacing dst if it exists

    Note that this is only really atomic for files (not dirs) on the
    same volume"""
    try:
        win32.movefileex(src, dst)
    except OSError as e:
        if e.errno != errno.EEXIST and e.errno != errno.EACCES:
            raise
        # fall back to delete-then-rename; no longer atomic, but unblocks
        # the common case where the destination already exists or is busy
        unlink(dst)
        os.rename(src, dst)
def syncfile(fp):
    """Best-effort flush of *fp* all the way down to permanent storage.

    Failures are deliberately ignored; see the comments in the posix
    implementation of syncdir for the reasoning behind the best-effort
    semantics.
    """
    try:
        fp.flush()
        os.fsync(fp.fileno())
    except (IOError, OSError):
        # nothing to do - this is only a best-effort attempt
        pass
def syncdir(dirpath):
    """Makes best effort attempt to make sure previously issued
    renames where target is a file immediately inside the specified
    dirpath is persisted to a permanent storage device."""
    # See comments in posix implementation for discussion on this topic.
    # Do nothing.
def gethgcmd(argv=sys.argv):
    """Return the command list used to re-launch the current process."""
    return [sys.executable] + argv[:1]
def groupmembers(name):
    # Don't support groups on Windows for now
    raise KeyError
def isexec(f):
    # executable bits are never reported on this platform
    return False
class cachestat(object):
    """Snapshot of a file's Windows metadata used to detect changes.

    Two cachestats compare equal when all relevant fields of the file
    information (attributes, timestamps, size, volume serial and file
    index) match; a missing file is represented by ``fi = None``.
    """
    def __init__(self, path):
        if path is None:
            self.fi = None
        else:
            try:
                self.fi = win32.getfileinfo(path)
            except OSError as ex:
                if ex.errno == errno.ENOENT:
                    # missing file: compares equal to other missing files
                    self.fi = None
                else:
                    raise
    # keep identity-based hashing; equality below is only for change detection
    __hash__ = object.__hash__
    def __eq__(self, other):
        try:
            lhs = self.fi
            rhs = other.fi
            if lhs is None or rhs is None:
                return lhs is None and rhs is None
            return (
                lhs.dwFileAttributes == rhs.dwFileAttributes
                and lhs.ftCreationTime.dwLowDateTime == rhs.ftCreationTime.dwLowDateTime
                and lhs.ftCreationTime.dwHighDateTime
                == rhs.ftCreationTime.dwHighDateTime
                and lhs.ftLastWriteTime.dwLowDateTime
                == rhs.ftLastWriteTime.dwLowDateTime
                and lhs.ftLastWriteTime.dwHighDateTime
                == rhs.ftLastWriteTime.dwHighDateTime
                and lhs.dwVolumeSerialNumber == rhs.dwVolumeSerialNumber
                and lhs.nFileSizeHigh == rhs.nFileSizeHigh
                and lhs.nFileSizeLow == rhs.nFileSizeLow
                and lhs.nFileIndexHigh == rhs.nFileIndexHigh
                and lhs.nFileIndexLow == rhs.nFileIndexLow
            )
        except AttributeError:
            # `other` is not a cachestat-like object
            return False
    def __ne__(self, other):
        return not self == other
def lookupreg(key, valname=None, scope=None):
    """Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).

    Returns the value as a local string, or None when the key or value
    is missing in every scope.
    """
    if scope is None:
        scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        scope = (scope,)
    for s in scope:
        try:
            # close the key handle deterministically instead of leaking it
            hkey = winreg.OpenKey(s, key)
            try:
                val = winreg.QueryValueEx(hkey, valname)[0]
            finally:
                winreg.CloseKey(hkey)
            # never let a Unicode string escape into the wild
            return encoding.unitolocal(val)
        except EnvironmentError:
            pass
# glob patterns on the command line must be expanded by us on this platform
expandglobs = True
def statislink(st):
    """check whether a stat result is a symlink"""
    return False
def statisexec(st):
    """check whether a stat result is an executable file"""
    return False
def bindunixsocket(sock, path):
    # AF_UNIX binding is not provided here
    raise NotImplementedError("unsupported platform")
def _cleanuptemplockfiles(dirname: str, basename: str) -> None:
for susp in os.listdir(dirname):
if not susp.startswith(basename) or not susp.endswith(".tmplock"):
continue
# Multiple processes might be trying to take the lock at the same
# time, they will all create a .tmplock file, let's not remove a file
# that was just created to let the other process continue.
try:
stat = os.lstat(susp)
except OSError:
continue
now = time.mktime(time.gmtime())
filetime = time.mktime(time.gmtime(stat.st_mtime))
if now > filetime + 10:
continue
try:
os.unlink(os.path.join(dirname, susp))
except WindowsError:
pass
def makelock(info: str, pathname: str, checkdeadlock: bool = None) -> "Optional[int]":
    """Create a lock file at *pathname* whose content is *info*.

    The content is written and fsynced to a unique .tmplock file first
    and then renamed into place so the lock appears atomically.
    checkdeadlock is accepted for interface compatibility but unused
    here.  Returns None (no descriptor is kept open on this platform).
    """
    dirname = os.path.dirname(pathname)
    basename = os.path.basename(pathname)
    # sweep up leftovers from crashed lockers before adding our own file
    _cleanuptemplockfiles(dirname, basename)
    fd, tname = tempfile.mkstemp(
        suffix=".tmplock", prefix="%s.%i." % (basename, os.getpid()), dir=dirname
    )
    os.write(fd, pycompat.encodeutf8(info))
    os.fsync(fd)
    os.close(fd)
    try:
        os.rename(tname, pathname)
    except WindowsError:
        # rename failed (e.g. the lock already exists) - clean up our
        # temporary file and let the caller see the error
        os.unlink(tname)
        raise
def readlock(pathname: str) -> str:
    """Return the lock's content: the symlink target when the lock is a
    symlink, otherwise the text stored in the lock file."""
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported - fall
        # through and read the path as a regular file instead
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
def releaselock(_lockfd: "Optional[int]", pathname: str) -> None:
    """Drop the lock by deleting its marker file; _lockfd is unused here."""
    os.remove(pathname)
if not encoding.outputencoding:
    # The Rust IO requires UTF-8 output; default to it when nothing is set.
    encoding.outputencoding = "utf-8"
|
import fauxfactory
import pytest
from cfme.base.credential import Credential
from cfme.cloud.tenant import Tenant
from cfme.infrastructure.host import Host
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.log import logger
@pytest.fixture(scope="session")
def category(appliance):
    """
    Returns random created category object
    Object can be used in all test run session
    """
    cg = appliance.collections.categories.create(
        name=fauxfactory.gen_alpha(8).lower(),
        description=fauxfactory.gen_alphanumeric(length=32),
        display_name=fauxfactory.gen_alphanumeric(length=32)
    )
    yield cg
    # teardown: safe to call even if a test already removed the category
    cg.delete_if_exists()
@pytest.fixture(scope="session")
def tag(category):
    """
    Returns random created tag object
    Object can be used in all test run session
    """
    tag = category.collections.tags.create(
        name=fauxfactory.gen_alpha(8).lower(),
        display_name=fauxfactory.gen_alphanumeric(length=32)
    )
    yield tag
    # teardown: safe to call even if a test already removed the tag
    tag.delete_if_exists()
@pytest.fixture(scope="module")
def role(appliance):
    """
    Returns role object used in test module
    """
    role = appliance.collections.roles.create(
        name='role{}'.format(fauxfactory.gen_alphanumeric()),
        vm_restriction='None')
    yield role
    # teardown: safe to call even if a test already removed the role
    role.delete_if_exists()
@pytest.fixture(scope="module")
def group_with_tag(appliance, role, category, tag):
    """
    Returns group object with set up tag filter used in test module
    """
    group = appliance.collections.groups.create(
        description='grp{}'.format(fauxfactory.gen_alphanumeric()),
        role=role.name,
        tag=([category.display_name, tag.display_name], True)
    )
    yield group
    group.delete_if_exists()
@pytest.fixture(scope="module")
def user_restricted(appliance, group_with_tag, new_credential):
    """
    Returns restricted user object assigned
    to group with tag filter used in test module
    """
    user = appliance.collections.users.create(
        name='user{}'.format(fauxfactory.gen_alphanumeric()),
        credential=new_credential,
        email='xyz@redhat.com',
        groups=[group_with_tag],
        cost_center='Workload',
        value_assign='Database')
    yield user
    # teardown: safe to call even if a test already removed the user
    user.delete_if_exists()
@pytest.fixture(scope="module")
def new_credential():
    """
    Returns credentials object used for new user in test module
    """
    # Todo remove .lower() for principal after 1486041 fix
    return Credential(
        principal='uid{}'.format(fauxfactory.gen_alphanumeric().lower()), secret='redhat')
@pytest.fixture(scope='function')
def check_item_visibility(tag, user_restricted):
    """Returns a callable asserting whether a (possibly tagged) item is
    visible to the restricted user."""
    def _check_item_visibility(vis_object, vis_expect):
        """
        Args:
            vis_object: the object with a tag to check
            vis_expect: bool, True if tag should be visible

        Returns: None
        """
        if vis_expect:
            vis_object.add_tag(tag=tag)
        else:
            # strip the tag (when present) so the object should disappear
            tags = vis_object.get_tags()
            tag_assigned = any(
                object_tags.category.display_name == tag.category.display_name and
                object_tags.display_name == tag.display_name for object_tags in tags
            )
            if tag_assigned:
                vis_object.remove_tag(tag=tag)
        with user_restricted:
            try:
                if isinstance(vis_object, Host):
                    # need to remove the link to the provider from the host,
                    # so the navigation goes Compute -> Infrastructure -> Hosts, not Providers
                    vis_object.parent.filters.update({'provider': None})
                if isinstance(vis_object, Tenant):
                    # removing links to the provider so the navigation goes
                    # Compute -> Clouds -> Tenants, not Providers
                    vis_object.provider = None
                    vis_object.parent.filters.update({'provider': None})
                navigate_to(vis_object, 'Details')
                actual_visibility = True
            except Exception:
                # a navigation failure is interpreted as "not visible"
                logger.debug('Tagged item is not visible')
                actual_visibility = False
        assert actual_visibility == vis_expect
    return _check_item_visibility
|
import os
import sys
# Re-encode a text file in place: pairs of lines ("{number}", text) are
# merged into "number // text|with|pipes" lines.  Python 2 script.
reload(sys)  # Python 2 only: required before setdefaultencoding is visible
sys.setdefaultencoding('UTF8')  # @UndefinedVariable
try:
    namearchive = raw_input("Ingrese el nombre del archivo que desea transformar: ").replace(".txt", "")
    archivo = open(namearchive + ".txt", "r")
except IOError:
    # narrowed from a bare except: only a missing/unreadable file should
    # produce this message (a bare except also swallowed KeyboardInterrupt)
    print("El archivo no existe!")
    exit()
l = []
nw = []
for i in archivo: l.append(i)
for j in range(0, len(l), 2):
    # even lines carry the "{number}" tag, the following line the text
    num = l[j].replace("{", "").replace("}", "").replace("\n", "")
    txt = l[j + 1].replace(" ", "|")
    linea = num + " // " + txt
    nw.append(linea)
print("Archivo importado correctamente")
archivo.close()
# NOTE(review): the transformed lines overwrite the original file in place
archivo2 = open(namearchive + ".txt", "w")
for i in nw:
    archivo2.write(i)
archivo2.close()
try:
    os.remove("_import.pyc")
except OSError:
    pass
|
class RenameFieldForExact(object):
    """Query-expression modifier that redirects exact-match relations
    ('==' or 'exact') to the untokenized variant of a field.

    Entries in untokenizedFields ending in '*' act as prefixes; all other
    entries must match the renamed field exactly.
    """

    def __init__(self, untokenizedFields, untokenizedPrefix):
        self._untokenizedPrefix = untokenizedPrefix
        self._exactNames = [name for name in untokenizedFields if not name.endswith('*')]
        # "field*" entries match any renamed field starting with "field"
        self._wildcardPrefixes = [name[:-1] for name in untokenizedFields if name.endswith('*')]

    def canModify(self, expression):
        if expression.relation not in {'==', 'exact'}:
            return False
        return self._hasUntokenizedRenaming(expression.index)

    def modify(self, expression):
        expression.index = self._untokenizedPrefix + expression.index

    def _hasUntokenizedRenaming(self, fieldname):
        candidate = self._untokenizedPrefix + fieldname
        if candidate in self._exactNames:
            return True
        return any(candidate.startswith(prefix) for prefix in self._wildcardPrefixes)

    def filterAndModifier(self):
        return self.canModify, self.modify
|
import matplotlib as mpl
from ..options import get_option
from .theme import theme
from .elements import element_rect
class theme_matplotlib(theme):
    """
    The default matplotlib look and feel.

    The theme can be used (and has the same parameter
    to customize) like a :class:`matplotlib.rc_context` manager.

    Parameters
    -----------
    rc : dict, optional
        rcParams which should be applied on top of
        mathplotlib default.
    fname : str, optional
        Filename to a matplotlibrc file
    use_defaults : bool
        If `True` (the default) resets the plot setting
        to the (current) `matplotlib.rcParams` values
    """
    def __init__(self, rc=None, fname=None, use_defaults=True):
        theme.__init__(
            self,
            aspect_ratio=get_option('aspect_ratio'),
            dpi=get_option('dpi'),
            figure_size=get_option('figure_size'),
            legend_key=element_rect(fill='None', colour='None'),
            legend_key_size=16,
            panel_spacing=0.1,
            strip_background=element_rect(
                fill='#D9D9D9', color='#D9D9D9', size=1),
            complete=True)
        if use_defaults:
            _copy = mpl.rcParams.copy()
            # NOTE(review): _deprecated_remain_as_none/_all_deprecated are
            # private matplotlib attributes - confirm the pinned matplotlib
            # version still provides them before upgrading
            deprecated_rcparams = (
                set(mpl._deprecated_remain_as_none)
                | set(mpl._all_deprecated)
            )
            # no need to a get a deprecate warning just because
            # they are still included in rcParams...
            for key in deprecated_rcparams:
                if key in _copy:
                    del _copy[key]
            if 'tk.pythoninspect' in _copy:
                del _copy['tk.pythoninspect']
            self._rcParams.update(_copy)
        if fname:
            self._rcParams.update(mpl.rc_params_from_file(fname))
        if rc:
            self._rcParams.update(rc)
|
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
import iris.tests as tests
import logging
import cartopy.crs as ccrs
import cf_units
import iris.cube
import iris.coords
import iris.tests.stock
from iris.coord_systems import *
logger = logging.getLogger('tests')
def osgb():
    # transverse-Mercator fixture with OSGB-style parameters (test values)
    return TransverseMercator(latitude_of_projection_origin=49, longitude_of_central_meridian=-2,
                              false_easting=-400, false_northing=100,
                              scale_factor_at_central_meridian=0.9996012717,
                              ellipsoid=GeogCS(6377563.396, 6356256.909))
def merc():
    # Mercator fixture with a 90-degree projection-origin longitude
    return Mercator(longitude_of_projection_origin=90.0,
                    ellipsoid=GeogCS(6377563.396, 6356256.909))
def stereo():
    # south-polar stereographic fixture with arbitrary false offsets
    return Stereographic(central_lat=-90, central_lon=-45,
                         false_easting=100, false_northing=200,
                         ellipsoid=GeogCS(6377563.396, 6356256.909))
class TestCoordSystemLookup(tests.IrisTest):
    """Cube.coord_system() lookup by name, by type, and with no argument."""
    def setUp(self):
        self.cube = iris.tests.stock.lat_lon_cube()
    def test_hit_name(self):
        self.assertIsInstance(self.cube.coord_system('GeogCS'),
                              GeogCS)
    def test_hit_type(self):
        self.assertIsInstance(self.cube.coord_system(GeogCS),
                              GeogCS)
    def test_miss(self):
        self.assertIsNone(self.cube.coord_system(RotatedGeogCS))
    def test_empty(self):
        # None and no-argument lookups also find the available system
        self.assertIsInstance(self.cube.coord_system(GeogCS),
                              GeogCS)
        self.assertIsNotNone(self.cube.coord_system(None))
        self.assertIsInstance(self.cube.coord_system(None),
                              GeogCS)
        self.assertIsNotNone(self.cube.coord_system())
        self.assertIsInstance(self.cube.coord_system(),
                              GeogCS)
        # once every coord drops its coord_system, all lookups return None
        for coord in self.cube.coords():
            coord.coord_system = None
        self.assertIsNone(self.cube.coord_system(GeogCS))
        self.assertIsNone(self.cube.coord_system(None))
        self.assertIsNone(self.cube.coord_system())
class TestCoordSystemSame(tests.IrisTest):
    """Equality semantics of coordinate-system objects."""
    def setUp(self):
        self.cs1 = iris.coord_systems.GeogCS(6371229)
        self.cs2 = iris.coord_systems.GeogCS(6371229)
        self.cs3 = iris.coord_systems.RotatedGeogCS(30, 30, ellipsoid=GeogCS(6371229))
    def test_simple(self):
        # identical construction parameters compare equal
        a = self.cs1
        b = self.cs2
        self.assertEqual(a, b)
    def test_different_class(self):
        a = self.cs1
        b = self.cs3
        # assertNotEqual: assertNotEquals is a deprecated unittest alias
        self.assertNotEqual(a, b)
    def test_different_public_attributes(self):
        a = self.cs1
        b = self.cs2
        a.foo = 'a'
        # check that that attribute was added (just in case)
        self.assertEqual(a.foo, 'a')
        # a and b should not be the same
        self.assertNotEqual(a, b)
        # a and b should be the same
        b.foo = 'a'
        self.assertEqual(a, b)
        b.foo = 'b'
        # a and b should not be the same
        self.assertNotEqual(a, b)
class Test_CoordSystem_xml_element(tests.IrisTest):
    """XML serialisation of a coordinate system."""
    def test_rotated(self):
        cs = RotatedGeogCS(30, 40, ellipsoid=GeogCS(6371229))
        self.assertXMLElement(cs, ("coord_systems", "CoordSystem_xml_element.xml"))
class Test_GeogCS_construction(tests.IrisTest):
    # Test Ellipsoid constructor
    # Don't care about testing the units, it has no logic specific to this class.
    def test_sphere_param(self):
        cs = GeogCS(6543210)
        self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_sphere.xml"))
    def test_no_major(self):
        cs = GeogCS(semi_minor_axis=6500000, inverse_flattening=151.42814163388104)
        self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_no_major.xml"))
    def test_no_minor(self):
        cs = GeogCS(semi_major_axis=6543210, inverse_flattening=151.42814163388104)
        self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_no_minor.xml"))
    def test_no_invf(self):
        cs = GeogCS(semi_major_axis=6543210, semi_minor_axis=6500000)
        self.assertXMLElement(cs, ("coord_systems", "GeogCS_init_no_invf.xml"))
    def test_invalid_ellipsoid_params(self):
        # exactly two of (major, minor, inverse flattening) must be given
        # no params
        with self.assertRaises(ValueError):
            GeogCS()
        # over specified
        with self.assertRaises(ValueError):
            GeogCS(6543210, 6500000, 151.42814163388104)
        # under specified
        with self.assertRaises(ValueError):
            GeogCS(None, 6500000, None)
        with self.assertRaises(ValueError):
            GeogCS(None, None, 151.42814163388104)
class Test_GeogCS_repr(tests.IrisTest):
    def test_repr(self):
        cs = GeogCS(6543210, 6500000)
        expected = "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)"
        self.assertEqual(expected, repr(cs))
class Test_GeogCS_str(tests.IrisTest):
    def test_str(self):
        cs = GeogCS(6543210, 6500000)
        expected = "GeogCS(semi_major_axis=6543210.0, semi_minor_axis=6500000.0)"
        self.assertEqual(expected, str(cs))
class Test_GeogCS_as_cartopy_globe(tests.IrisTest):
    def test_as_cartopy_globe(self):
        cs = GeogCS(6543210, 6500000)
        # Can't check equality directly, so use the proj4 params instead.
        res = cs.as_cartopy_globe().to_proj4_params()
        expected = {'a': 6543210, 'b': 6500000}
        self.assertEqual(res, expected)
class Test_GeogCS_as_cartopy_crs(tests.IrisTest):
    def test_as_cartopy_crs(self):
        cs = GeogCS(6543210, 6500000)
        res = cs.as_cartopy_crs()
        globe = ccrs.Globe(semimajor_axis=6543210.0,
                           semiminor_axis=6500000.0, ellipse=None)
        expected = ccrs.Geodetic(globe)
        self.assertEqual(res, expected)
class Test_RotatedGeogCS_construction(tests.IrisTest):
    """Serialisation of rotated systems with varying optional arguments."""
    def test_init(self):
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229))
        self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init.xml"))
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50)
        self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init_a.xml"))
        rcs = RotatedGeogCS(30, 40)
        self.assertXMLElement(rcs, ("coord_systems", "RotatedGeogCS_init_b.xml"))
class Test_RotatedGeogCS_repr(tests.IrisTest):
    def test_repr(self):
        # optional arguments only appear in the repr when supplied
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229))
        expected = "RotatedGeogCS(30.0, 40.0, "\
                   "north_pole_grid_longitude=50.0, ellipsoid=GeogCS(6371229.0))"
        self.assertEqual(expected, repr(rcs))
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50)
        expected = "RotatedGeogCS(30.0, 40.0, north_pole_grid_longitude=50.0)"
        self.assertEqual(expected, repr(rcs))
        rcs = RotatedGeogCS(30, 40)
        expected = "RotatedGeogCS(30.0, 40.0)"
        self.assertEqual(expected, repr(rcs))
class Test_RotatedGeogCS_str(tests.IrisTest):
    def test_str(self):
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229))
        expected = "RotatedGeogCS(30.0, 40.0, "\
                   "north_pole_grid_longitude=50.0, ellipsoid=GeogCS(6371229.0))"
        self.assertEqual(expected, str(rcs))
        rcs = RotatedGeogCS(30, 40, north_pole_grid_longitude=50)
        expected = "RotatedGeogCS(30.0, 40.0, north_pole_grid_longitude=50.0)"
        self.assertEqual(expected, str(rcs))
        rcs = RotatedGeogCS(30, 40)
        expected = "RotatedGeogCS(30.0, 40.0)"
        self.assertEqual(expected, str(rcs))
class Test_TransverseMercator_construction(tests.IrisTest):
def test_osgb(self):
tm = osgb()
self.assertXMLElement(tm, ("coord_systems", "TransverseMercator_osgb.xml"))
class Test_TransverseMercator_repr(tests.IrisTest):
def test_osgb(self):
tm = osgb()
expected = "TransverseMercator(latitude_of_projection_origin=49.0, longitude_of_central_meridian=-2.0, "\
"false_easting=-400.0, false_northing=100.0, scale_factor_at_central_meridian=0.9996012717, "\
"ellipsoid=GeogCS(semi_major_axis=6377563.396, semi_minor_axis=6356256.909))"
self.assertEqual(expected, repr(tm))
class Test_TransverseMercator_as_cartopy_crs(tests.IrisTest):
    """as_cartopy_crs() must reproduce the equivalent cartopy CRS."""
    def test_as_cartopy_crs(self):
        shape = GeogCS(semi_major_axis=6377563.396,
                       semi_minor_axis=6356256.909)
        iris_cs = TransverseMercator(49.0, -2.0, -40000.0, 10000.0,
                                     0.9996012717, ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.TransverseMercator(
            central_longitude=-2.0,
            central_latitude=49.0,
            false_easting=-40000.0,
            false_northing=10000.0,
            scale_factor=0.9996012717,
            globe=globe)
        self.assertEqual(iris_cs.as_cartopy_crs(), expected)
class Test_TransverseMercator_as_cartopy_projection(tests.IrisTest):
    """as_cartopy_projection() must reproduce the equivalent cartopy projection."""
    def test_as_cartopy_projection(self):
        shape = GeogCS(semi_major_axis=6377563.396,
                       semi_minor_axis=6356256.909)
        iris_cs = TransverseMercator(49.0, -2.0, -40000.0, 10000.0,
                                     0.9996012717, ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.TransverseMercator(
            central_longitude=-2.0,
            central_latitude=49.0,
            false_easting=-40000.0,
            false_northing=10000.0,
            scale_factor=0.9996012717,
            globe=globe)
        self.assertEqual(iris_cs.as_cartopy_projection(), expected)
class Test_Stereographic_construction(tests.IrisTest):
    """Check XML serialisation of the Stereographic fixture."""
    def test_stereo(self):
        coord_system = stereo()
        self.assertXMLElement(coord_system, ("coord_systems", "Stereographic.xml"))
class Test_Stereographic_repr(tests.IrisTest):
    """Check repr() of the Stereographic fixture."""
    def test_stereo(self):
        expected = (
            "Stereographic(central_lat=-90.0, central_lon=-45.0, "
            "false_easting=100.0, false_northing=200.0, "
            "true_scale_lat=None, "
            "ellipsoid=GeogCS(semi_major_axis=6377563.396, "
            "semi_minor_axis=6356256.909))"
        )
        self.assertEqual(expected, repr(stereo()))
class Test_Stereographic_as_cartopy_crs(tests.IrisTest):
    """as_cartopy_crs() must reproduce the equivalent cartopy Stereographic."""
    def test_as_cartopy_crs(self):
        shape = GeogCS(6377563.396, 6356256.909)
        iris_cs = Stereographic(central_lat=-90.0,
                                central_lon=-45.0,
                                false_easting=100.0,
                                false_northing=200.0,
                                ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.Stereographic(
            central_latitude=-90.0,
            central_longitude=-45.0,
            false_easting=100.0,
            false_northing=200.0,
            globe=globe)
        self.assertEqual(iris_cs.as_cartopy_crs(), expected)
class Test_Stereographic_as_cartopy_projection(tests.IrisTest):
    """as_cartopy_projection() must reproduce the equivalent cartopy projection."""
    def test_as_cartopy_projection(self):
        shape = GeogCS(6377563.396, 6356256.909)
        iris_cs = Stereographic(central_lat=-90.0,
                                central_lon=-45.0,
                                false_easting=100.0,
                                false_northing=200.0,
                                ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.Stereographic(
            central_latitude=-90.0,
            central_longitude=-45.0,
            false_easting=100.0,
            false_northing=200.0,
            globe=globe)
        self.assertEqual(iris_cs.as_cartopy_projection(), expected)
class Test_LambertConformal(tests.GraphicsTest):
    """Check the automatic cutoff latitude chosen for LambertConformal."""
    # Local renamed from `ccrs` to `crs` to avoid shadowing the cartopy module.
    def test_north_cutoff(self):
        crs = LambertConformal(0, 0, secant_latitudes=(30, 60)).as_cartopy_crs()
        self.assertEqual(crs.cutoff, -30)

    def test_south_cutoff(self):
        crs = LambertConformal(0, 0, secant_latitudes=(-30, -60)).as_cartopy_crs()
        self.assertEqual(crs.cutoff, 30)
class Test_Mercator_construction(tests.IrisTest):
    """Check XML serialisation of the Mercator fixture."""
    def test_merc(self):
        coord_system = merc()
        self.assertXMLElement(coord_system, ("coord_systems", "Mercator.xml"))
class Test_Mercator_repr(tests.IrisTest):
    """Check repr() of the Mercator fixture."""
    def test_merc(self):
        expected = (
            "Mercator(longitude_of_projection_origin=90.0, "
            "ellipsoid=GeogCS(semi_major_axis=6377563.396, "
            "semi_minor_axis=6356256.909))"
        )
        self.assertEqual(expected, repr(merc()))
class Test_Mercator_as_cartopy_crs(tests.IrisTest):
    """as_cartopy_crs() must reproduce the equivalent cartopy Mercator."""
    def test_as_cartopy_crs(self):
        shape = GeogCS(semi_major_axis=6377563.396,
                       semi_minor_axis=6356256.909)
        iris_cs = Mercator(90.0, ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.Mercator(central_longitude=90.0, globe=globe)
        self.assertEqual(iris_cs.as_cartopy_crs(), expected)
class Test_Mercator_as_cartopy_projection(tests.IrisTest):
    """as_cartopy_projection() must reproduce the equivalent cartopy projection."""
    def test_as_cartopy_projection(self):
        shape = GeogCS(semi_major_axis=6377563.396,
                       semi_minor_axis=6356256.909)
        iris_cs = Mercator(90.0, ellipsoid=shape)
        globe = ccrs.Globe(semimajor_axis=6377563.396,
                           semiminor_axis=6356256.909, ellipse=None)
        expected = ccrs.Mercator(central_longitude=90.0, globe=globe)
        self.assertEqual(iris_cs.as_cartopy_projection(), expected)
if __name__ == "__main__":
    # Run the iris test suite when this module is executed directly.
    tests.main()
|
import json
import warnings
from past.builtins import basestring
from .template import PYWR_PROTECTED_NODE_KEYS, pywr_template_name
from .core import BasePywrHydra, data_type_from_field
from pywr.nodes import NodeMeta
from hydra_pywr_common import data_type_from_component_type
import logging
log = logging.getLogger(__name__)
class PywrHydraImporter(BasePywrHydra):
    """Import a Pywr JSON model document into Hydra.

    Converts the sections of a Pywr model (nodes, edges, parameters,
    recorders, metadata and timestepper) into the request payloads a
    Hydra client expects: a set of attributes, a network containing
    nodes/links, and a baseline scenario holding the datasets.
    """

    def __init__(self, data, template):
        """`data` may be a filename, a file-like object, or an already
        parsed Pywr JSON dict; `template` is the Hydra template dict the
        import is performed against."""
        super().__init__()

        self.template = template

        if isinstance(data, basestring):
            # argument is a filename
            path = data
            with open(path, "r") as f:
                data = json.load(f)
        elif hasattr(data, 'read'):
            # argument is a file-like object
            data = json.load(data)

        self.data = data
        # Hydra convention: resources not yet saved carry negative ids.
        self.next_node_id = -1

    @classmethod
    def from_client(cls, client, data, config_name):
        """Alternate constructor that fetches the template via a Hydra client."""
        template = client.get_template_by_name(pywr_template_name(config_name))
        return cls(data, template)

    @property
    def name(self):
        """Network name, taken from the Pywr metadata title (with a warned fallback)."""
        try:
            name = self.data['metadata']['title']
        except KeyError:
            name = 'A Pywr model.'
            warnings.warn('Pywr model data contains no name metadata. Using default name: "{}"'.format(name))
        return name

    @property
    def description(self):
        """Network description from the Pywr metadata; empty string if absent."""
        try:
            description = self.data['metadata']['description']
        except KeyError:
            description = ''
        return description

    def import_data(self, client, project_id):
        """Run the full import against `client`; returns (network_id, scenario_id)."""
        # First the attributes must be added.
        attributes = self.add_attributes_request_data()

        # The response attributes have ids now.
        response_attributes = client.add_attributes(attributes)

        # Convert to a simple dict for local processing.
        # TODO change this variable name to map or lookup
        attribute_ids = {a.name: a.id for a in response_attributes}

        # Now we try to create the network
        network = self.add_network_request_data(attribute_ids, project_id)
        hydra_network = client.add_network(network)

        # Get the added scenario_id. There should only be one scenario
        assert len(hydra_network['scenarios']) == 1
        scenario_id = hydra_network['scenarios'][0]['id']

        return hydra_network.id, scenario_id

    def add_attributes_request_data(self):
        """ Generate the data for adding attributes to Hydra. """
        # Yield attributes from the timestepper ...
        for attr in self.attributes_from_meta():
            yield attr

        # Yield the attributes from the nodes ...
        for attr in self.attributes_from_nodes():
            yield attr

        # ... now the attributes associated with the recorders and parameters.
        for key in ('recorders', 'parameters'):
            if key not in self.data:
                continue
            for attr in self.attributes_from_component_dict(key):
                yield attr

    def add_network_request_data(self, attribute_ids, project_id, projection=None):
        """ Return a dictionary of the data required for adding a network to Hydra. """
        # Get the network type
        network_template_type = self._get_template_type_by_name('pywr', 'NETWORK')
        network_template_type_id = network_template_type['id']

        # TODO add tables and scenarios.
        nodes, links, resource_scenarios = self.convert_nodes_and_edges(attribute_ids)

        network_attributes = []
        # recorders/parameters are stored JSON-encoded at the network level ...
        for component_key in ('recorders', 'parameters'):
            generator = self.generate_component_resource_scenarios(component_key, attribute_ids, encode_to_json=True)
            for resource_attribute, resource_scenario in generator:
                network_attributes.append(resource_attribute)
                resource_scenarios.append(resource_scenario)

        # ... whereas metadata/timestepper values are stored as plain values.
        # TODO timestepper data is on the scenario.
        for component_key in ('metadata', 'timestepper'):
            generator = self.generate_component_resource_scenarios(component_key, attribute_ids, encode_to_json=False)
            for resource_attribute, resource_scenario in generator:
                network_attributes.append(resource_attribute)
                resource_scenarios.append(resource_scenario)

        scenario = self.make_scenario(resource_scenarios)

        data = {
            "name": self.name,
            "description": self.description,
            "project_id": project_id,
            "links": links,
            "nodes": nodes,
            "layout": None,
            "scenarios": [scenario, ],
            "projection": projection,
            "attributes": network_attributes,
            'types': [{'id': network_template_type_id}]
        }
        return data

    def make_scenario(self, resource_scenarios=None):
        """ Make the request data for a Hydra scenario. """
        if resource_scenarios is None:
            resource_scenarios = []

        scenario = {
            "name": "Baseline",
            "description": "Baseline scenario (auto-generated by Pywr app)",
            "resourcescenarios": resource_scenarios
        }
        return scenario

    def attributes_from_nodes(self):
        """ Generator to convert Pywr node data in to Hydra attribute data.

        Collects the union of all (non-protected) schema field names across
        every node type present in the model, and yields one Hydra attribute
        per unique name, sorted for deterministic output.
        """
        nodes = self.data['nodes']

        attributes = set()
        for node in nodes:
            # Pywr keeps a registry of lower case node types.
            node_type = node['type'].lower()
            node_klass = NodeMeta.node_registry[node_type]
            schema = node_klass.Schema()

            # Create an attribute for each field in the schema.
            for name, field in schema.fields.items():
                if name in PYWR_PROTECTED_NODE_KEYS:
                    continue
                attributes.add(name)

        for attr in sorted(attributes):
            yield {
                'name': attr,
                'description': ''
            }

    def attributes_from_meta(self):
        """ Generator to convert Pywr timestepper data in to Hydra attribute data. """
        for meta_key in ('metadata', 'timestepper'):
            for key in self.data[meta_key].keys():
                # Prefix these names with Pywr JSON section.
                yield {
                    'name': '{}.{}'.format(meta_key, key),
                    'description': ''
                }

    def _get_template_type_by_name(self, name, resource_type=None):
        """Return the template type entry matching `name` (and optionally
        `resource_type`); raises ValueError when not found."""
        for template_type in self.template['templatetypes']:
            if name == template_type['name']:
                if resource_type is None or template_type['resource_type'] == resource_type:
                    return template_type
        raise ValueError('Template does not contain node of type "{}".'.format(name))

    def convert_nodes_and_edges(self, attribute_ids):
        """ Convert a tuple of (nodes, links) of Hydra data based on the given Pywr data. """
        pywr_nodes = self.data['nodes']
        pywr_edges = self.data['edges']

        def find_node_id(node_name):
            # Linear search over the hydra nodes built above.
            for hydra_node in hydra_nodes:
                if hydra_node['name'] == node_name:
                    return hydra_node['id']
            raise ValueError('Node name "{}" not found in node data.'.format(node_name))

        # TODO make this object properties
        # Negative ids mark resources that do not exist in Hydra yet.
        node_id = -1
        link_id = -1

        hydra_nodes = []
        hydra_links = []  # Note the change in nomenclature pywr->edges, hydra->links
        hydra_resource_scenarios = []

        # First generate the hydra node data
        for pywr_node in pywr_nodes:
            try:
                comment = pywr_node['comment']
            except KeyError:
                comment = None

            # Get the type for this node from the template
            # Pywr keeps a registry of lower case node types.
            pywr_node_type = pywr_node['type'].lower()
            node_template_type = self._get_template_type_by_name(pywr_node_type, 'NODE')
            node_template_type_id = node_template_type['id']

            # Now make the attributes
            resource_attributes = []
            for resource_attribute, resource_scenario in self.generate_node_resource_scenarios(pywr_node, attribute_ids):
                resource_attributes.append(resource_attribute)
                hydra_resource_scenarios.append(resource_scenario)

            # Try to get a coordinate from the pywr_node
            x, y = None, None
            try:
                x, y = pywr_node['position']['geographic']
            except KeyError:
                pass

            hydra_node = {
                'id': node_id,
                'name': pywr_node['name'],
                'description': comment,
                'layout': None,  # TODO this is a JSON string
                'x': x,  # TODO add some tests with coordinates.
                'y': y,
                'attributes': resource_attributes,
                'types': [{'id': node_template_type_id}]
            }

            hydra_nodes.append(hydra_node)
            node_id -= 1

        # All Pywr edges have the same type
        edge_template_type = self._get_template_type_by_name('edge', 'LINK')
        edge_template_type_id = edge_template_type['id']

        for pywr_edge in pywr_edges:
            # TODO slots
            if len(pywr_edge) > 2:
                raise NotImplementedError('Edges with slot definitions are not currently supported.')
            node_1_name, node_2_name = pywr_edge

            hydra_link = {
                'id': link_id,
                'name': "{} to {}".format(node_1_name, node_2_name),
                'description': None,
                'layout': None,
                'node_1_id': find_node_id(node_1_name),
                'node_2_id': find_node_id(node_2_name),
                'attributes': [],  # Links have no resource attributes
                'types': [{'id': edge_template_type_id}]
            }
            hydra_links.append(hydra_link)
            link_id -= 1

        return hydra_nodes, hydra_links, hydra_resource_scenarios

    def generate_node_resource_scenarios(self, pywr_node, attribute_ids):
        """Yield (resource_attribute, resource_scenario) pairs for a node,
        first from its schema fields, then from any node-level components."""
        for ra, rs in self.generate_node_schema_resource_scenarios(pywr_node, attribute_ids):
            yield ra, rs

        for component_key in ('parameters', 'recorders'):
            for ra, rs in self.generate_node_component_resource_scenarios(pywr_node, component_key,
                                                                          attribute_ids, encode_to_json=True):
                yield ra, rs

    def generate_node_schema_resource_scenarios(self, pywr_node, attribute_ids):
        """ Generate resource attribute, resource scenario and datasets for a Pywr node.
        """
        node_type = pywr_node['type'].lower()
        node_klass = NodeMeta.node_registry[node_type]
        schema = node_klass.Schema()

        # Create an attribute for each field in the schema.
        for name, field in schema.fields.items():
            if name not in pywr_node:
                continue  # Skip missing fields
            if name in PYWR_PROTECTED_NODE_KEYS:
                continue
            # Non-protected keys represent data that must be added to Hydra.
            data_type = data_type_from_field(field)
            # Key is the attribute name. The attributes need to already by added to the
            # database and hence have a valid id.
            attribute_id = attribute_ids[name]
            yield self._make_dataset_resource_attribute_and_scenario(name, pywr_node[name], data_type,
                                                                     attribute_id, encode_to_json=True)

    def generate_node_component_resource_scenarios(self, pywr_node, component_key, attribute_ids, **kwargs):
        """Yield (resource_attribute, resource_scenario) pairs for components
        (parameters/recorders) that belong to this node rather than the network."""
        try:
            components = self.data[component_key]
        except KeyError:
            components = {}

        node_name = pywr_node['name']

        for component_name, component_data in components.items():
            # Filter components to only include those that should be stored at the node level
            if not self.is_component_a_node_attribute(component_name, node_name):
                continue

            data_type = data_type_from_component_type(component_key, component_data['type']).tag
            attribute_name = self._attribute_name(component_key, component_name)

            # This the attribute corresponding to the component.
            # It should have a positive id and already be entered in the hydra database.
            attribute_id = attribute_ids[attribute_name]
            yield self._make_dataset_resource_attribute_and_scenario(attribute_name, component_data, data_type,
                                                                     attribute_id, **kwargs)

    def _attribute_name(self, component_key, component_name):
        """Map a component name to its Hydra attribute name: node-level
        parameters/recorders drop the node prefix; other sections are
        prefixed with the section name (e.g. "metadata.title")."""
        if component_key in ('parameters', 'recorders'):
            if self._node_attribute_component_delimiter in component_name:
                attribute_name = component_name.split(self._node_attribute_component_delimiter, 1)[-1]
            else:
                attribute_name = component_name
        else:
            attribute_name = '{}.{}'.format(component_key, component_name)
        return attribute_name

    def attributes_from_component_dict(self, component_key):
        """ Generator to convert Pywr components data in to Hydra attribute data.

        This function is intended to be used to convert Pywr components (e.g. recorders, parameters, etc.) data
        in to a format that can be imported in to Hydra. The Pywr component data is a dict of dict with each
        sub-dict represent a single component (see the "recorder" or "parameters" section of the Pywr JSON format). This
        function returns Hydra data to add a Attribute for each of the components in the outer dict.
        """
        components = self.data[component_key]

        for component_name in components.keys():
            attribute_name = self._attribute_name(component_key, component_name)
            yield {
                'name': attribute_name,
                'description': ''
            }

    def generate_component_resource_scenarios(self, component_key, attribute_ids, **kwargs):
        """ Convert from Pywr components to resource attributes and resource scenarios.

        This function is intended to be used to convert Pywr components (e.g. recorders, parameters, etc.) data
        in to a format that can be imported in to Hydra. The Pywr component data is a dict of dict with each
        sub-dict represent a single component (see the "recorder" or "parameters" section of the Pywr JSON format). This
        function returns a list of resource attributes and resource scenarios. These can be used to import the data
        to Hydra.
        """
        try:
            components = self.data[component_key]
        except KeyError:
            components = {}

        for component_name, component_data in components.items():
            if component_key == 'metadata':
                if component_name in ('title', 'description'):
                    # These names are saved on the hydra network directly (name and descripton)
                    # therefore do not add as a attributes as well.
                    continue

            # Determine whether this component should be store on as a node attribute.
            if component_key in ('parameters', 'recorders') and \
                    self.is_component_a_node_attribute(component_name):
                continue

            # Determine the data type
            if component_key in ('parameters', 'recorders'):
                data_type = data_type_from_component_type(component_key, component_data['type']).tag
            else:
                if component_key == 'timestepper' and component_name == 'timestep':
                    data_type = 'SCALAR'
                else:
                    data_type = 'DESCRIPTOR'

            attribute_name = self._attribute_name(component_key, component_name)

            # This the attribute corresponding to the component.
            # It should have a positive id and already be entered in the hydra database.
            attribute_id = attribute_ids[attribute_name]
            yield self._make_dataset_resource_attribute_and_scenario(attribute_name, component_data, data_type,
                                                                     attribute_id, **kwargs)
|
def get_real_floor(n):
    """Map a displayed floor number to its real (zero-based) index.

    Floor numbering skips both 0 and the unlucky 13: displayed floor 1 is
    real floor 0, and displayed floors 13 and above shift down one extra
    step. Non-positive inputs (basements / ground) are returned unchanged.
    """
    if n <= 0:
        return n
    real = n - 1
    if n >= 13:
        real -= 1
    return real
|
import time
import maestro

# Drive a Pololu Maestro servo controller: constrain channel ranges, then
# nudge the pan/tilt servo on channel 1.
try:
    servo = maestro.Controller()
    servo.setRange(0, 3000, 8200)
    servo.setRange(1, 4000, 8000)
    # about 5 clicks per full motion
    # 1040 for left/right + is left, - is right.
    # 800 for up/down + is up, - is down.
    x = servo.getPosition(1) - 800
    servo.setAccel(1, 6)
    servo.setTarget(1, x)
finally:
    # BUG FIX: the original `servo.close` only referenced the bound method
    # without calling it, so the serial port was never released.
    servo.close()
|
r"""
Total Key press by program
==========================
Show the use of the keyboard in each program
"""
import matplotlib.pyplot as plt
import sqlite3
import pandas as pd

DB_PATH = "/home/oscar/dev/selfspy/.testpy3/selfspy.sqlite"

# Pull the process names and per-event key counts from the selfspy database.
connection = sqlite3.connect(DB_PATH)
process_df = pd.read_sql_query("SELECT id, name from process", connection,
                               index_col='id')
keys_df = pd.read_sql_query("SELECT id, process_id, nrkeys from keys", connection,
                            index_col='id')
connection.close()

# Aggregate key presses per process and attach the process names.
per_process = keys_df.groupby('process_id').sum()
per_process = pd.merge(per_process, process_df,
                       left_index=True, right_index=True)

# Log-scale bar chart, busiest programs first.
ordered = per_process.sort_values(by='nrkeys', ascending=False)
ordered.plot(x='name', kind='bar', logy=True, legend=False)
plt.ylabel('Total Key Presses')
plt.xlabel('')
|
# Odoo/OpenERP addon manifest: links purchase order line dates to the dates
# of the associated outgoing (customer) stock moves.
{
    'name': 'Purchase Date to Move Out Date',
    'version': '1.0',
    'category': 'Sale',
    'description': """
Change purchase Order Line Date impact Customer (out) Moves Date
================================================================
The aim of this module is to deal with floating picking in dates that impact
picking out dates.
* When a purchase order is validated, if the purchaser change dates on purchase
order line dates, it will changes the date of the associated forcast move in.
Technical Information
---------------------
* This module remove readonly features on purchase order line, when order is
confirmed. Use this feature with caution.
Copyright
---------
* Noemis (http://www.noemis.fr)
""",
    'author': 'Sylvain LE GAL',
    'website': 'http://www.noemis.fr',
    'license': 'AGPL-3',
    # Requires both the purchase and sale/stock chains so dates can be
    # propagated from purchase lines through to outgoing pickings.
    'depends': [
        'purchase',
        'stock',
        'sale',
        'sale_order_dates',
        'sale_stock'
    ],
    'data': [
        'views/picking_view.xml',
        'views/purchase_view.xml',
    ],
}
|
"""
This script was created by Sergey Tomin for Workshop: Designing future X-ray FELs. Source and license info is on GitHub.
August 2016.
Modified in 2017,
S.Tomin
"""
from time import time
from copy import deepcopy
from ocelot import *
from ocelot.gui.accelerator import *
from ocelot.adaptors.astra2ocelot import *
D_14 = Drift(l=0.2216 + 0.0996, eid='D_14')
D_15 = Drift(l=0.3459, eid='D_15')
D_22 = Drift(l=0.2043, eid='D_22')
D_23 = Drift(l=0.085+0.4579 + 0.2211 + 0.085, eid='D_23')
phi1 = 18.7268
V1 = 18.50662e-3/np.cos(phi1*pi/180)
C_A1_1_1_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.1.I1')
C_A1_1_2_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.2.I1')
C_A1_1_3_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.3.I1')
C_A1_1_4_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.4.I1')
C_A1_1_5_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.5.I1')
C_A1_1_6_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.6.I1')
C_A1_1_7_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.7.I1')
C_A1_1_8_I1 = Cavity(l=1.0377, v=V1, freq=1.3e9, phi=phi1, eid='C.A1.1.8.I1')
phi13 = 180
V13 = -20.2E-3/8/np.cos(phi13*pi/180)
C3_AH1_1_1_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.1.I1')
C3_AH1_1_2_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.2.I1')
C3_AH1_1_3_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.3.I1')
C3_AH1_1_4_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.4.I1')
C3_AH1_1_5_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.5.I1')
C3_AH1_1_6_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.6.I1')
C3_AH1_1_7_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.7.I1')
C3_AH1_1_8_I1 = Cavity(l=0.346, v=V13, freq=3.9e9, phi=phi13, eid='C3.AH1.1.8.I1')
Q_37_I1 = Quadrupole(l=0.3, k1=-1.537886, tilt=0.0, eid='Q.37.I1')
Q_38_I1 = Quadrupole(l=0.3, k1=1.435078, tilt=0.0, eid='Q.38.I1')
start_sim = Marker()
cell = (start_sim, D_14, C_A1_1_1_I1, D_15, C_A1_1_2_I1,
D_15, C_A1_1_3_I1, D_15, C_A1_1_4_I1, D_15, C_A1_1_5_I1, D_15, C_A1_1_6_I1,
D_15, C_A1_1_7_I1, D_15, C_A1_1_8_I1, D_22, Q_37_I1, D_23, Q_38_I1)
sigma_x = 0.000231507245956
sigma_px =0.000204206874319
sigma_y = 0.000231583942392
sigma_py =0.000204272734636
np.random.seed(0)
x = np.random.randn(200000)*sigma_x
px = np.random.randn(200000)*sigma_px
y = np.random.randn(200000)*sigma_y
py = np.random.randn(200000)*sigma_py
cov_t_p = [[ 6.89508231e-07, -2.98688604e-07],
[ -2.98688604e-07, 1.87434257e-07]]
long_dist = np.random.multivariate_normal((0, 0), cov_t_p, 200000)
tau = long_dist[:, 0]
dp = long_dist[:, 1]
p_array_init = ParticleArray(n=200000)
p_array_init.E = 0.0065 # GeV
p_array_init.rparticles[0] = x
p_array_init.rparticles[1] = px
p_array_init.rparticles[2] = y
p_array_init.rparticles[3] = py
p_array_init.rparticles[4] = tau
p_array_init.rparticles[5] = dp
Q = 5e-10
p_array_init.q_array = np.ones(200000)*Q/200000
tws0 = Twiss()
tws0.beta_x = 1.59966676201
tws0.beta_y = 1.60018325757
tws0.alpha_x = -0.995487979563
tws0.alpha_y = -0.996116091572
tws0.mux = 0
tws0.muy = 0
tws0.E = 0.0065
bt = BeamTransform(tws=tws0)
bt.apply(p_array_init,dz=0)
sI1, I1 = get_current(p_array_init, num_bins=200)
method = MethodTM()
method.global_method = SecondTM
lat = MagneticLattice(cell, method=method)
tws = twiss(lat, tws0)
plot_opt_func(lat, tws, top_plot=["E"], fig_name=0, legend=False)
plt.show()
sc1 = SpaceCharge()
sc1.nmesh_xyz = [63, 63, 63]
sc1.step = 1
sc5 = SpaceCharge()
sc5.nmesh_xyz = [63, 63, 63]
sc5.step = 5
navi = Navigator(lat)
navi.add_physics_proc(sc1, lat.sequence[0], C_A1_1_2_I1)
navi.add_physics_proc(sc5, C_A1_1_2_I1, lat.sequence[-1])
navi.unit_step = 0.02
p_array = deepcopy(p_array_init)
start = time()
tws_track, p_array = track(lat, p_array, navi)
print("time exec: ", time() - start, "sec")
plot_opt_func(lat, tws_track, top_plot=["E"], fig_name=0, legend=False)
sI1_e, I1_e = get_current(p_array, num_bins=200)
plt.figure(1)
plt.title("energy distribution: Start-End")
plt.plot(-p_array_init.tau()*1000, p_array_init.p(), 'r.', label="start")
plt.plot(-p_array.tau()*1000, p_array.p(), 'b.', label="end")
plt.legend()
plt.xlabel("s, mm")
plt.ylabel("dp/p")
plt.grid(True)
plt.figure(2)
plt.title("current: Start-End")
plt.plot(sI1*1000, I1, "r", label="start")
plt.plot(sI1_e*1000, I1_e, "b", label="end")
plt.legend()
plt.xlabel("s, mm")
plt.ylabel("I, A")
plt.grid(True)
plt.show()
|
"""164. Numbers for which no three consecutive digits have a sum greater than a given value
https://projecteuler.net/problem=164
How many 20 digit numbers n (without any leading zero) exist such that no
three consecutive digits of n have a sum greater than 9?
"""
|
import numpy as np
from xlwings import Range
from datetime import datetime
from scipy import interpolate
import write_utils as wu
import sys
# NOTE(review): Python 2-only hack — reload() re-exposes the hidden
# sys.setdefaultencoding so all implicit str/unicode conversions use utf8.
# This is discouraged and does not exist in Python 3; confirm before porting.
reload(sys)
sys.setdefaultencoding('utf8')
import map_interactive as mi
from map_interactive import pll
import map_utils as mu
class dict_position:
"""
Purpose:
Class that creates an easy storage for position coordinates.
Encompasses array values of positions, altitude, time, dist.
Along with program to update excel spreadsheet with info, read spreadsheet data,
and update calculations for distances
Inputs: (at init)
lon0: [degree] initial longitude (optional, defaults to Namibia Walvis bay airport), can be string
lat0: [degree] initial latitude (optional, defaults to Namibia Walvis bay airport), can be string
speed: [m/s] speed of aircraft defaults to p3 value of 150 m/s (optional)
UTC_start: [decimal hours] time of takeoff, defaults to 7.0 UTC (optional)
UTC_conversion: [decimal hours] conversion (dt) used to change utc to local time (optional), local = utc + dt
alt0: [m] initial altitude of the plane, airport altitude (optional)
verbose: if True then outputs many command line comments while interaction is executed, defaults to False
filename: (optional) if set, opens the excel file and starts the interaction with the first sheet
datestr: (optional) The flight day in format YYYY-MM-DD, if not set, default to today in utc.
color: (optional) the color of the flight path defaults to red.
sheet_num: (optional, defaults to 1) the sheet number to switch to
profile: (optional) if set, uses a dict of basemap profile to set for the initial lat lons, alt, utc_start, utc_conversion, name
Outputs:
dict_position class
Dependencies:
numpy
xlwings
Excel (win or mac)
map_interactive
map_utils
simplekml
gpxpy
tempfile
os
datetime
Pyephem
Required files:
none
Example:
...
Modification History:
Written: Samuel LeBlanc, 2015-08-07, Santa Cruz, CA
Modified: Samuel LeBlanc, 2015-08-11, Santa Cruz, CA
- update and bug fixes
Modified: Samuel LeBlanc, 2015-08-14, NASA Ames, CA
- added save to kml functionality
Modified: Samuel LeBlanc, 2015-08-18, NASA Ames, CA
- added open excel functionality via the filename option and extra method
Modified: Samuel LeBlanc, 2015-08-21, Santa Cruz, CA
- added save to GPX functionality
- added datestr for keeping track of flight days
- added functionality for comments and space for sza/azi
Modified: Samuel LeBlanc, 2015-08-24, Santa Cruz, CA
- added multi flight path handling funcitonality, by generating new sheets
- added newsheetonly keyword and name keyword
Modified: Samuel LeBlanc, 2015-09-02, Santa Cruz, CA
- added color keyword
Modified: Samuel LeBlanc, 2015-09-10, NASA Ames, Santa Cruz, CA
- added init codes for loading a single sheet of a workbook
Modified: Samuel LeBlanc, 2015-09-15, NASA Ames, CA
- added handling of the profile dict of lat lon and starting positions
Modified: Samuel LeBlanc, 2016-07-10, NASA Ames, from Santa Cruz, CA
- added handling of platform info from external files.
- added bearing info to excel file and flight planning version info with date
Modified: Samuel LeBlanc, 2016-07-22, NASA Ames, from Santa Cruz, CA
- modified kml saving to also save a kmz with included icons
- modified kml/kmz to have the altitude and link to ground set.
- removed dependency of Pysolar, fixed bug in azimuth calculations
Modified: Samuel LeBlanc, 2016-07-28, NASA Ames, CA
- fixed utc convertion issue when reading in an excel file
Modified: Samuel LeBlanc, 2016-08-25, NASA P3, transit from Barbados to Ascension
- added inserts method to insert a point in between other points.
Modified: Samuel LeBlanc, 2016-08-30, Swakopmund, Namibia
- added a force speed calculation
Modified: Samuel LeBlanc, 2016-08-31, Swakopmund, Namibia
- fixed saving for pilots, added delay time in comments
Modified: Samuel LeBlanc, 2019-06-03, Santa Cruz, CA
- Calc_climb_time typo in reading platform.txt file.
"""
import numpy as np
from xlwings import Range,Sheet
from datetime import datetime
import map_interactive as mi
from map_interactive import pll
import map_utils as mu
    def __init__(self,lon0='14 38.717E',lat0='22 58.783S',speed=150.0,UTC_start=7.0,
                 UTC_conversion=+1.0,alt0=0.0,
                 verbose=False,filename=None,datestr=None,
                 newsheetonly=False,name='P3 Flight path',sheet_num=1,color='red',
                 profile=None,campaign='None',version='v1.09',platform_file='platform.txt'):
        # Initialise the flight-path arrays, load platform info, then either
        # create a new Excel workbook or open an existing one (`filename`).
        # A supplied `profile` dict overrides the starting position/time/name.
        if profile:
            lon0,lat0,UTC_start = profile['Start_lon'],profile['Start_lat'],profile['UTC_start']
            UTC_conversion,alt0,name,campaign = profile['UTC_conversion'],profile['start_alt'],profile['Plane_name'],profile['Campaign']
        self.__version__ = version
        self.comments = [' ']
        # All per-waypoint quantities are kept as parallel numpy arrays,
        # seeded with the single starting waypoint.
        self.lon = np.array([pll(lon0)])   # pll parses 'DD MM.mmm[EWNS]' strings to decimal degrees
        self.lat = np.array([pll(lat0)])
        self.speed = np.array([speed])     # [m/s]
        self.alt = np.array([alt0])        # [m]
        self.UTC_conversion = UTC_conversion  # local = utc + dt [hours]
        self.utc = np.array([UTC_start])   # [decimal hours]
        self.UTC = self.utc
        # Derived per-leg quantities, initialised to zero arrays of same shape.
        self.legt = self.UTC*0.0           # leg time
        self.dist = self.UTC*0.0           # leg distance
        self.cumdist = self.UTC*0.0        # cumulative distance
        self.cumlegt = self.legt           # cumulative leg time
        self.delayt = self.legt            # delay time per waypoint
        self.bearing = self.lon*0.0
        self.endbearing = self.lon*0.0
        self.turn_deg = self.lon*0.0
        self.turn_time = self.lon*0.0
        self.climb_time = self.lon*0.0
        self.sza = self.lon*0.0            # solar zenith angle placeholder
        self.azi = self.lon*0.0            # solar azimuth placeholder
        self.datetime = self.lon*0.0
        # Unit conversions kept alongside SI values (knots, kilo-feet).
        self.speed_kts = self.speed*1.94384449246
        self.alt_kft = self.alt*3.28084/1000.0
        self.head = self.legt
        self.color = color
        self.googleearthopened = False
        self.netkml = None
        self.verbose = verbose
        self.name = name
        self.campaign = campaign
        # Platform performance profile (speeds, climb rates, ...) from file
        # or internal defaults.
        self.platform, self.p_info,use_file = self.get_platform_info(name,platform_file)
        self.pilot_format = self.p_info.get('pilot_format','DD MM SS')
        if use_file:
            print 'Using platform data for: {} from platform file: {}'.format(self.platform,platform_file)
        else:
            print 'Using platform data for: {} from internal defaults'.format(self.platform)
        if datestr:
            self.datestr = datestr
        else:
            # Default flight day: today in UTC.
            self.datestr = datetime.utcnow().strftime('%Y-%m-%d')
        self.calculate()
        if not filename:
            # No file given: build a fresh workbook and write the header/values.
            self.sheet_num = sheet_num
            self.wb = self.Create_excel(newsheetonly=newsheetonly,name=name)
            try:
                self.write_to_excel()
            except:
                print 'writing to excel failed'
        else:
            # Open the existing workbook, sync its values, then recalculate.
            self.wb = self.Open_excel(filename=filename,sheet_num=sheet_num,campaign=campaign,platform_file=platform_file)
            self.check_xl()
            self.calculate()
            self.write_to_excel()
            self.sheet_num = sheet_num
def get_platform_info(self,name,filename):
"""
Function that reads the platform info from seperate file.
If sucessfuly uses these info to prepare speeds, altitudes, climb time, turn time, and others
"""
from ml import read_prof_file
import tkMessageBox
platform = None
p_info = None
use_file = False
try:
p = read_prof_file(filename)
for d in p:
if any(o in name for o in d['names']):
platform = d['Platform']
p_info = d
use_file = True
break
if not p_info:
tkMessageBox.showwarning('Platform not found','Platform values not found in file: {}.\nUsing internal defaults.'.format(filename))
platform = self.check_platform(name)
p_info = self.default_p_info(platform)
except IOError:
print '** Error reading platform information file: {} **'.format(filename)
try:
from gui import gui_file_select_fx
filename_new = gui_file_select_fx(ext='platform.txt',ftype=[('All files','*.*'),('Platform file','*.txt')])
p = read_prof_file(filename_new)
for d in p:
if any(o in name for o in d['names']):
platform = d['Platform']
p_info = d
use_file = True
break
except IOError:
print '** Error reading platform information file: {} **'.format(filename)
print '** Using default platform profiles **'
platform = self.check_platform(name)
p_info = self.default_p_info(platform)
if p_info['warning']:
tkMessageBox.showwarning('Check needed','Platform default speeds and altitude may be off for {}. Please double check.'.format(platform))
return platform, p_info, use_file
def default_p_info(self,platform):
'function that returns the default dict of platform info'
if platform=='p3':
p_info = {'Platform':'p3','names':['p3','P3','P-3','p-3','p 3','P 3'],
'max_alt':7000.0,'base_speed':110.0,'speed_per_alt':0.0070,
'max_speed':155.0,'max_speed_alt':5000.0,'descent_speed_decrease':15.0,
'climb_vert_speed':5.0,'descent_vert_speed':-5.0,'alt_for_variable_vert_speed':6000.0,
'vert_speed_base':4.5,'vert_speed_per_alt':7e-05,
'rate_of_turn':None,'turn_bank_angle':15.0,
'warning':False}
elif platform=='er2':
p_info = {'Platform':'er2','names':['er2','ER2','ER-2','er-2','ER 2','er 2'],
'max_alt':19000.0,'base_speed':70.0,'speed_per_alt':0.0071,
'max_speed':300.0,'max_speed_alt':30000.0,'descent_speed_decrease':0.0,
'climb_vert_speed':10.0,'descent_vert_speed':-10.0,'alt_for_variable_vert_speed':0.0,
'vert_speed_base':24.0,'vert_speed_per_alt':0.0011,
'rate_of_turn':None,'turn_bank_angle':15.0,
'warning':False}
elif platform=='dc8':
p_info = {'Platform':'dc8','names':['dc8','DC8','DC-8','dc-8','DC 8','dc 8'],
'max_alt':13000.0,'base_speed':130.0,'speed_per_alt':0.0075,
'max_speed':175.0,'max_speed_alt':6000.0,'descent_speed_decrease':15.0,
'climb_vert_speed':15.0,'descent_vert_speed':-10.0,'alt_for_variable_vert_speed':0.0,
'vert_speed_base':15.0,'vert_speed_per_alt':0.001,
'rate_of_turn':None,'turn_bank_angle':15.0,
'warning':False}
elif platform=='c130':
p_info = {'Platform':'c130','names':['c130','C130','C-130','c-130','C 130','c 130'],
'max_alt':7500.0,'base_speed':130.0,'speed_per_alt':0.0075,
'max_speed':175.0,'max_speed_alt':6000.0,'descent_speed_decrease':15.0,
'climb_vert_speed':10.0,'descent_vert_speed':-10.0,'alt_for_variable_vert_speed':0.0,
'vert_speed_base':10.0,'vert_speed_per_alt':0.001,
'rate_of_turn':None,'turn_bank_angle':20.0,
'warning':False}
elif platform=='bae146':
p_info = {'Platform':'bae146','names':['bae','BAE','146'],
'max_alt':8500.0,'base_speed':130.0,'speed_per_alt':0.002,
'max_speed':150.0,'max_speed_alt':8000.0,'descent_speed_decrease':15.0,
'climb_vert_speed':5.0,'descent_vert_speed':-5.0,'alt_for_variable_vert_speed':8000.0,
'vert_speed_base':4.5,'vert_speed_per_alt':7e-05,
'rate_of_turn':None,'turn_bank_angle':20.0,
'warning':True}
elif platform=='ajax':
p_info = {'Platform':'ajax','names':['ajax','Ajax','AJAX','alphajet','alpha','alpha-jet'],
'max_alt':9500.0,'base_speed':160.0,'speed_per_alt':0.09,
'max_speed':250.0,'max_speed_alt':9000.0,'descent_speed_decrease':5.0,
'climb_vert_speed':5.0,'descent_vert_speed':-5.0,'alt_for_variable_vert_speed':8000.0,
'vert_speed_base':4.5,'vert_speed_per_alt':7e-05,
'rate_of_turn':None,'turn_bank_angle':25.0,
'warning':True}
else:
p_info = {'Platform':'p3','names':['p3','P3','P-3','p-3','p 3','P 3'],
'max_alt':7000.0,'base_speed':110.0,'speed_per_alt':0.007,
'max_speed':155.0,'max_speed_alt':5000.0,'descent_speed_decrease':15.0,
'climb_vert_speed':5.0,'descent_vert_speed':-5.0,'alt_for_variable_vert_speed':6000.0,
'vert_speed_base':4.5,'vert_speed_per_alt':7e-05,
'rate_of_turn':None,'turn_bank_angle':15.0,
'warning':True}
return p_info
def check_platform(self,name):
'Simple program that check the name of the flight path to platforms names'
if any(p in name for p in ['p3','P3','P-3','p-3','p 3','P 3']): platform = 'p3'
if any(p in name for p in ['er2','ER2','ER-2','er-2','ER 2','er 2']): platform = 'er2'
if any(p in name for p in ['dc8','DC8','DC-8','dc-8','DC 8','dc 8']): platform = 'dc8'
if any(p in name for p in ['c130','C130','C-130','c-130','C 130','c 130']): platform = 'c130'
if any(p in name for p in ['bae','BAE','146']): platform = 'bae146'
try:
if not platform: platform = 'NA'
except UnboundLocalError:
platform = 'NA'
return platform
def get_rate_of_turn(self):
'Function to calculate the rate of turn of the plane'
if self.p_info.get('rate_of_turn'):
rate_of_turn = self.p_info.get('rate_of_turn')
elif self.p_info.get('turn_bank_angle'):
rate_of_turn = 1091.0*np.tan(self.p_info['turn_bank_angle']*np.pi/180)/self.speed[0]
else:
default_bank_angle = 15.0
rate_of_turn = 1091.0*np.tan(default_bank_angle*np.pi/180)/self.speed[0] # degree per second
return rate_of_turn
    def calculate(self):
        """
        Program to fill in all the missing pieces in the dict_position class
        Involves converting from metric to aviation units
        Involves calculating distances
        Involves calculating time of flight local and utc
        Fills in the waypoint numbers
        Assumes that blank spaces/nan are to be filled with new calculations
        """
        self.rate_of_turn = self.get_rate_of_turn()
        if not np.isfinite(self.rate_of_turn):
            # fallback rate [deg/s] when speed/bank angle produced nan or inf
            self.rate_of_turn = 2.4
        self.n = len(self.lon)
        self.WP = range(1,self.n+1)
        # walk each leg (waypoint i -> i+1), filling any missing values
        for i in xrange(self.n-1):
            # leg distance; the *0.53996 km->nm conversion below implies km
            self.dist[i+1] = mu.spherical_dist([self.lat[i],self.lon[i]],[self.lat[i+1],self.lon[i+1]])
            # altitude: metric wins, then kft, else guess a cruise altitude
            if np.isfinite(self.alt.astype(float)[i+1]):
                self.alt_kft[i+1] = self.alt[i+1]*3.28084/1000.0
            elif np.isfinite(self.alt_kft.astype(float)[i+1]):
                self.alt[i+1] = self.alt_kft[i+1]*1000.0/3.28084
            else:
                self.alt[i+1] = self.get_alt(self.alt[0],self.alt[i])
                self.alt_kft[i+1] = self.alt[i+1]*3.28084/1000.0
            # speed: m/s wins, then knots, else estimate from the altitudes
            if np.isfinite(self.speed.astype(float)[i+1]):
                self.speed_kts[i+1] = self.speed[i+1]*1.94384449246
            elif np.isfinite(self.speed_kts.astype(float)[i+1]):
                self.speed[i+1] = self.speed_kts[i+1]/1.94384449246
            else:
                self.speed[i+1] = self.calcspeed(self.alt[i],self.alt[i+1])
                self.speed_kts[i+1] = self.speed[i+1]*1.94384449246
            # bearing at the start of the leg, and the reversed bearing on arrival
            self.bearing[i] = mu.bearing([self.lat[i],self.lon[i]],[self.lat[i+1],self.lon[i+1]])
            self.endbearing[i] = (mu.bearing([self.lat[i+1],self.lon[i+1]],[self.lat[i],self.lon[i]])+180)%360.0
            try:
                self.bearing[i+1] = mu.bearing([self.lat[i+1],self.lon[i+1]],[self.lat[i+2],self.lon[i+2]])
            except:
                # last waypoint has no outgoing leg: keep the arrival bearing
                self.bearing[i+1] = self.endbearing[i]
            try:
                self.turn_deg[i+1] = abs(self.endbearing[i]-self.bearing[i+1])
            except:
                self.turn_deg[i+1] = 0.0
            # time [min] spent turning at the waypoint, used as the default delay
            self.turn_time[i+1] = (self.turn_deg[i+1]/self.rate_of_turn)/60.0
            if not np.isfinite(self.delayt.astype(float)[i+1]):
                self.delayt[i+1] = self.turn_time[i+1]
            #else:
            #    self.delayt[i+1] = self.delayt[i+1]+self.turn_time[i+1]
            self.climb_time[i+1] = self.calc_climb_time(self.alt[i],self.alt[i+1]) #defaults to P3 speed
            # leg time [h]; never shorter than the climb time, plus the delay
            self.legt[i+1] = (self.dist[i+1]/(self.speed[i+1]/1000.0))/3600.0
            if self.legt[i+1] < self.climb_time[i+1]/60.0:
                self.legt[i+1] = self.climb_time[i+1]/60.0
            self.legt[i+1] += self.delayt[i+1]/60.0
            self.utc[i+1] = self.utc[i]+self.legt[i+1]
            if not np.isfinite(self.utc[i+1]):
                # debugging hook: a non-finite time means a bad input upstream
                print self.utc
                import pdb; pdb.set_trace()
        self.local = self.utc+self.UTC_conversion
        self.dist_nm = self.dist*0.53996
        self.cumdist = self.dist.cumsum()
        self.cumdist_nm = self.dist_nm.cumsum()
        self.cumlegt = np.nan_to_num(self.legt).cumsum()
        self.datetime = self.calcdatetime()
        self.sza,self.azi = mu.get_sza_azi(self.lat,self.lon,self.datetime)
        # refresh the excel-formatted (fraction of day) time arrays
        self.time2xl()
def force_calcspeed(self):
"""
Program to override the current speed written in and calculate a new one
"""
self.n = len(self.lon)
for i in xrange(self.n-1):
self.speed[i+1] = self.calcspeed(self.alt[i],self.alt[i+1])
self.speed_kts[i+1] = self.speed[i+1]*1.94384449246
def calcspeed(self,alt0,alt1):
"""
Simple program to estimate the speed of the aircraft:
P3 from Steven Howell based on TRACE-P
ER2 from Samuel LeBlanc based on SEAC4RS
"""
if self.p_info.get('base_speed'):
TAS = self.p_info['base_speed'] + alt1*self.p_info['speed_per_alt']
if alt1>self.p_info['max_speed_alt']:
TAS = self.p_info['max_speed']
if alt1>alt0+200.0:
TAS = TAS-self.p_info['descent_speed_decrease']
else:
if self.platform=='p3':
TAS = 130.0+alt1/1000.0*7.5
if alt1>6000.0:
TAS = 130.0+6*7.5
if alt1>alt0+200.0:
TAS = TAS -15.0
elif self.platform=='er2':
TAS = 70+alt0*0.0071
else:
TAS = 130.0
if not np.isfinite(TAS):
TAS = 130.0
return TAS
def get_alt(self,alt0,alti):
'Program to guesstimate the cruising altitude'
if alti!=alt0:
return alti
if self.p_info.get('max_alt'):
return self.p_info['max_alt']
else:
if self.platform=='p3':
return 7500.0
elif self.platform=='er2':
return 19000.0
elif self.platform=='c130':
return 7500.0
elif self.platform=='dc8':
return 13000.0
else:
return alti
    def calc_climb_time(self,alt0,alt1):
        """
        Simple program to calculate the climb/descent time from previous missions
        Uses parameterization for P3 and ER2 for now.
        Default parameters are used when nothing is set.
        Uses altitude from previous point (alt0) and next point (alt1) in meters
        returns minutes of climb/descent time
        """
        # determine the direction of the altitude change
        if alt1>alt0:
            climb = True
            if not alt1: climb = False
        else:
            climb = False
        if self.p_info.get('climb_vert_speed'):
            # vertical speeds from the platform file
            if climb:
                if alt1>self.p_info['alt_for_variable_vert_speed']:
                    # climb rate decreases linearly with the mean leg altitude
                    speed = self.p_info['vert_speed_base']-\
                            self.p_info['vert_speed_per_alt']*(alt1+alt0)/2.0
                else:
                    speed = self.p_info['climb_vert_speed']
            else:
                speed = self.p_info['descent_vert_speed']
        else:
            # built-in per-platform fallbacks
            if self.platform=='p3':
                if climb:
                    if alt1 > 6000:
                        speed = 4.5-7e-05*(alt1+alt0)/2.0
                    else:
                        speed = 5.0
                else:
                    speed = -5.0
            elif self.platform=='er2':
                if climb:
                    speed = 24.0-0.0011*(alt1+alt0)/2.0
                else:
                    speed = -10.0
            elif self.platform=='dc8':
                if climb:
                    speed = 15.0-0.001*(alt1+alt0)/2.0
                else:
                    speed = -10.0
            elif self.platform=='c130':
                if climb:
                    speed = 10.0-0.001*(alt1+alt0)/2.0
                else:
                    speed = -10.0
            else:
                if climb:
                    speed = 5.0
                else:
                    speed = -5.0
        # altitude delta [m] over vertical speed gives seconds; /60 -> minutes
        climb_time = (alt1-alt0)/speed/60.0
        if not np.isfinite(climb_time):
            # guard against a zero/undefined vertical speed
            climb_time = 5.0
            print 'climb time not finite for platform: %s, alt0:%f, alt1:%f' % (self.platform,alt0,alt1)
        return climb_time
    def calcdatetime(self):
        """
        Program to convert a utc time and datestr to datetime object

        Returns a list of datetime objects, one per waypoint, built from the
        fractional-hour utc array and the datestr attribute (yyyy-mm-dd).
        """
        from datetime import datetime
        dt = []
        for i,u in enumerate(self.utc):
            Y,M,D = [int(s) for s in self.datestr.split('-')]
            try:
                hh = int(u)
            except ValueError:
                # skip waypoints whose time cannot be interpreted
                print 'Problem on line :%i with value %f'%(i,u)
                continue
            # split the fractional hours into minutes and seconds
            mm = int((u-hh)*60.0)
            ss = int(((u-hh)*60.0-mm)*60.0)
            # NOTE(review): ms is milliseconds (*1000) but is passed as the
            # datetime microsecond argument below - confirm intended
            ms = int((((u-hh)*60.0-mm)*60.0-ss)*1000.0)
            # roll times past midnight into the next day
            # NOTE(review): D+1 can exceed the month length; the except below
            # then retries with the datestr re-read from the spreadsheet
            while hh > 23:
                hh = hh-24
                D = D+1
            try:
                dt.append(datetime(Y,M,D,hh,mm,ss,ms))
            except ValueError:
                print 'Problem on line: %i with datetime for datestr: %s' %(i,self.datestr)
                print Y,M,D
                self.get_datestr_from_xl()
                Y,M,D = [int(s) for s in self.datestr.split('-')]
                try:
                    dt.append(datetime(Y,M,D,hh,mm,ss,ms))
                except ValueError:
                    print 'Big problem on 2nd try of calcdatetime with datestr, line: %i'%i
                    continue
        return dt
def time2xl(self):
"""
Convert the UTC fractional hours to hh:mm format for use in excel
"""
self.cumlegt_xl = self.cumlegt/24.0
self.utc_xl = self.utc/24.0
self.local_xl = self.local/24.0
self.legt_xl = self.legt/24.0
    def write_to_excel(self):
        """
        writes out the dict_position class values to excel spreadsheet

        Writes all waypoint arrays as one block starting at A2 (one row per
        waypoint), then the comments column and the time/number formats.
        """
        import numpy as np
        from xlwings import Range
        self.wb.set_current()
        # one array per column; transposed so each waypoint becomes a row
        Range('A2').value = np.array([self.WP,
                                      self.lat,
                                      self.lon,
                                      self.speed,
                                      self.delayt,
                                      self.alt,
                                      self.cumlegt_xl,
                                      self.utc_xl,
                                      self.local_xl,
                                      self.legt_xl,
                                      self.dist,
                                      self.cumdist,
                                      self.dist_nm,
                                      self.cumdist_nm,
                                      self.speed_kts,
                                      self.alt_kft,
                                      self.sza,
                                      self.azi,
                                      self.bearing,
                                      self.climb_time
                                      ]).T
        # comments go to column U, one cell at a time
        for i,c in enumerate(self.comments):
            Range('U%i'%(i+2)).value = c
        # time columns shown as hh:mm; delay column as integer minutes
        Range('G2:J%i'% (self.n+1)).number_format = 'hh:mm'
        Range('E2:E%i'% (self.n+1)).number_format = '0'
        Range('B:B').autofit('c')
        Range('C:C').autofit('c')
def check_xl(self):
"""
wrapper for checking excel updates.
Reruns check_updates_excel whenever a line is found to be deleted
"""
while self.check_updates_excel():
if self.verbose:
print 'line removed, cutting it out'
    def check_updates_excel(self):
        """
        Check for any change in the excel file
        If there is change, empty out the corresponding calculated areas
        Priority is always given to metric

        Returns True when a row was deleted in excel (caller must re-run),
        False once the sheet and the class are in sync.
        """
        from xlwings import Range
        import numpy as np
        self.wb.set_current()
        # read the currently known block, plus the vertical extents in case
        # the user added rows beyond self.n
        tmp = Range('A2:U%i'%(self.n+1)).value
        tmp0 = Range('A2:U2').vertical.value
        tmp2 = Range('B2:U2').vertical.value
        dim = np.shape(tmp)
        if len(dim)==1:
            # a single row comes back as a flat list; normalize to 2-d
            tmp = [tmp]
            dim = np.shape(tmp)
        dim0 = np.shape(tmp0)
        if len(dim0)==1: dim0 = np.shape([tmp0])
        n0,_ = dim0
        n1,_ = dim
        dim2 = np.shape(tmp2)
        if len(dim2)==1: dim2 = np.shape([tmp2])
        n2,_ = dim2
        if n0>n1:
            # more rows in the sheet (per column A) than known: use them
            tmp = tmp0
        if n2>n0:
            # rows with an empty A column but data in B..U: re-read the block
            tmp2 = Range('A2:U%i'%(n2+1)).value
            if len(np.shape(tmp2))==1:
                tmp = [tmp2]
            else:
                tmp = tmp2
            if self.verbose:
                print 'updated to the longer points on lines:%i' %n2
        if self.verbose:
            print 'vertical num: %i, range num: %i' %(n0,n1)
        num = 0
        num_del = 0
        for i,t in enumerate(tmp):
            if len(t)<16: continue
            # unpack the row; the later columns may be absent on older sheets
            wp,lat,lon,sp,dt,alt,clt,utc,loc,lt,d,cd,dnm,cdnm,spkt,altk = t[0:16]
            try:
                sza,azi,bear,clbt,comm = t[16:21]
            except:
                sza,azi,comm = t[16:19]
            if wp > self.n:
                # new row appended by the user
                num = num+1
                self.appends(lat,lon,sp,dt,alt,clt,utc,loc,lt,d,cd,dnm,cdnm,spkt,altk,comm=comm)
            elif not wp: # check if empty
                if not lat:
                    # fully empty row: a line was deleted in excel
                    num = num+1
                    self.dels(i)
                    self.move_xl(i)
                    self.n = self.n-1
                    return True
                else:
                    # row without a WP number but with data: treat as new
                    num = num+1
                    self.appends(lat,lon,sp,dt,alt,clt,utc,loc,lt,d,cd,dnm,cdnm,spkt,altk,comm=comm)
            else:
                # existing row: sync any user edits into the class
                changed = self.mods(i,lat,lon,sp,spkt,dt,alt,altk,comm)
                if i == 0:
                    # first waypoint carries the absolute takeoff time
                    if self.utc[i] != utc*24.0:
                        self.utc[i] = utc*24.0
                        changed = True
                if changed: num = num+1
                if self.verbose:
                    print 'Modifying line #%i' %i
        if self.n>(i+1):
            # sheet now shorter than the class: trim the extra points
            if self.verbose:
                print 'deleting points'
            for j in range(i+1,self.n-1):
                self.dels(j)
                self.n = self.n-1
                num = num+1
        if num>0:
            if self.verbose:
                print 'Updated %i lines from Excel, recalculating and printing' % num
            self.calculate()
            self.write_to_excel()
        self.num_changed = num
        return False
    def move_xl(self,i):
        """
        Program that moves up all excel rows by one line overriding the ith line

        i is the 0-based index of the deleted waypoint; sheet rows are 1-based
        with one header row, hence the +2/+3 offsets below.
        """
        from xlwings import Range
        # everything below the deleted row
        linesbelow = Range('A%i:U%i'%(i+3,self.n+1)).value
        n_rm = (self.n+1)-(i+3)  # NOTE(review): computed but never used
        linelist = False
        for j,l in enumerate(linesbelow):
            if type(l) is list:
                try:
                    # decrement the waypoint number in the first column
                    l[0] = l[0]-1
                except:
                    yup = True  # placeholder: leave non-numeric WP cells alone
                linesbelow[j] = l
                linelist = True
        if not linelist:
            # single row: xlwings returned a flat list, not a list of lists
            try:
                linesbelow[0] = linesbelow[0]-1
            except:
                yup = True
        # write the shifted rows one row up, then blank the duplicated last row
        Range('A%i:U%i'%(i+2,i+2)).value = linesbelow
        Range('A%i:U%i'%(self.n+1,self.n+1)).clear_contents()
def dels(self,i):
"""
program to remove the ith item in every object
"""
import numpy as np
if i+1>len(self.lat):
print '** Problem: index out of range **'
return
self.lat = np.delete(self.lat,i)
self.lon = np.delete(self.lon,i)
self.speed = np.delete(self.speed,i)
self.delayt = np.delete(self.delayt,i)
self.alt = np.delete(self.alt,i)
self.alt_kft = np.delete(self.alt_kft,i)
self.speed_kts = np.delete(self.speed_kts,i)
self.cumlegt = np.delete(self.cumlegt,i)
self.utc = np.delete(self.utc,i)
self.local = np.delete(self.local,i)
self.legt = np.delete(self.legt,i)
self.dist = np.delete(self.dist,i)
self.cumdist = np.delete(self.cumdist,i)
self.dist_nm = np.delete(self.dist_nm,i)
self.cumdist_nm = np.delete(self.cumdist_nm,i)
self.bearing = np.delete(self.bearing,i)
self.endbearing = np.delete(self.endbearing,i)
self.turn_deg = np.delete(self.turn_deg,i)
self.turn_time = np.delete(self.turn_time,i)
self.climb_time = np.delete(self.climb_time,i)
self.sza = np.delete(self.sza,i)
self.azi = np.delete(self.azi,i)
self.comments.pop(i)
try:
self.WP = np.delete(self.WP,i)
except:
self.WP = range(1,len(self.lon))
#print 'deletes, number of lon left:%i' %len(self.lon)
def appends(self,lat,lon,sp=None,dt=None,alt=None,
clt=None,utc=None,loc=None,lt=None,d=None,cd=None,
dnm=None,cdnm=None,spkt=None,altk=None,
bear=0.0,endbear=0.0,turnd=0.0,turnt=0.0,climbt=0.0,
sza=None,azi=None,comm=None):
"""
Program that appends to the current class with values supplied, or with defaults from the command line
"""
import numpy as np
self.lat = np.append(self.lat,lat)
self.lon = np.append(self.lon,lon)
self.speed = np.append(self.speed,sp)
self.delayt = np.append(self.delayt,dt)
self.alt = np.append(self.alt,alt)
if not clt: clt = np.nan
if not utc: utc = np.nan
if not loc: loc = np.nan
if not lt: lt = np.nan
self.cumlegt = np.append(self.cumlegt,clt*24.0)
self.utc = np.append(self.utc,utc*24.0)
self.local = np.append(self.local,loc*24.0)
self.legt = np.append(self.legt,lt*24.0)
self.dist = np.append(self.dist,d)
self.cumdist = np.append(self.cumdist,cd)
self.dist_nm = np.append(self.dist_nm,dnm)
self.cumdist_nm = np.append(self.cumdist_nm,cdnm)
self.speed_kts = np.append(self.speed_kts,spkt)
self.alt_kft = np.append(self.alt_kft,altk)
self.bearing = np.append(self.bearing,bear)
self.endbearing = np.append(self.endbearing,endbear)
self.turn_deg = np.append(self.turn_deg,turnd)
self.turn_time = np.append(self.turn_time,turnt)
self.climb_time = np.append(self.climb_time,climbt)
self.sza = np.append(self.sza,sza)
self.azi = np.append(self.azi,azi)
self.comments.append(comm)
def inserts(self,i,lat,lon,sp=None,dt=None,alt=None,
clt=None,utc=None,loc=None,lt=None,d=None,cd=None,
dnm=None,cdnm=None,spkt=None,altk=None,
bear=0.0,endbear=0.0,turnd=0.0,turnt=0.0,climbt=0.0,
sza=None,azi=None,comm=None):
"""
Program that appends to the current class with values supplied, or with defaults from the command line
"""
import numpy as np
self.lat = np.insert(self.lat,i,lat)
self.lon = np.insert(self.lon,i,lon)
self.speed = np.insert(self.speed,i,sp)
self.delayt = np.insert(self.delayt,i,dt)
self.alt = np.insert(self.alt,i,alt)
if not clt: clt = np.nan
if not utc: utc = np.nan
if not loc: loc = np.nan
if not lt: lt = np.nan
self.cumlegt = np.insert(self.cumlegt,i,clt*24.0)
self.utc = np.insert(self.utc,i,utc*24.0)
self.local = np.insert(self.local,i,loc*24.0)
self.legt = np.insert(self.legt,i,lt*24.0)
self.dist = np.insert(self.dist,i,d)
self.cumdist = np.insert(self.cumdist,i,cd)
self.dist_nm = np.insert(self.dist_nm,i,dnm)
self.cumdist_nm = np.insert(self.cumdist_nm,i,cdnm)
self.speed_kts = np.insert(self.speed_kts,i,spkt)
self.alt_kft = np.insert(self.alt_kft,i,altk)
self.bearing = np.insert(self.bearing,i,bear)
self.endbearing = np.insert(self.endbearing,i,endbear)
self.turn_deg = np.insert(self.turn_deg,i,turnd)
self.turn_time = np.insert(self.turn_time,i,turnt)
self.climb_time = np.insert(self.climb_time,i,climbt)
self.sza = np.insert(self.sza,i,sza)
self.azi = np.insert(self.azi,i,azi)
self.comments.insert(i,comm)
    def mods(self,i,lat=None,lon=None,sp=None,spkt=None,
             dt=None,alt=None,altk=None,comm=None):
        """
        Program to modify the contents of the current class if
        there is an update on the line, defned by i
        If anything is not input, then the default of NaN is used
        comments are treated as none

        Returns True if any stored value was changed.
        """
        import numpy as np
        if i+1>len(self.lat):
            print '** Problem with index too large in mods **'
            return
        changed = False
        # metric has priority: once a metric value is taken, the matching
        # aviation-unit cell is ignored for this row
        compare_altk = True
        compare_speedk = True
        # fields to blank (np.nan) so calculate() re-derives their counterparts
        self.toempty = {'speed':0,'delayt':0,'alt':0,'speed_kts':0,'alt_kft':0}
        if lat is None: lat = np.nan
        if lon is None: lon = np.nan
        if sp is None: sp = np.nan
        if spkt is None: spkt = np.nan
        if dt is None: dt = np.nan
        if alt is None: alt = np.nan
        if altk is None: altk = np.nan
        if self.lat[i] != lat:
            self.lat[i] = lat
            changed = True
        if self.lon[i] != lon:
            self.lon[i] = lon
            changed = True
        if self.speed[i] != sp:
            if np.isfinite(sp):
                self.speed[i] = sp
                # new m/s value: the knots cell must be recomputed
                self.toempty['speed_kts'] = 1
                compare_speedk = False
                changed = True
        if self.speed_kts[i] != spkt:
            if np.isfinite(spkt)&compare_speedk:
                self.speed_kts[i] = spkt
                self.toempty['speed'] = 1
                changed = True
        if self.delayt[i] != dt:
            # the first waypoint has no delay to edit
            if i != 0:
                self.delayt[i] = dt
                changed = True
        if self.alt[i] != alt:
            if np.isfinite(alt):
                self.alt[i] = alt
                # new metric altitude: the kft cell must be recomputed
                self.toempty['alt_kft'] = 1
                compare_altk = False
                changed = True
        if self.alt_kft[i] != altk:
            if np.isfinite(altk)&compare_altk:
                self.alt_kft[i] = altk
                self.toempty['alt'] = 1
                changed = True
        # blank out the superseded fields so calculate() fills them back in
        for s in self.toempty:
            if self.toempty.get(s):
                v = getattr(self,s)
                v[i] = np.nan
                setattr(self,s,v)
        if not self.comments[i] == comm:
            if comm:
                self.comments[i] = comm
                changed = True
        return changed
def Open_excel(self,filename=None,sheet_num=1,campaign='None',platform_file='platform.txt'):
"""
Purpose:
Program that opens and excel file and creates the proper links with pytho
Inputs:
filename of excel file to open
sheet_num: what sheet to activate and load
campaign: (optional) if set, does ask to verify the campaign for each sheet
Outputs:
wb: workbook instance
Dependencies:
xlwings
Excel (win or mac)
re
tkSimpleDialog (for datestr)
datetime
Example:
...
History:
Written: Samuel LeBlanc, 2015-08-18, NASA Ames, CA
Modified: Samuel LeBlanc, 2016-06-07, NASA Ames, CA
- updated to handle the new excel format with climb time and bearing
- added datestr checking and dialog interface
Modified: Samuel LeBlanc, 2016-07-28, NASA Ames, CA
- updated to handle the platform file definitions and check on utc_conversion factor
"""
from xlwings import Workbook, Sheet, Range
import numpy as np
if not filename:
print 'No filename found'
return
try:
wb = Workbook(filename)
except Exception,ie:
print 'Exception found:',ie
return
self.name = Sheet(sheet_num).name
Sheet(sheet_num).activate()
print 'Activating sheet:%i, name:%s'%(sheet_num,Sheet(sheet_num).name)
self.platform, self.p_info,use_file = self.get_platform_info(self.name,platform_file)
print 'Using platform data for: %s' %self.platform
self.datestr = str(Range('W1').value).split(' ')[0]
self.verify_datestr()
if campaign is not 'None':
self.campaign
else:
self.campaign = str(Range('X1').value).split(' ')[0]
self.verify_campaign()
self.UTC_conversion = self.verify_UTC_conversion()
return wb
def verify_datestr(self):
'Verify the input datestr is correct'
import re
import tkSimpleDialog
if not self.datestr:
self.datestr = tkSimpleDialog.askstring('Flight Date','No datestring found!\nPlease input Flight Date (yyyy-mm-dd):')
if not re.match('[0-9]{4}-[0-9]{2}-[0-9]{2}',self.datestr):
self.datestr = tkSimpleDialog.askstring('Flight Date','No datestring found!\nPlease input Flight Date (yyyy-mm-dd):')
if not self.datestr:
print 'No datestring found! Using todays date'
from datetime import datetime
self.datestr = datetime.utcnow().strftime('%Y-%m-%d')
    def verify_campaign(self):
        'verify the input campaign value'
        import tkSimpleDialog
        # blocking dialog pre-filled with the current value; the user's answer
        # (None if cancelled) replaces self.campaign as-is
        self.campaign = tkSimpleDialog.askstring('Campaign name','Please verify campaign name:',initialvalue=self.campaign)
def verify_UTC_conversion(self):
'verify the input UTC conversion when reading a excel file'
from xlwings import Range
tmp0 = Range('A2:U2').value
_,_,_,_,_,_,_,utc,loc,_,_,_,_,_,_,_ = tmp0[0:16]
return loc*24-utc*24
    def Create_excel(self,name='P3 Flight path',newsheetonly=False):
        """
        Purpose:
            Program that creates the link to an excel file
            Starts and populates the first line and titles of the excel workbook
        Inputs:
            name: sheet name to create
            newsheetonly: when True, add a sheet to the current workbook
                          instead of creating a new workbook
        Outputs:
            wb: workbook instance
        Dependencies:
            xlwings
            Excel (win or mac)
        Required files:
            none
        Example:
            ...
        Modification History:
            Written: Samuel LeBlanc, 2015-07-15, Santa Cruz, CA
            Modified: Samuel LeBlanc, 2015-08-07, Santa Cruz, CA
                    - put into the dic_position class, modified slightly
            Modified: Samuel LeBlanc, 2015-08-25, NASA Ames, CA
                    - modify to permit creation of a new sheet within the current workbook
        """
        from xlwings import Workbook, Sheet, Range, Chart
        import numpy as np
        if newsheetonly:
            Sheet(1).add(name=name)
            self.sheet_num = self.sheet_num+1
            wb = Workbook.current()
        else:
            wb = Workbook()
            self.name = name
            Sheet(1).name = self.name
        # header row: one title per waypoint column (A..U)
        Range('A1').value = ['WP','Lat\n[+-90]','Lon\n[+-180]',
                             'Speed\n[m/s]','delayT\n[min]','Altitude\n[m]',
                             'CumLegT\n[hh:mm]','UTC\n[hh:mm]','LocalT\n[hh:mm]',
                             'LegT\n[hh:mm]','Dist\n[km]','CumDist\n[km]',
                             'Dist\n[nm]','CumDist\n[nm]','Speed\n[kt]',
                             'Altitude\n[kft]','SZA\n[deg]','AZI\n[deg]',
                             'Bearing\n[deg]','ClimbT\n[min]','Comments']
        top_line = Range('A1').horizontal
        address = top_line.get_address(False,False)
        from sys import platform
        if platform.startswith('win'):
            # windows only: freeze the header row and bold it via COM
            from win32com.client import Dispatch
            xl = Dispatch("Excel.Application")
            # xl.ActiveWorkbook.Windows(1).SplitColumn = 0.4
            xl.ActiveWorkbook.Windows(1).SplitRow = 1.0
            xl.Range(address).Font.Bold = True
        top_line.autofit()
        Range('G2:J2').number_format = 'hh:mm'
        # metadata cells used when reopening the workbook later
        Range('W1').value = self.datestr
        Range('X1').value = self.campaign
        Range('Z1').value = 'Created with'
        Range('Z2').value = 'moving_lines'
        Range('Z3').value = self.__version__
        Range('W:W').autofit('c')
        Range('X:X').autofit('c')
        Range('Z:Z').autofit('c')
        #Range('A2').value = np.arange(50).reshape((50,1))+1
        return wb
    def switchsheet(self,i):
        'Switch the active sheet to the supplied zero-based index'
        from xlwings import Sheet
        # i is zero-based; xlwings sheets are one-based
        Sheet(i+1).activate()
    def save2xl(self,filename=None):
        """
        Simple to program to initiate the save function in Excel
        Same as save button in Excel
        """
        # delegate to the xlwings workbook; filename may be None
        self.wb.save(filename)
def get_datestr_from_xl(self):
'Simple program to get the datestr from the excel spreadsheet'
self.datestr = str(Range('W1').value).split(' ')[0]
def save2txt(self,filename=None):
"""
Simple method to save the points to a text file.
For input with idl and matlab
"""
f = open(filename,'w+')
f.write('#WP Lon[+-180] Lat[+-90] Speed[m/s] delayT[min] Altitude[m]'+
' CumLegT[H] UTC[H] LocalT[H]'+
' LegT[H] Dist[km] CumDist[km]'+
' Dist[nm] CumDist[nm] Speed[kt]'+
' Altitude[kft] SZA[deg] AZI[deg] Bearing[deg] Climbt[min] Comments\n')
for i in xrange(self.n):
f.write("""%-2i %+2.8f %+2.8f %-4.2f %-3i %-5.1f %-2.2f %-2.2f %-2.2f %-2.2f %-5.1f %-5.1f %-5.1f %-5.1f %-3.1f %-3.2f %-3.1f %-3.1f %-3.1f %-3i %s \n""" %(
i+1,self.lon[i],self.lat[i],self.speed[i],
self.delayt[i],self.alt[i],self.cumlegt[i],
self.utc[i],self.local[i],self.legt[i],
self.dist[i],self.cumdist[i],self.dist_nm[i],self.cumdist_nm[i],
self.speed_kts[i],self.alt_kft[i],self.sza[i],self.azi[i],self.bearing[i],self.climb_time[i],self.comments[i]))
    def save2kml(self,filename=None):
        """
        Program to save the points contained in the spreadsheet to a kml file

        Iterates over every sheet in the workbook, syncing and recalculating
        each before writing its points and path; then saves kml/kmz and tries
        to open the result in Google Earth (once per session).
        """
        import simplekml
        from xlwings import Sheet
        if not filename:
            raise NameError('filename not defined')
            return  # NOTE(review): unreachable after the raise above
        if not self.netkml:
            # one-time setup of a network-link kml pointing at the real file
            self.netkml = simplekml.Kml(open=1)
            self.netkml.name = 'Flight plan on '+self.datestr
            net = self.netkml.newnetworklink(name=self.datestr)
            net.link.href = filename
            net.link.refreshmode = simplekml.RefreshMode.onchange
            net.link.camera = simplekml.Camera(latitude=self.lat[0], longitude=self.lon[0], altitude=3000.0, roll=0, tilt=0,
                                               altitudemode=simplekml.AltitudeMode.relativetoground)
            filenamenet = filename+'_net.kml'
            #self.netkml.save(filenamenet)
        self.kml = simplekml.Kml()
        # one folder per sheet/flight path
        for j in xrange(Sheet.count()):
            self.switchsheet(j)
            self.name = Sheet(j+1).name
            self.check_xl()
            self.calculate()
            self.kmlfolder = self.kml.newfolder(name=self.name)
            #self.kml.document = simplekml.Folder(name = self.name)
            self.print_points_kml(self.kmlfolder)
            self.print_path_kml(self.kmlfolder,color=self.color,j=j)
        self.kml.camera = simplekml.Camera(latitude=self.lat[0], longitude=self.lon[0], altitude=3000.0, roll=0, tilt=0,
                                           altitudemode=simplekml.AltitudeMode.relativetoground)
        self.kml.save(filename)
        try:
            self.kml.savekmz(filename.replace('kml','kmz'))
        except:
            print 'saving kmz didnt work'
        if not self.googleearthopened:
            #self.openGoogleEarth(filenamenet)
            try:
                self.openGoogleEarth(filename.replace('kml','kmz'))
                self.googleearthopened = True
            except:
                # do not retry on later saves even if opening failed
                print 'Not able to open google earth'
                self.googleearthopened = True
    def print_points_kml(self,folder):
        """
        print the points saved in lat, lon

        Adds one kml point per waypoint to the supplied folder, with a
        numbered icon and a description of the computed leg values.
        """
        import simplekml
        from excel_interface import get_curdir
        if not self.kml:
            raise NameError('kml not initilaized')
            return  # NOTE(review): unreachable after the raise above
        for i in xrange(self.n):
            pnt = folder.newpoint()
            pnt.name = 'WP # {}'.format(self.WP[i])
            # altitude exaggerated 10x so points stand out in google earth
            pnt.coords = [(self.lon[i],self.lat[i],self.alt[i]*10.0)]
            pnt.altitudemode = simplekml.AltitudeMode.relativetoground
            pnt.extrude = 1
            try:
                # embed the numbered icon into the kml; fall back to a path
                path = self.kml.addfile(get_curdir()+'//map_icons//number_{}.png'.format(self.WP[i]))
                pnt.style.iconstyle.icon.href = path
            except:
                pnt.style.iconstyle.icon.href = get_curdir()+'//map_icons//number_{}.png'.format(self.WP[i])
            pnt.description = """UTC[H]=%2.2f\nLocal[H]=%2.2f\nCumDist[km]=%f\nspeed[m/s]=%4.2f\ndelayT[min]=%f\nSZA[deg]=%3.2f\nAZI[deg]=%3.2f\nBearing[deg]=%3.2f\nClimbT[min]=%f\nComments:%s""" % (self.utc[i],self.local[i],self.cumdist[i],
                               self.speed[i],self.delayt[i],self.sza[i],
                               self.azi[i],self.bearing[i],self.climb_time[i],self.comments[i])
def print_path_kml(self,folder,color='red',j=0):
"""
print the path onto a kml file
"""
import simplekml
import numpy as np
cls = [simplekml.Color.red,simplekml.Color.blue,simplekml.Color.green,simplekml.Color.cyan,
simplekml.Color.magenta,simplekml.Color.yellow,simplekml.Color.black,simplekml.Color.lightcoral,
simplekml.Color.teal,simplekml.Color.darkviolet,simplekml.Color.orange]
path = folder.newlinestring(name=self.name)
coords = [(lon,lat,alt*10) for (lon,lat,alt) in np.array((self.lon,self.lat,self.alt)).T]
path.coords = coords
path.altitudemode = simplekml.AltitudeMode.relativetoground
path.extrude = 1
path.style.linestyle.color = cls[j]
path.style.linestyle.width = 4.0
def openGoogleEarth(self,filename=None):
"""
Function that uses either COM object or appscript (not yet implemented)
to load the new Google Earth kml file
"""
if not filename:
print 'no filename defined, returning'
return
from sys import platform
from os import startfile
if platform.startswith('win'):
try:
from win32com.client import Dispatch
ge = Dispatch("GoogleEarth.ApplicationGE")
ge.OpenKmlFile(filename,True)
except:
startfile(filename)
else:
startfile(filename)
def save2gpx(self,filename=None):
'Program to save the waypoints and track in gpx format'
if not filename:
print '** no filename selected, returning without saving **'
return
import gpxpy as g
import gpxpy.gpx as gg
f = gg.GPX()
route = gg.GPXRoute(name=self.datestr)
for i,w in enumerate(self.WP):
rp = gg.GPXRoutePoint(name='WP#%i'%w,latitude=self.lat[i],
longitude=self.lon[i],
elevation = self.alt[i],
time = self.utc2datetime(self.utc[i]),
comments = self.comments[i]
)
route.points.append(rp)
f.routes.append(route)
fp = open(filename,'w')
fp.write(f.to_xml())
fp.close()
print 'GPX file saved to:'+filename
    def save2ict(self,filepath=None):
        'Program to save the flight track as simulated ict file. Similar to what is returned from flights'
        from datetime import datetime
        import getpass
        import re
        if not filepath:
            print '** no filepath selected, returning without saving **'
            return
        dt = 60 #seconds
        # setup data dict: each entry carries the raw waypoint values plus the
        # ICARTT metadata (unit, long_description, optional format)
        dict_in = {'Start_UTC':{'original_data':self.utc*3600.0,'unit':'seconds from midnight UTC','long_description':'time keeping'},
                   'Latitude':{'original_data':self.lat,'unit':'Degrees (North positive)','long_description':'Planned latitude position of the aircraft','format':'4.9f'},
                   'Longitude':{'original_data':self.lon,'unit':'Degrees (East positive)','long_description':'Planned longitude position of the aircraft','format':'4.9f'},
                   'Altitude':{'original_data':self.alt,'unit':'meters (above sea level)','long_description':'Planned altitude of the aircraft','format':'5.0f'},
                   'speed':{'original_data':self.speed,'unit':'meters per second (m/s)','long_description':'Estimated speed of aircraft'},
                   'SZA':{'original_data':self.sza,'unit':'degrees from zenith','long_description':'Elevation position of the sun in the sky per respect to zenith'},
                   'AZI':{'original_data':self.azi,'unit':'degrees from north','long_description':'Azimuthal position of the sun in the sky per respect to north'},
                   'Bearing':{'original_data':self.bearing,'unit':'degrees from north','long_description':'Direction of travel of the plane per respect to north'}}
        # interpolate the waypoint values onto a regular dt-spaced time grid
        d_dict = self.interp_points_for_ict(dict_in,dt=dt)
        # setup header dict for the ICARTT header block
        hdict = {'PI':getpass.getuser(),
                 'Institution':'NASA Ames Research Center',
                 'Instrument':'Simulated flight plan',
                 'campaign':self.campaign,
                 'time_interval':dt,
                 'now':datetime.strptime(self.datestr,'%Y-%m-%d'),
                 'special_comments':'Simulated aircraft data interpolated from flight plan waypoints',
                 'PI_contact':getpass.getuser(),
                 'platform':self.platform,
                 'location':'N/A',
                 'instrument_info':'None',
                 'data_info':'Compiled with flight planner: moving lines {version}'.format(version=self.__version__),
                 'uncertainty':'Undefined',
                 'DM_contact':'Samuel LeBlanc, samuel.leblanc@nasa.gov',
                 'project_info':self.campaign,
                 'stipulations':'None',
                 'rev_comments':""" RA: First iteration of the flight plan"""}
        order = ['Latitude','Longitude','Altitude','speed','Bearing','SZA','AZI']
        # file comment: flight name with the platform name removed
        fcomment = self.name.upper().replace(self.platform.upper(),'').strip('_').strip('-').strip()
        # 'R?' is a glob wildcard; get_next_revision matches existing files to
        # decide the next revision letter
        rev = get_next_revision(filepath+'//'+'{data_id}_{loc_id}_{date}_{rev}{file_comment}.ict'.format(data_id='{}-Flt-plan'.format(self.campaign),loc_id=self.platform,
                                date=self.datestr.replace('-',''),rev='R?',file_comment=fcomment))
        if hdict['rev_comments'].find(rev)<0:
            # revision number: 'RA' -> 1, 'RB' -> 2, ...
            num = ord(rev[1].lower())-ord('a')+1
            hdict['rev_comments'] = """ {}: Version {} of the flight plan ict \n""".format(rev,num)+hdict['rev_comments']
        # wu is the write_utils module imported at module level (not visible here)
        wu.write_ict(hdict,d_dict,filepath=filepath+'//',
                     data_id='{}-Flt-plan'.format(self.campaign),loc_id=self.platform,
                     date=self.datestr.replace('-',''),rev=rev,order=order,file_comment=fcomment)
def interp_points_for_ict(self,dict_in,dt=60.0):
'Program to interpolate between the waypoints to have a consistent time, defined by dt (defaults to 60 seconds), the variables to be interpolated is defined by dict_in'
utcs = np.arange(self.utc[0]*3600,self.utc[-1]*3600,dt)
# create a dict of points using the input dict as a basis, requires it to have the original_data key for each dict entry
# should be replaced by a interpolator that uses great circles
for k in dict_in.keys():
if k=='Start_UTC':
dict_in[k]['data'] = utcs
else:
fx = interpolate.interp1d(self.utc*3600,dict_in[k]['original_data'],bounds_error=False)
dict_in[k]['data'] = fx(utcs)
return dict_in
def utc2datetime(self,utc):
'Program to convert the datestr and utc to valid datetime class'
from datetime import datetime
y,m,d = self.datestr.split('-')
year = int(y)
month = int(m)
day = int(d)
hour = int(utc)
minut = (utc-hour)*60
minutes = int(minut)
secon = (minut-minutes)*60
seconds = int(secon)
microsec = int((secon-seconds)*100)
return datetime(year,month,day,hour,minutes,seconds,microsec)
    def exremove(self):
        'Program to remove the current Sheet'
        # placeholder: sheet removal is not implemented yet
        print 'Not yet'
        pass
def get_next_revision(fname):
    """
    Return the next revision string for a given ict filename glob pattern.

    fname: glob pattern of existing ict files (revision position holds 'R?').
    Returns 'RA' when nothing matches; otherwise the revision letter of the
    lexicographically last match, incremented by one (e.g. 'RA' -> 'RB').
    """
    import os, glob
    matches = sorted(glob.glob(fname))
    if not matches:
        return 'RA'
    newest = matches[-1]
    # The old code used strip('.ict'), which strips any leading/trailing
    # '.', 'i', 'c', 't' characters and could mangle the basename;
    # splitext removes exactly the extension.
    rev = os.path.splitext(os.path.basename(newest))[0].split('_')[3]
    newrev = rev[0]+chr(ord(rev[1])+1)
    return newrev
def populate_ex_arr(filename=None,colorcycle=['red','blue','green']):
    """
    Purpose:
        Program that opens an excel file, and runs through the sheets
        creates an array of dict_position
    Input:
        filename of excel file
        colorcycle: list of colors cycled through for successive sheets
    Output:
        excel_interface dict_position array
    Dependencies:
        xlwings
    History:
        written: Samuel LeBlanc, NASA Ames, Santa Cruz, CA 2015-09-10
    """
    from xlwings import Workbook,Sheet
    import excel_interface as ex
    arr = []
    wb = Workbook(filename)
    num = Sheet.count()
    for i in range(num):
        # carry the campaign name over from the previous sheet
        if i==0:
            campaign = 'None'
        else:
            campaign = arr[i-1].campaign
        # Pass the carried-over campaign (the old code always sent the
        # literal 'None', discarding the value computed above) and wrap the
        # color cycle so more sheets than colors does not raise IndexError.
        arr.append(ex.dict_position(filename=filename,sheet_num=i+1,
                                    color=colorcycle[i % len(colorcycle)],
                                    campaign=campaign))
    return arr
def save2xl_for_pilots(filename,ex_arr):
    """
    Purpose:
        Program that opens and saves a new excel file, and runs through the current opened sheets
        creates an excel file in the format defined for pilots. format option is defined in the dict_position
    Input:
        filename: filename of new excel file
        ex_arr: array of excel interface dict_position to be saved
    Output:
        new file
    Dependencies:
        xlwings
    History:
        written: Samuel LeBlanc, NASA Ames, CA 2016-07-28
    """
    from xlwings import Workbook,Sheet,Range
    from excel_interface import format_lat_lon
    wb_pilot = Workbook()
    sheet_one = True
    for a in ex_arr:
        # reuse the workbook's initial sheet for the first flight, then add
        # one sheet per additional flight
        if sheet_one:
            Sheet(1).name = a.name
            sheet_one = False
        else:
            Sheet(1).add(name=a.name)
        # header row
        Range('A1').value = ['WP','Lat\n[+-90]','Lon\n[+-180]',
                             'Altitude\n[kft]','Comments']
        top_line = Range('A1').horizontal
        address = top_line.get_address(False,False)
        from sys import platform
        if platform.startswith('win'):
            # Windows-only cosmetics via the Excel COM interface:
            # freeze the header row and make it bold
            from win32com.client import Dispatch
            xl = Dispatch("Excel.Application")
            xl.ActiveWorkbook.Windows(1).SplitRow = 1.0
            xl.Range(address).Font.Bold = True
        top_line.autofit()
        Range('G2:J2').number_format = 'hh:mm'
        # metadata columns off to the right of the waypoint table
        Range('W1').value = a.datestr
        Range('X1').value = a.campaign
        Range('Z1').value = 'Created with'
        Range('Z2').value = 'moving_lines'
        Range('Z3').value = a.__version__
        Range('W:W').autofit('c')
        Range('X:X').autofit('c')
        Range('Z:Z').autofit('c')
        # one row per waypoint, lat/lon formatted per the pilot's preference
        for i in range(len(a.lon)):
            lat_f,lon_f = format_lat_lon(a.lat[i],a.lon[i],format=a.pilot_format)
            if a.delayt[i]>3.0:
                comment = 'delay: {} min, {}'.format(a.delayt[i],a.comments[i])
            else:
                comment = a.comments[i]
            Range('A{:d}'.format(i+2)).value = [a.WP[i],lat_f,lon_f,a.alt_kft[i],comment]
        # i intentionally retains its last loop value: the foreflight line
        # goes two rows below the waypoint table
        Range('A{:d}'.format(i+4)).value = 'One line waypoints for foreflight:'
        Range('A{:d}'.format(i+5)).value = one_line_points(a)
    wb_pilot.save(filename)
    try:
        wb_pilot.close()
    except:
        # best-effort close; Excel may hold the workbook open
        print '** unable to close for_pilots spreadsheet, may need to close manually **'
def format_lat_lon(lat,lon,format='DD MM SS'):
    """
    Format a latitude/longitude pair into strings.

    format: 'DD MM SS' for degrees/minutes/seconds or 'DD MM' for
    degrees/decimal-minutes. Any other value raises ValueError (the old
    code fell through and crashed with NameError on undefined lat_f).
    Returns (lat_string, lon_string).
    """
    if format == 'DD MM SS':
        def deg_to_dms(deg):
            # integer degrees, integer minutes, float seconds
            d = int(deg)
            md = abs(deg - d) * 60
            m = int(md)
            sd = (md - m) * 60
            return [d, m, sd]
        latv = deg_to_dms(lat)
        lonv = deg_to_dms(lon)
        lat_f = '{:02d} {:02d} {:02.3f}'.format(latv[0],latv[1],latv[2])
        lon_f = '{:02d} {:02d} {:02.3f}'.format(lonv[0],lonv[1],lonv[2])
    elif format == 'DD MM':
        def deg_to_dm(deg):
            # integer degrees, float decimal minutes
            d = int(deg)
            md = abs(deg - d) * 60
            return [d, md]
        latv = deg_to_dm(lat)
        lonv = deg_to_dm(lon)
        lat_f = '{:02d} {:02.3f}'.format(latv[0],latv[1])
        lon_f = '{:02d} {:02.3f}'.format(lonv[0],lonv[1])
    else:
        raise ValueError('format must be "DD MM SS" or "DD MM", got: {}'.format(format))
    return lat_f,lon_f
def one_line_points(a):
    """
    Format all waypoints of a dict_position-like object onto one line of
    N/S, E/W degree/decimal-minute pairs for import into ForeFlight.

    a must expose lat and lon sequences of equal length.
    Returns a single space-separated string of 'lat/lon' tokens.
    """
    def deg_to_dm(deg):
        # split degrees into integer degrees and decimal minutes
        d = int(deg)
        md = abs(deg - d) * 60
        return [d, md]
    pts = []
    for lat,lon in zip(a.lat,a.lon):
        latv = deg_to_dm(lat)
        lonv = deg_to_dm(lon)
        # Hemisphere is decided from the raw coordinate: int(deg) is 0 for
        # values in (-1, 1), so the old latv[0]>0 test mislabeled e.g. 0.5N
        # as south. 0 itself maps to N / E.
        lat_f = '{n}{:02d}{:06.3f}'.format(abs(latv[0]),latv[1],n='N' if lat>=0 else 'S')
        lon_f = '{n}{:02d}{:06.3f}'.format(abs(lonv[0]),lonv[1],n='E' if lon>=0 else 'W')
        pts.append(lat_f+'/'+lon_f)
    return ' '.join(pts)
def get_curdir():
    """
    Program that gets the path of the script: for use in finding extra files.

    Falls back to the directory of argv[0] when __file__ is not defined
    (e.g. interactive session or frozen executable); the old truthiness
    test raised NameError in that case instead of reaching the fallback.
    """
    from os.path import dirname, realpath
    from sys import argv
    try:
        path = dirname(realpath(__file__))
    except NameError:
        path = dirname(realpath(argv[0]))
    return path
|
# Thin forwarding shim: the real implementation lives in
# osgeo.utils.gdal_polygonize. This script warns that the standalone entry
# point is deprecated and delegates to the packaged main().
import sys
from osgeo.utils.gdal_polygonize import *  # noqa
from osgeo.utils.gdal_polygonize import main
from osgeo.gdal import deprecation_warn
# emit the deprecation notice pointing users at the osgeo.utils location
deprecation_warn('gdal_polygonize', 'utils')
sys.exit(main(sys.argv))
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the DocStatement model: a text span of a Doc with position,
    optional length, and many-to-many links to Tag and Technology."""
    dependencies = [
        ('scoping', '0199_technology_group'),
    ]
    operations = [
        migrations.CreateModel(
            name='DocStatement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                # character offsets of the statement within the document text
                ('start', models.IntegerField()),
                ('end', models.IntegerField()),
                ('text_length', models.IntegerField(null=True)),
                ('doc', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scoping.Doc')),
                ('tag', models.ManyToManyField(to='scoping.Tag')),
                ('technology', models.ManyToManyField(db_index=True, to='scoping.Technology')),
            ],
        ),
    ]
|
# Pelican configuration for the PyNorte user-group site (extends baseconf).
from __future__ import unicode_literals
import os
import sys
sys.path.append(os.curdir)
from baseconf import *
from collections import OrderedDict
# Site identity and theme selection
SITENAME = u'Python Norte'
AUTHOR = u'PyNorte'
THEME = "themes/malt"
MALT_BASE_COLOR = "blue-grey"
# SEO metadata (Portuguese description of the group)
META_DESCRIPTION = '''O PyNorte é um grupo de usuários (profissionais e
amadores) da linguagem Python, onde prezamos pela troca de
conhecimento, respeito mútuo e diversidade (tanto de opinião
quanto de tecnologias).'''
META_KEYWORDS = ['pynorte', 'python', 'amazonas', 'desenvolvimento',
                 'acre', 'para', 'tocantins', 'rondonia', 'roraima', 'amapa']
# Repository powering the "edit this page" links
GITHUB_REPO = "https://github.com/lskbr/grupybr-pynorte"
GITHUB_BRANCH = "master"
# Static assets used by the malt theme
ARTICLE_BANNERS_FOLDER = "images/banners"
SITE_LOGO = "images/logo.png"
SITE_LOGO_MOBILE = "images/logo-mobile.png"
WELCOME_TITLE = "Seja bem vindo ao {}".format(SITENAME)
WELCOME_TEXT = "Grupo de usuários da linguagem Python do Norte do Brasil."
SITE_BACKGROUND_IMAGE = "images/banners/background.png"
FOOTER_ABOUT = "O Grupo Python Norte é uma comunidade de usuários do Acre, Amapá, Amazonas, Pará, Rondônia, Roraima e Tocantins"
PYGMENTS_STYLE = "perldoc"
# Navigation entries shown on the home page
NAVBAR_HOME_LINKS = [
    # {
    #     "title": "Comunidade",
    #     "href": "comunidade",
    # },
    {
        "title": "Membros",
        "href": "membros",
    },
    {
        "title": "Blog",
        "href": "blog",
    },
]
# Blog pages extend the home navigation with blog-specific sections
NAVBAR_BLOG_LINKS = NAVBAR_HOME_LINKS + [
    {
        "title": "Categorias",
        "href": "blog/categorias",
    },
    {
        "title": "Autores",
        "href": "blog/autores",
    },
    {
        "title": "Tags",
        "href": "blog/tags",
    },
]
# External community links rendered in the footer/sidebar (FontAwesome icons)
SOCIAL_LINKS = (
    {
        "href": "https://telegram.me/joinchat/COYq6T90D2FTAkqkTH3GqA",
        "icon": "fa-paper-plane",
        "text": "Telegram",
    },
    {
        "href": "https://groups.google.com/d/forum/pynorte",
        "icon": "fa-envelope",
        "text": "Lista de e-mail",
    },
    {
        "href": "https://github.com/grupydf",
        "icon": "fa-github",
        "text": "Grupy-DF",
    },
    {
        "href": "http://wiki.python.org.br/",
        "icon": "fa-globe",
        "text": "Python Brasil",
    },
    {
        "href": "https://python.org",
        "icon": "fa-globe",
        "text": "Python",
    },
    {
        "href": "http://www.pythonclub.com.br/",
        "icon": "fa-globe",
        "text": "PythonClub",
    },
    {
        "href": "http://dojoto.info/",
        "icon": "fa-globe",
        "text": "CodingDojoTocantins"
    },
)
# Member directory rendered on the /membros page; keys are display names,
# values hold optional twitter/github handles and a personal site link.
# OrderedDict preserves the listing order on the page.
MEMBROS = OrderedDict((
    ("Nilo Menezes", {
        "twitter": "@lskbr",
        "github": "lskbr",
        "site": {
            "nome": "Nilo Menezes",
            "href": "http://www.nilo.pro.br",
        }
    }),
    ("Adriano Praia", {
        "github": "adrianopraia",
    }),
    ("João Soares", {
        "github": "joaosr",
        "twitter": "@joao_mnl"
    }),
    ("Marcos Thomaz", {
        "twitter": "@marcosthomazs",
        "github": "thomazs",
        "site": {
            "nome": "Marcos Thomaz da Silva",
            "href": "https://br.linkedin.com/in/marcosthomaz",
        }
    }),
    ("Felipe Colen", {
        "twitter": "@felipecolen",
        "github": "felipecolen",
        "site": {
            "nome": "Felipe Oliveira Colen",
            "href": "https://br.linkedin.com/in/felipecolen",
        }
    }),
    ("Marcos Duran", {
        "twitter": "@mdzain",
        "github": "zapduran",
        "site": {
            "nome": "Marcos Duran",
            "href": "http://www.mdzain.com/",
        }
    }),
    ("Breno Thales", {
        "twitter": "@brenothales",
        "github": "brenothales",
        "site": {
            "nome": "Breno Thales",
            "href": "https://br.linkedin.com/in/breno-thales-2aa8631b/pt",
        }
    }),
))
# Home-page content cards for the malt theme: each section has a background
# color, a title and a list of cards (icon, text, optional action buttons).
MALT_HOME = [
    {
        "color": "blue-grey lighten-5",
        "title": "O que Fazemos?",
        "items": [
            {
                "title": "Comunidade",
                "icon": "fa-comments",
                "text": "A comunidade PyNorte se comunica através de mailing " +\
                        "lists, grupo no telegram e ocasionalmente são " +\
                        "promovidos encontros diversos, como almoços, " +\
                        "<em>coding dojos</em>, hangouts e palestras. ",
                # "buttons": [
                #     {
                #         "text": "Saiba Mais",
                #         "href": "comunidade",
                #     },
                # ],
            },
            {
                "title": "Membros",
                "icon": "fa-users",
                "text": "A comunidade PyNorte inicia sua organização, mas já possui alguns " +\
                        "colaboradores, responsáveis por organizar " +\
                        "eventos, manter a comunicação ativa, divulgar eventos, " +\
                        "redes sociais e etc. ",
                "buttons": [
                    {
                        "text": "Conheça",
                        "href": "membros",
                    },
                ],
            },
            {
                "title": "Entre em Contato",
                "icon": "fa-paper-plane",
                "text": "Deseja participar? Sugerir uma atividade ou simplesmente acompanhar o grupo?"
                        " Contacte-nos via Telegram.",
                "buttons": [
                    {
                        "text": "Telegram",
                        "href": "https://telegram.me/joinchat/COYq6QM8RkebVUVK1WxRHQ",
                    },
                ]
            },
            # {
            #     "title": "Projetos",
            #     "icon": "fa-briefcase",
            #     "text": " Atualmente o PyNorte possui poucos projetos em andamento:" +\
            #             "Traduções do Django-docs e Python on Campus.",
            #     "buttons": [
            #         {
            #             "text": "Mais detalhes",
            #             "href": "projetos",
            #         },
            #     ],
            # },
        ]
    },
    # {
    #     "color": "blue-grey lighten-4",
    #     "title": "Nosso Projetos",
    #     "items": [
    #         {
    #             "title": "MIG-29",
    #             "icon": "fa-fighter-jet",
    #             "text": "MIG-29 é um caça Russo cujo projeto original visava" +\
    #                     "superar o F-22 Raptor",
    #             "buttons": [
    #                 {
    #                     "text": "Código Fonte",
    #                     "href": "#",
    #                 },
    #                 {
    #                     "text": "Wiki",
    #                     "href": "#",
    #                 },
    #             ]
    #         },
    #         {
    #             "title": "SNES",
    #             "icon": "fa-gamepad",
    #             "text": "O Super Nintendo Entertainment Systems visa superar" +\
    #                     "o sucesso de seu antecessor, o NES.",
    #             "buttons": [
    #                 {
    #                     "text": "Site",
    #                     "href": "#",
    #                 },
    #                 {
    #                     "text": "Comprar",
    #                     "href": "#",
    #                 },
    #             ]
    #         }
    #     ]
    # },
    # {
    #     "color": "blue-grey lighten-5",
    #     "title": "Entre em Contato",
    #     "items": [
    #         {
    #             "title": "",
    #         },
    #         {
    #             "icon": "fa-envelope",
    #             "buttons": [
    #                 {
    #                     "text": "Envie um e-mail!",
    #                     "href": "#",
    #                 },
    #             ]
    #         }
    #     ]
    # }
] # end MALT_HOME
# Theme helper functions; the malt theme expects them at module level
from themes.malt.functions import *
|
# Demo script: evaluate a list of expressions that reveal the runtime's
# default encodings (from "Fluent Python", ch. 4).
import sys, locale

# One expression per line; the block is split on whitespace, so each entry
# must contain no internal spaces.
expressions = """
        locale.getpreferredencoding()
        type(my_file)
        my_file.encoding
        sys.stdout.isatty()
        sys.stdout.encoding
        sys.stdin.isatty()
        sys.stdin.encoding
        sys.stderr.isatty()
        sys.stderr.encoding
        sys.getdefaultencoding()
        sys.getfilesystemencoding()
"""

# Keep the file open while the expressions that inspect it are evaluated,
# and close it deterministically afterwards (the original leaked the handle).
with open('dummy', 'w') as my_file:
    for expression in expressions.split():
        value = eval(expression)
        print(expression.rjust(30), '->', repr(value))
|
import bpy
from PyHSPlasma import *
import weakref
from .explosions import *
from . import utils
# Map of Blender lamp types to the Plasma light-info class they export to
_BL2PL = {
    "AREA": plLimitedDirLightInfo,
    "POINT": plOmniLightInfo,
    "SPOT": plSpotLightInfo,
    "SUN": plDirectionalLightInfo,
}
# Scale factor used in the linear/quadratic attenuation formulas below
# (presumably tuned to match PlasmaMax output -- see convert_attenuation_*)
_FAR_POWER = 15.0
class LightConverter:
    """Converts Blender lamp datablocks into Plasma runtime light objects."""
    def __init__(self, exporter):
        # weakref avoids a reference cycle with the owning exporter
        self._exporter = weakref.ref(exporter)
        # dispatch table: Blender lamp type -> type-specific conversion method
        self._converter_funcs = {
            "AREA": self._convert_area_lamp,
            "POINT": self._convert_point_lamp,
            "SPOT": self._convert_spot_lamp,
            "SUN": self._convert_sun_lamp,
        }
    def _convert_attenuation(self, bl, pl):
        """Fill pl's attenuation coefficients from the Blender falloff settings."""
        # If you change these calculations, be sure to update the AnimationConverter!
        intens, attenEnd = self.convert_attenuation(bl)
        if bl.falloff_type == "CONSTANT":
            self._report.msg("Attenuation: No Falloff", indent=2)
            pl.attenConst = intens
            pl.attenLinear = 0.0
            pl.attenQuadratic = 0.0
            pl.attenCutoff = attenEnd
        elif bl.falloff_type == "INVERSE_LINEAR":
            self._report.msg("Attenuation: Inverse Linear", indent=2)
            pl.attenConst = 1.0
            pl.attenLinear = self.convert_attenuation_linear(intens, attenEnd)
            pl.attenQuadratic = 0.0
            pl.attenCutoff = attenEnd
        elif bl.falloff_type == "INVERSE_SQUARE":
            self._report.msg("Attenuation: Inverse Square", indent=2)
            pl.attenConst = 1.0
            pl.attenLinear = 0.0
            pl.attenQuadratic = self.convert_attenuation_quadratic(intens, attenEnd)
            pl.attenCutoff = attenEnd
        else:
            raise BlenderOptionNotSupportedError(bl.falloff_type)
    def convert_attenuation(self, lamp):
        """Return (intensity, attenuation_end) for a Blender lamp."""
        intens = abs(lamp.energy)
        attenEnd = lamp.distance if lamp.use_sphere else lamp.distance * 2
        return (intens, attenEnd)
    def convert_attenuation_linear(self, intensity, end):
        """Linear attenuation coefficient; clamped to be non-negative."""
        return max(0.0, (intensity * _FAR_POWER - 1.0) / end)
    def convert_attenuation_quadratic(self, intensity, end):
        """Quadratic attenuation coefficient; clamped to be non-negative."""
        return max(0.0, (intensity * _FAR_POWER - 1.0) / pow(end, 2))
    def _convert_area_lamp(self, bl, pl):
        """AREA lamp -> plLimitedDirLightInfo dimensions."""
        self._report.msg("[LimitedDirLightInfo '{}']", bl.name, indent=1)
        pl.width = bl.size
        # square area lamps reuse size for depth; rectangles have size_y
        pl.depth = bl.size if bl.shape == "SQUARE" else bl.size_y
        pl.height = bl.plasma_lamp.size_height
    def _convert_point_lamp(self, bl, pl):
        """POINT lamp -> plOmniLightInfo (attenuation only)."""
        self._report.msg("[OmniLightInfo '{}']", bl.name, indent=1)
        self._convert_attenuation(bl, pl)
    def _convert_spot_lamp(self, bl, pl):
        """SPOT lamp -> plSpotLightInfo (attenuation plus cone angles)."""
        self._report.msg("[SpotLightInfo '{}']", bl.name, indent=1)
        self._convert_attenuation(bl, pl)
        # Spot lights have a few more things...
        spot_size = bl.spot_size
        pl.spotOuter = spot_size
        # blend of 0 would make inner == outer; keep a minimal soft edge
        blend = max(0.001, bl.spot_blend)
        pl.spotInner = spot_size - (blend*spot_size)
        if bl.use_halo:
            pl.falloff = bl.halo_intensity
        else:
            pl.falloff = 1.0
    def _convert_sun_lamp(self, bl, pl):
        """SUN lamp -> plDirectionalLightInfo (no extra parameters)."""
        self._report.msg("[DirectionalLightInfo '{}']", bl.name, indent=1)
    def export_rtlight(self, so, bo):
        """Export the Blender lamp object bo as a Plasma runtime light on scene object so."""
        bl_light = bo.data
        # The specifics be here...
        pl_light = self.get_light_key(bo, bl_light, so).object
        self._converter_funcs[bl_light.type](bl_light, pl_light)
        # Light color nonsense
        # Please note that these calculations are duplicated in the AnimationConverter
        energy = bl_light.energy
        if bl_light.use_negative:
            diff_color = [(0.0 - i) * energy for i in bl_light.color]
            spec_color = [(0.0 - i) for i in bl_light.color]
        else:
            diff_color = [i * energy for i in bl_light.color]
            spec_color = [i for i in bl_light.color]
        diff_str = "({:.4f}, {:.4f}, {:.4f})".format(*diff_color)
        diff_color.append(energy)
        spec_str = "({:.4f}, {:.4f}, {:.4f})".format(*spec_color)
        spec_color.append(energy)
        # Do we *only* want a shadow?
        shadow_only = bl_light.shadow_method != "NOSHADOW" and bl_light.use_only_shadow
        # Apply the colors
        if bl_light.use_diffuse and not shadow_only:
            self._report.msg("Diffuse: {}", diff_str, indent=2)
            pl_light.diffuse = hsColorRGBA(*diff_color)
        else:
            self._report.msg("Diffuse: OFF", indent=2)
            pl_light.diffuse = hsColorRGBA(0.0, 0.0, 0.0, energy)
        if bl_light.use_specular and not shadow_only:
            self._report.msg("Specular: {}", spec_str, indent=2)
            pl_light.setProperty(plLightInfo.kLPHasSpecular, True)
            pl_light.specular = hsColorRGBA(*spec_color)
        else:
            self._report.msg("Specular: OFF", indent=2)
            pl_light.specular = hsColorRGBA(0.0, 0.0, 0.0, energy)
        rtlamp = bl_light.plasma_lamp
        has_lg = rtlamp.has_light_group(bo)
        if has_lg:
            pl_light.setProperty(plLightInfo.kLPHasIncludes, True)
            pl_light.setProperty(plLightInfo.kLPIncludesChars, rtlamp.affect_characters)
        if rtlamp.cast_shadows:
            self._export_shadow_master(bo, rtlamp, pl_light)
        pl_light.setProperty(plLightInfo.kLPShadowOnly, shadow_only)
        if self.mgr.getVer() != pvPrime:
            pl_light.setProperty(plLightInfo.kLPShadowLightGroup, has_lg)
        # AFAICT ambient lighting is never set in PlasmaMax...
        # If you can think of a compelling reason to support it, be my guest.
        pl_light.ambient = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
        # Now, let's apply the matrices...
        # Science indicates that Plasma RT Lights should *always* have mats, even if there is a CI
        l2w = utils.matrix44(bo.matrix_local)
        pl_light.lightToWorld = l2w
        pl_light.worldToLight = l2w.inverse()
        # Soft Volume science
        sv_mod, sv_key = bo.plasma_modifiers.softvolume, None
        if sv_mod.enabled:
            sv_key = sv_mod.get_key(self._exporter())
        elif rtlamp.lamp_region:
            sv_bo = rtlamp.lamp_region
            sv_mod = sv_bo.plasma_modifiers.softvolume
            if not sv_mod.enabled:
                raise ExportError("'{}': '{}' is not a SoftVolume".format(bo.name, sv_bo.name))
            sv_key = sv_mod.get_key(self._exporter())
        pl_light.softVolume = sv_key
        # Is this a projector?
        projectors = tuple(self.get_projectors(bl_light))
        if projectors:
            self._export_rt_projector(bo, pl_light, projectors)
        # If the lamp has any sort of animation attached, then it needs to be marked movable.
        # Otherwise, Plasma may not use it for lighting.
        if projectors or bo.plasma_object.has_animation_data:
            pl_light.setProperty(plLightInfo.kLPMovable, True)
        # *Sigh*
        pl_light.sceneNode = self.mgr.get_scene_node(location=so.key.location)
    def _export_rt_projector(self, bo, pl_light, tex_slots):
        """Export the first texture slot of a lamp as a projection layer on pl_light."""
        mat = self._exporter().mesh.material
        slot = tex_slots[0]
        # There is a Material available in the caller, but that is for the parent Mesh. We are a
        # projection Lamp with our own faux Material. Unfortunately, Plasma only supports projecting
        # one layer. We could exploit the fUnderLay and fOverLay system to export everything, but meh.
        if len(tex_slots) > 1:
            self._report.warn("Only one texture slot can be exported per Lamp. Picking the first one: '{}'".format(slot.name), indent=3)
        layer = mat.export_texture_slot(bo, None, None, slot, 0, blend_flags=False)
        state = layer.state
        # Colors science'd from PRPs
        layer.preshade = hsColorRGBA(0.5, 0.5, 0.5)
        layer.runtime = hsColorRGBA(0.5, 0.5, 0.5)
        # Props for projectors...
        # Note that we tell the material exporter to (try not to) do any blend flags for us
        layer.UVWSrc |= plLayer.kUVWPosition
        # spot lamps project with perspective; everything else orthographically
        if bo.data.type == "SPOT":
            state.miscFlags |= hsGMatState.kMiscPerspProjection
        else:
            state.miscFlags |= hsGMatState.kMiscOrthoProjection
        state.ZFlags |= hsGMatState.kZNoZWrite
        pl_light.setProperty(plLightInfo.kLPCastShadows, False)
        if slot.blend_type == "ADD":
            state.blendFlags |= hsGMatState.kBlendAdd
            pl_light.setProperty(plLightInfo.kLPOverAll, True)
        elif slot.blend_type == "MULTIPLY":
            # From PlasmaMAX
            state.blendFlags |= hsGMatState.kBlendMult | hsGMatState.kBlendInvertColor | hsGMatState.kBlendInvertFinalColor
            pl_light.setProperty(plLightInfo.kLPOverAll, True)
        pl_light.projection = layer.key
    def _export_shadow_master(self, bo, rtlamp, pl_light):
        """Create the shadow master object matching pl_light's type and fill its parameters."""
        pClass = plDirectShadowMaster if isinstance(pl_light, plDirectionalLightInfo) else plPointShadowMaster
        shadow = self.mgr.find_create_object(pClass, bl=bo)
        shadow.attenDist = rtlamp.shadow_falloff
        shadow.maxDist = rtlamp.shadow_distance
        # minimum shadow distance is fixed at 75% of the maximum
        shadow.minDist = rtlamp.shadow_distance * 0.75
        shadow.power = rtlamp.shadow_power / 100.0
        shadow.setProperty(plShadowMaster.kSelfShadow, rtlamp.shadow_self)
    def find_material_light_keys(self, bo, bm):
        """Given a blender material, we find the keys of all matching Plasma RT Lights.
        NOTE: We return a tuple of lists: ([permaLights], [permaProjs])"""
        self._report.msg("Searching for runtime lights...", indent=1)
        permaLights = []
        permaProjs = []
        # We're going to inspect the material's light group.
        # If there is no light group, we'll say that there is no runtime lighting...
        # If there is, we will harvest all Blender lamps in that light group that are Plasma Objects
        lg = bm.light_group
        if lg is not None:
            for obj in lg.objects:
                if obj.type != "LAMP":
                    # moronic...
                    continue
                elif not obj.plasma_object.enabled:
                    # who cares?
                    continue
                lamp = obj.data
                # Check to see if they only want this light to work on its layer...
                if lamp.use_own_layer:
                    # Pairs up elements from both layers sequences such that we can compare
                    # to see if the lamp and object are in the same layer.
                    # If you can think of a better way, be my guest.
                    test = zip(bo.layers, obj.layers)
                    for i in test:
                        if i == (True, True):
                            break
                    else:
                        # didn't find a layer where both lamp and object were, skip it.
                        self._report.msg("[{}] '{}': not in same layer, skipping...",
                                         lamp.type, obj.name, indent=2)
                        continue
                # This is probably where PermaLight vs PermaProj should be sorted out...
                pl_light = self.get_light_key(obj, lamp, None)
                if self._is_projection_lamp(lamp):
                    self._report.msg("[{}] PermaProj '{}'", lamp.type, obj.name, indent=2)
                    permaProjs.append(pl_light)
                else:
                    self._report.msg("[{}] PermaLight '{}'", lamp.type, obj.name, indent=2)
                    permaLights.append(pl_light)
        return (permaLights, permaProjs)
    def get_light_key(self, bo, bl_light, so):
        """Find or create the Plasma light key matching the Blender lamp type."""
        try:
            xlate = _BL2PL[bl_light.type]
            return self.mgr.find_create_key(xlate, bl=bo, so=so)
        except LookupError:
            raise BlenderOptionNotSupportedError("Object ('{}') lamp type '{}'".format(bo.name, bl_light.type))
    def get_projectors(self, bl_light):
        """Yield the lamp's texture slots that actually carry a texture."""
        for tex in bl_light.texture_slots:
            if tex is not None and tex.texture is not None:
                yield tex
    def _is_projection_lamp(self, bl_light):
        """True when the lamp has at least one texture slot with a texture."""
        for tex in bl_light.texture_slots:
            if tex is None or tex.texture is None:
                continue
            return True
        return False
    @property
    def mgr(self):
        # resource manager of the owning exporter
        return self._exporter().mgr
    @property
    def _report(self):
        # logging/reporting helper of the owning exporter
        return self._exporter().report
|
'''
Created on 15 Φεβ 2013
@author: tedlaz
'''
# Template INSERT for the single company row (id fixed to 1) of table m12_co;
# filled in by NewDbWizard.accept() with the wizard field values
sqlco = u"INSERT INTO m12_co VALUES (1,'{0}','{1}','{2}',{3},'{4}','{5}','{6}','{7}','{8}','{9}','{10}','{11}','{12}','{13}')"
from PyQt4 import QtCore, QtGui,Qt
from utils import dbutils,widgets
from osyk import osyk
from utils.qtutils import fFindFromList
import datetime
class NewDbWizard(QtGui.QWizard):
def __init__(self, parent=None):
super(NewDbWizard, self).__init__(parent)
#self.setAttribute(Qt.Qt.WA_DeleteOnClose) Οχι γιατί δημιουργείται πρόβλημα ...
#self.addPage(IntroPage())
self.addPage(coDataPage())
self.addPage(coDataPage2())
self.addPage(filePage())
self.addPage(finalPage())
self.setWizardStyle(QtGui.QWizard.ModernStyle)
self.setOption(QtGui.QWizard.IndependentPages,True)
#self.setPixmap(QtGui.QWizard.BannerPixmap,QtGui.QPixmap(':/banner'))
#self.setPixmap(QtGui.QWizard.BackgroundPixmap, QtGui.QPixmap(':/background'))
self.setWindowTitle(u"Οδηγός Δημιουργίας Νέου Αρχείου Μισθοδοσίας")
def accept(self):
#print '%s %s %s' % (self.field('epon'),self.field('cotyp_id'),self.field('fname'))
fileSql = open('newDb.sql')
script = u''
for lines in fileSql:
script += u'%s' % lines.decode('utf-8')
dbutils.executeScript(script, self.field('fname'))
sqlCo = sqlco.format(self.field('epon'),self.field('onom'),self.field('patr'),self.field('cotyp_id'),
self.field('ame'),self.field('afm'),self.field('doy'),self.field('dra'),
self.field('pol'),self.field('odo'),self.field('num'),self.field('tk'),
self.field('ikac'),self.field('ikap'))
#print sqlCo
dbutils.commitToDb(sqlCo, self.field('fname'))
sqlCoy = u"INSERT INTO m12_coy VALUES (1,1,'Κεντρικό','%s')" % self.field('kad')
dbutils.commitToDb(sqlCoy, self.field('fname'))
etos = datetime.datetime.now().year
dbutils.commitToDb(u"INSERT INTO m12_xrisi (xrisi,xrisip) VALUES ('{0}','Χρήση {0}')".format(etos), self.field('fname'))
eidList = osyk.eid_cad_listFilteredDouble(self.field('kad'))
print eidList
sqleid_ = u"INSERT INTO m12_eid (eidp,keid) VALUES ('{0}','{1}');\n"
sqleid = u''
for el in eidList:
sqleid += sqleid_.format(el[1],el[0])
dbutils.executeScript(sqleid,self.field('fname'))
super(NewDbWizard, self).accept()
class IntroPage(QtGui.QWizardPage):
    """First wizard page: static usage instructions for the user."""
    def __init__(self, parent=None):
        super(IntroPage, self).__init__(parent)
        self.setTitle(u"Οδηγίες")
        # single word-wrapped label holding the whole instruction text
        instructions = QtGui.QLabel(u"Αυτός ο οδηγός θα δημιουργήσει νέο Αρχείο Μισθοδοσίας.\n\n "
                                    u"Εσείς θα πρέπει απλά να εισάγετε τις απαραίτητες παραμέτρους "
                                    u"καθώς και το όνομα του αρχείου και το σημείο αποθήκευσης.\n\n"
                                    u"Μπορείτε σε κάθε βήμα να αναθεωρήσετε και να επιστρέψετε.\n\n"
                                    u"Πατήστε δημιουργία στην τελευταία οθόνη για να ολοκληρώσετε.")
        instructions.setWordWrap(True)
        page_layout = QtGui.QVBoxLayout()
        page_layout.addWidget(instructions)
        self.setLayout(page_layout)
class coDataPage(QtGui.QWizardPage):
    """Wizard page collecting the basic company data: legal form, name(s),
    activity code (KAD) and activity abbreviation."""
    def __init__(self, parent=None):
        super(coDataPage, self).__init__(parent)
        #parent.button(QtGui.QWizard.BackButton).setVisible(False)
        #self.buttonText(QtGui.QWizard.NextButton)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Πληροφορίες εταιρίας")
        self.setSubTitle(u"Συμπληρώστε τα βασικά στοιχεία της εταιρίας")
        #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
        # company type: 1 = legal entity, 2 = natural person
        cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
        cotyp = widgets.DbComboBox([[1,u'Νομικό Πρόσωπο'],[2,u'Φυσικό Πρόσωπο']])
        cotypLabel.setBuddy(cotyp)
        eponNameLabel = QtGui.QLabel(u"Επωνυμία:")
        eponNameLineEdit = QtGui.QLineEdit()
        eponNameLabel.setBuddy(eponNameLineEdit)
        # first name / father's name only apply to natural persons and start disabled
        onomLabel = QtGui.QLabel(u"Όνομα (Για φυσικά πρόσωπα):")
        onomLineEdit = QtGui.QLineEdit()
        onomLineEdit.setDisabled(True)
        onomLabel.setBuddy(onomLineEdit)
        patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
        patrLineEdit = QtGui.QLineEdit()
        patrLineEdit.setDisabled(True)
        patrLabel.setBuddy(patrLineEdit)
        def onCotypActivated():
            # enable the person-only fields when "natural person" is selected,
            # otherwise clear and disable them
            if cotyp.currentIndex() ==1:
                onomLineEdit.setDisabled(False)
                patrLineEdit.setDisabled(False)
            else:
                onomLineEdit.setText('')
                patrLineEdit.setText('')
                onomLineEdit.setDisabled(True)
                patrLineEdit.setDisabled(True)
        cotyp.activated.connect(onCotypActivated)
        # activity code (KAD) is read-only and filled via the lookup dialog
        kadLabel = QtGui.QLabel(u"Κωδικός αρ.Δραστηριότητας:")
        kadLineEdit = QtGui.QLineEdit()
        kadLabel.setBuddy(kadLineEdit)
        kadLineEdit.setReadOnly(True)
        kadFindButton = QtGui.QPushButton(u'Εύρεση ΚΑΔ')
        kadLayout = QtGui.QHBoxLayout()
        kadLayout.addWidget(kadLineEdit)
        kadLayout.addWidget(kadFindButton)
        kadpLabel = QtGui.QLabel(u"Περιγραφή αρ.Δραστηριότητας:")
        kadpTextEdit = QtGui.QTextEdit()
        kadpLabel.setBuddy(kadpTextEdit)
        kadpTextEdit.setReadOnly(True)
        draLabel = QtGui.QLabel(u"Συντομογραφία Δραστηριότητας:")
        draLineEdit = QtGui.QLineEdit()
        draLabel.setBuddy(draLineEdit)
        def openFindDlg():
            # KAD search dialog; fills code and description on accept
            kadList = osyk.cad_list()
            head = [u'ΚΑΔ',u'Περιγραφή']
            cw = [35,300]
            form = fFindFromList(kadList,head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                kadLineEdit.setText(form.array[0])
                kadpTextEdit.setText(form.array[1])
        kadFindButton.clicked.connect(openFindDlg)
        # expose the widget values as wizard fields ('*' marks required fields)
        self.registerField('cotyp_id',cotyp,'timi')
        self.registerField('epon*', eponNameLineEdit)
        self.registerField('onom', onomLineEdit)
        self.registerField('patr', patrLineEdit)
        self.registerField('kad*', kadLineEdit)
        self.registerField('dra*', draLineEdit)
        #self.registerField('kadt*', kadpTextEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(cotypLabel, 0, 0)
        layout.addWidget(cotyp, 0, 1)
        layout.addWidget(eponNameLabel, 1, 0)
        layout.addWidget(eponNameLineEdit, 1, 1)
        layout.addWidget(onomLabel, 2, 0)
        layout.addWidget(onomLineEdit, 2, 1)
        layout.addWidget(patrLabel, 3, 0)
        layout.addWidget(patrLineEdit, 3, 1)
        layout.addWidget(kadLabel, 4, 0)
        layout.addLayout(kadLayout, 4, 1)
        layout.addWidget(kadpLabel,5, 0)
        layout.addWidget(kadpTextEdit, 5, 1,2,1)
        layout.addWidget(draLabel,7, 0)
        layout.addWidget(draLineEdit,7, 1)
        self.setLayout(layout)
class coDataPage2(QtGui.QWizardPage):
    """Wizard page collecting the remaining company data: tax id (AFM),
    tax office (DOY), address, IKA registration and IKA branch."""
    def __init__(self, parent=None):
        super(coDataPage2, self).__init__(parent)
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Πληροφορίες εταιρίας")
        self.setSubTitle(u"Συμπληρώστε τα υπόλοιπα στοιχεία της εταιρίας")
        afmLabel = QtGui.QLabel(u"ΑΦΜ:")
        afmLineEdit = QtGui.QLineEdit()
        afmLabel.setBuddy(afmLineEdit)
        # tax office is read-only and filled via its lookup dialog
        doyLabel = QtGui.QLabel(u"ΔΟΥ:")
        doyLineEdit = QtGui.QLineEdit()
        doyLabel.setBuddy(doyLineEdit)
        doyLineEdit.setReadOnly(True)
        doyFindButton = QtGui.QPushButton(u'...')
        doyFindButton.setMaximumSize(QtCore.QSize(20, 50))
        doyLayout = QtGui.QHBoxLayout()
        doyLayout.addWidget(doyLineEdit)
        doyLayout.addWidget(doyFindButton)
        def openFindDlg():
            # tax office (DOY) lookup from the bundled osyk list
            head = [u'Κωδ',u'ΔΟΥ']
            cw = [35,300]
            form = fFindFromList(osyk.doy_list('./osyk/doy.txt'),head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                doyLineEdit.setText(form.array[1])
        doyFindButton.clicked.connect(openFindDlg)
        poliLabel = QtGui.QLabel(u"Πόλη:")
        poliLineEdit = QtGui.QLineEdit()
        poliLabel.setBuddy(poliLineEdit)
        tkLabel = QtGui.QLabel(u"Ταχ.Κωδικός:")
        tkLineEdit = QtGui.QLineEdit()
        tkLabel.setBuddy(tkLineEdit)
        odosLabel = QtGui.QLabel(u"Οδός:")
        odosLineEdit = QtGui.QLineEdit()
        odosLabel.setBuddy(odosLineEdit)
        numLabel = QtGui.QLabel(u"Αριθμός:")
        numLineEdit = QtGui.QLineEdit()
        numLabel.setBuddy(numLineEdit)
        ameLabel = QtGui.QLabel(u"Αρ.Μητρ.ΙΚΑ:")
        ameLineEdit = QtGui.QLineEdit()
        ameLabel.setBuddy(ameLineEdit)
        # IKA branch code + name are read-only; set together from the lookup
        ikacLabel = QtGui.QLabel(u"Κωδ.ΙΚΑ:")
        ikacLineEdit = QtGui.QLineEdit()
        ikacLabel.setBuddy(ikacLineEdit)
        ikacLineEdit.setReadOnly(True)
        ikaLabel = QtGui.QLabel(u"Υπ/μα.ΙΚΑ:")
        ikaLineEdit = QtGui.QLineEdit()
        ikaLabel.setBuddy(ikaLineEdit)
        ikaLineEdit.setReadOnly(True)
        ikaFindButton = QtGui.QPushButton(u'...')
        ikaFindButton.setMaximumSize(QtCore.QSize(20, 50))
        ikaLayout = QtGui.QHBoxLayout()
        ikaLayout.addWidget(ikaLineEdit)
        ikaLayout.addWidget(ikaFindButton)
        def openFindDlgIKA():
            # IKA branch lookup from the bundled osyk list
            head = [u'Κωδ',u'Υποκατάστημα ΙΚΑ']
            cw = [35,300]
            form = fFindFromList(osyk.doy_list('./osyk/ika.txt'),head,cw)
            if form.exec_() == QtGui.QDialog.Accepted:
                ikacLineEdit.setText(form.array[0])
                ikaLineEdit.setText(form.array[1])
        ikaFindButton.clicked.connect(openFindDlgIKA)
        # expose the widget values as wizard fields ('*' marks required fields)
        self.registerField('afm*',afmLineEdit)
        self.registerField('doy*',doyLineEdit)
        self.registerField('pol*',poliLineEdit)
        self.registerField('odo',odosLineEdit)
        self.registerField('num',numLineEdit)
        self.registerField('tk',tkLineEdit)
        self.registerField('ikac*',ikacLineEdit)
        self.registerField('ikap*',ikaLineEdit)
        self.registerField('ame*',ameLineEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(afmLabel, 0, 0)
        layout.addWidget(afmLineEdit, 0, 1)
        layout.addWidget(doyLabel, 0, 2)
        layout.addLayout(doyLayout, 0, 3)
        layout.addWidget(poliLabel, 1, 0)
        layout.addWidget(poliLineEdit, 1, 1)
        layout.addWidget(tkLabel, 1, 2)
        layout.addWidget(tkLineEdit, 1, 3)
        layout.addWidget(odosLabel, 2, 0)
        layout.addWidget(odosLineEdit, 2, 1)
        layout.addWidget(numLabel, 2, 2)
        layout.addWidget(numLineEdit, 2, 3)
        layout.addWidget(ameLabel, 3, 0)
        layout.addWidget(ameLineEdit, 3, 1)
        layout.addWidget(ikacLabel, 4, 0)
        layout.addWidget(ikacLineEdit, 4, 1)
        layout.addWidget(ikaLabel, 4, 2)
        layout.addLayout(ikaLayout, 4, 3)
        self.setLayout(layout)
class filePage(QtGui.QWizardPage):
    """Wizard page that asks for the output file name/location.

    The file-name field is read-only and is filled through a native
    save-file dialog (the '...' button).
    """
    def __init__(self, parent=None):
        super(filePage, self).__init__(parent)
        # Localised wizard navigation buttons (Greek UI).
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.NextButton,u'Επόμενο >')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Όνομα αρχείου")
        self.setSubTitle(u"Δώστε όνομα και περιοχή αποθήκευσης")
        #self.setPixmap(QtGui.QWizard.LogoPixmap, QtGui.QPixmap(':/logo1'))
        fileNameLabel = QtGui.QLabel(u"Όνομα αρχείου:")
        self.fileNameLineEdit = QtGui.QLineEdit()
        # Read-only: the only way to set it is through the file dialog.
        self.fileNameLineEdit.setReadOnly(True)
        fileNameLabel.setBuddy(self.fileNameLineEdit)
        butFile = QtGui.QPushButton(u'...')
        butFile.clicked.connect(self.fSave)
        fileLayout = QtGui.QHBoxLayout()
        fileLayout.addWidget(self.fileNameLineEdit)
        fileLayout.addWidget(butFile)
        # NOTE(review): the widgets below (father's name, company type) are
        # created but never added to the layout and never registered as
        # fields — they are invisible dead code; confirm whether they were
        # meant to be part of this page.
        patrLabel = QtGui.QLabel(u"Πατρώνυμο (Για φυσικά πρόσωπα):")
        patrLineEdit = QtGui.QLineEdit()
        patrLabel.setBuddy(patrLineEdit)
        cotypLabel = QtGui.QLabel(u"Τύπος επιχείρησης:")
        cotyp = QtGui.QComboBox()
        cotypLabel.setBuddy(cotyp)
        cotyp.addItems([u'1.Νομικό Πρόσωπο',u'2.Φυσικό Πρόσωπο'])
        # Mandatory field: Next stays disabled until a file is chosen.
        self.registerField('fname*', self.fileNameLineEdit)
        layout = QtGui.QGridLayout()
        layout.addWidget(fileNameLabel, 0, 0)
        layout.addLayout(fileLayout, 0, 1)
        self.setLayout(layout)
    def fSave(self):
        """Open a save-file dialog and store the chosen path in the field."""
        fileName = QtGui.QFileDialog.getSaveFileName(self,
                "QFileDialog.getSaveFileName()",
                self.field('fname'),
                "payroll m13 (*.m13)", QtGui.QFileDialog.Options())
        # Empty string means the user cancelled — keep the previous value.
        if fileName:
            self.fileNameLineEdit.setText(fileName)
class finalPage(QtGui.QWizardPage):
    """Last wizard page: shows a summary of the chosen parameters before
    the payroll file is actually created."""
    def __init__(self, parent=None):
        super(finalPage, self).__init__(parent)
        # Localised wizard navigation buttons (Greek UI).
        self.setButtonText(QtGui.QWizard.BackButton,u'< Πίσω')
        self.setButtonText(QtGui.QWizard.FinishButton,u'Ολοκλήρωση')
        self.setButtonText(QtGui.QWizard.CancelButton,u'Ακύρωση')
        self.setTitle(u"Δημιουργία αρχείου ")
        #self.setPixmap(QtGui.QWizard.WatermarkPixmap, QtGui.QPixmap(':/watermark2'))
        self.label = QtGui.QLabel()
        self.label.setWordWrap(True)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.label)
        self.setLayout(layout)
    def initializePage(self):
        """Rebuild the summary text each time the page is shown."""
        finishText = self.wizard().buttonText(QtGui.QWizard.FinishButton)
        # BUG FIX: the original called finishText.replace('&', '') and
        # discarded the result — str.replace returns a new string, so the
        # '&' keyboard-mnemonic marker stayed in the displayed name.
        # Assigning the result is correct for the Python-str API (and is
        # also safe under PyQt4's QString, whose replace returns self).
        finishText = finishText.replace('&', '')
        txt = u'Προσοχή , θα δημιουργηθεί αρχείο μισθοδοσίας με τις παρακάτω παραμέτρους :\n\n'
        txt += u'Στοιχεία Επιχείρησης : %s \n\n' % self.field('epon')
        txt += u'Όνομα Αρχείου : %s \n\n' % self.field('fname')
        txt += u"\nΠατήστε %s για να ολοκληρωθεί η διαδικασία." % finishText
        self.label.setText(txt)
if __name__ == '__main__':
    # Manual test entry point: run the database wizard standalone.
    import sys
    application = QtGui.QApplication(sys.argv)
    dialog = NewDbWizard()
    dialog.show()
    sys.exit(application.exec_())
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration (auto-generated).

    forwards(): drops the legacy ``uuid`` char column from seven appulet
    tables.  backwards(): restores the column with an empty default.
    NOTE(review): generated code — prefer regenerating over hand-editing.
    """
    def forwards(self, orm):
        """Apply the migration: remove every ``uuid`` column."""
        # Deleting field 'Box.uuid'
        db.delete_column(u'appulet_box', 'uuid')
        # Deleting field 'Route.uuid'
        db.delete_column(u'appulet_route', 'uuid')
        # Deleting field 'InteractiveImage.uuid'
        db.delete_column(u'appulet_interactiveimage', 'uuid')
        # Deleting field 'Highlight.uuid'
        db.delete_column(u'appulet_highlight', 'uuid')
        # Deleting field 'Step.uuid'
        db.delete_column(u'appulet_step', 'uuid')
        # Deleting field 'Reference.uuid'
        db.delete_column(u'appulet_reference', 'uuid')
        # Deleting field 'Track.uuid'
        db.delete_column(u'appulet_track', 'uuid')
    def backwards(self, orm):
        """Revert the migration: re-add every ``uuid`` column (blank default)."""
        # Adding field 'Box.uuid'
        db.add_column(u'appulet_box', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'Route.uuid'
        db.add_column(u'appulet_route', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'InteractiveImage.uuid'
        db.add_column(u'appulet_interactiveimage', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'Highlight.uuid'
        db.add_column(u'appulet_highlight', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'Step.uuid'
        db.add_column(u'appulet_step', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'Reference.uuid'
        db.add_column(u'appulet_reference', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
        # Adding field 'Track.uuid'
        db.add_column(u'appulet_track', 'uuid',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=40, blank=True),
                      keep_default=False)
    # Frozen ORM model definitions (generated by South) used to build the
    # fake ORM passed to forwards()/backwards().
    models = {
        u'appulet.box': {
            'Meta': {'object_name': 'Box'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'interactive_image': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'boxes'", 'to': u"orm['appulet.InteractiveImage']"}),
            'max_x': ('django.db.models.fields.IntegerField', [], {}),
            'max_y': ('django.db.models.fields.IntegerField', [], {}),
            'message_ca': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'message_en': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'message_es': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'message_fr': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'message_oc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'min_x': ('django.db.models.fields.IntegerField', [], {}),
            'min_y': ('django.db.models.fields.IntegerField', [], {})
        },
        u'appulet.highlight': {
            'Meta': {'object_name': 'Highlight'},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'highlights'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'long_text_ca': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
            'long_text_en': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
            'long_text_es': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
            'long_text_fr': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
            'long_text_oc': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
            'media': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'name_ca': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'name_en': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'name_es': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'name_oc': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'radius': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'step': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'highlights'", 'null': 'True', 'to': u"orm['appulet.Step']"}),
            'type': ('django.db.models.fields.IntegerField', [], {})
        },
        u'appulet.interactiveimage': {
            'Meta': {'object_name': 'InteractiveImage'},
            'highlight': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'interactive_images'", 'null': 'True', 'to': u"orm['appulet.Highlight']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'})
        },
        u'appulet.rating': {
            'Meta': {'object_name': 'Rating'},
            'highlight': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['appulet.Highlight']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'rating': ('django.db.models.fields.IntegerField', [], {}),
            'route': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ratings'", 'null': 'True', 'to': u"orm['appulet.Route']"}),
            'time': ('django.db.models.fields.DateTimeField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ratings'", 'to': u"orm['auth.User']"})
        },
        u'appulet.reference': {
            'Meta': {'object_name': 'Reference'},
            'general': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'highlight': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'references'", 'null': 'True', 'to': u"orm['appulet.Highlight']"}),
            'html_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name_ca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_es': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_oc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
        },
        u'appulet.route': {
            'Meta': {'object_name': 'Route'},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'routes'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'description_ca': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'description_en': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'description_es': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'description_fr': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'description_oc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'gpx_pois': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
            'gpx_track': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
            'gpx_waypoints': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'id_route_based_on': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['appulet.Route']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'local_carto': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'name_ca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_es': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_oc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'official': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'reference': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['appulet.Reference']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'short_description_ca': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'short_description_es': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'short_description_oc': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'track': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['appulet.Track']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
            'upload_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        u'appulet.step': {
            'Meta': {'object_name': 'Step'},
            'absolute_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'altitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'latitude': ('django.db.models.fields.FloatField', [], {}),
            'longitude': ('django.db.models.fields.FloatField', [], {}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'precision': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'track': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'steps'", 'null': 'True', 'to': u"orm['appulet.Track']"})
        },
        u'appulet.track': {
            'Meta': {'object_name': 'Track'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name_ca': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_en': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_es': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name_oc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
    # Apps whose frozen state above is complete for this migration.
    complete_apps = ['appulet']
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ArcheoCAD(object):
    """UI class for the ArcheoCAD dialog (point layer -> polygon/circle/
    ellipse/polyline shapefile).

    NOTE(review): this looks like pyuic5 output generated from a Qt
    Designer .ui file (setupUi/retranslateUi structure, trailing custom
    widget import) — prefer regenerating over hand-editing.
    """
    def setupUi(self, ArcheoCAD):
        """Build and wire all widgets onto the ArcheoCAD dialog."""
        ArcheoCAD.setObjectName("ArcheoCAD")
        ArcheoCAD.resize(344, 728)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(ArcheoCAD.sizePolicy().hasHeightForWidth())
        ArcheoCAD.setSizePolicy(sizePolicy)
        ArcheoCAD.setMinimumSize(QtCore.QSize(330, 702))
        ArcheoCAD.setMaximumSize(QtCore.QSize(16777215, 16777215))
        ArcheoCAD.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        ArcheoCAD.setSizeGripEnabled(False)
        self.gridLayout = QtWidgets.QGridLayout(ArcheoCAD)
        self.gridLayout.setObjectName("gridLayout")
        # --- Group box: input point layer selection -----------------------
        self.gBox_pointLayer = QtWidgets.QGroupBox(ArcheoCAD)
        self.gBox_pointLayer.setObjectName("gBox_pointLayer")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.gBox_pointLayer)
        self.gridLayout_2.setObjectName("gridLayout_2")
        # Custom QGIS widget; its import is at the bottom of the module,
        # following the pyuic custom-widget convention.
        self.qgsComboPointLayer = QgsMapLayerComboBox(self.gBox_pointLayer)
        self.qgsComboPointLayer.setObjectName("qgsComboPointLayer")
        self.gridLayout_2.addWidget(self.qgsComboPointLayer, 0, 0, 1, 1)
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.chkBoxSelected = QtWidgets.QCheckBox(self.gBox_pointLayer)
        self.chkBoxSelected.setObjectName("chkBoxSelected")
        self.horizontalLayout_2.addWidget(self.chkBoxSelected)
        self.gridLayout_2.addLayout(self.horizontalLayout_2, 1, 0, 1, 1)
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        self.TxtLabel_geoChoice = QtWidgets.QLabel(self.gBox_pointLayer)
        self.TxtLabel_geoChoice.setObjectName("TxtLabel_geoChoice")
        self.verticalLayout.addWidget(self.TxtLabel_geoChoice)
        self.TxtLable_geoChoice2 = QtWidgets.QLabel(self.gBox_pointLayer)
        self.TxtLable_geoChoice2.setObjectName("TxtLable_geoChoice2")
        self.verticalLayout.addWidget(self.TxtLable_geoChoice2)
        self.comboGeoChoice = QtWidgets.QComboBox(self.gBox_pointLayer)
        self.comboGeoChoice.setObjectName("comboGeoChoice")
        self.verticalLayout.addWidget(self.comboGeoChoice)
        self.gridLayout_2.addLayout(self.verticalLayout, 2, 0, 1, 1)
        self.gridLayout.addWidget(self.gBox_pointLayer, 0, 0, 1, 1)
        # --- Group box: optional regrouping field -------------------------
        self.gBox_multipleTr = QtWidgets.QGroupBox(ArcheoCAD)
        self.gBox_multipleTr.setObjectName("gBox_multipleTr")
        self.gridLayout_3 = QtWidgets.QGridLayout(self.gBox_multipleTr)
        self.gridLayout_3.setObjectName("gridLayout_3")
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.chkBoxFieldGroup = QtWidgets.QCheckBox(self.gBox_multipleTr)
        self.chkBoxFieldGroup.setObjectName("chkBoxFieldGroup")
        self.horizontalLayout.addWidget(self.chkBoxFieldGroup)
        self.gridLayout_3.addLayout(self.horizontalLayout, 0, 0, 1, 1)
        self.verticalLayout_4 = QtWidgets.QVBoxLayout()
        self.verticalLayout_4.setObjectName("verticalLayout_4")
        self.label_regroup = QtWidgets.QLabel(self.gBox_multipleTr)
        self.label_regroup.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_regroup.setObjectName("label_regroup")
        self.verticalLayout_4.addWidget(self.label_regroup)
        # Disabled until chkBoxFieldGroup is ticked (see signal wiring below).
        self.comboGroup = QtWidgets.QComboBox(self.gBox_multipleTr)
        self.comboGroup.setEnabled(False)
        self.comboGroup.setObjectName("comboGroup")
        self.verticalLayout_4.addWidget(self.comboGroup)
        self.gridLayout_3.addLayout(self.verticalLayout_4, 1, 0, 1, 1)
        self.gridLayout.addWidget(self.gBox_multipleTr, 1, 0, 1, 1)
        # --- Group box: optional vertex sort field ------------------------
        self.gBox_Sort = QtWidgets.QGroupBox(ArcheoCAD)
        self.gBox_Sort.setObjectName("gBox_Sort")
        self.gridLayout_4 = QtWidgets.QGridLayout(self.gBox_Sort)
        self.gridLayout_4.setObjectName("gridLayout_4")
        self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_6.setObjectName("horizontalLayout_6")
        self.chkBoxSort = QtWidgets.QCheckBox(self.gBox_Sort)
        self.chkBoxSort.setObjectName("chkBoxSort")
        self.horizontalLayout_6.addWidget(self.chkBoxSort)
        self.gridLayout_4.addLayout(self.horizontalLayout_6, 0, 0, 1, 1)
        self.verticalLayout_6 = QtWidgets.QVBoxLayout()
        self.verticalLayout_6.setObjectName("verticalLayout_6")
        self.label_sort = QtWidgets.QLabel(self.gBox_Sort)
        self.label_sort.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_sort.setObjectName("label_sort")
        self.verticalLayout_6.addWidget(self.label_sort)
        # Disabled until chkBoxSort is ticked (see signal wiring below).
        self.comboSort = QtWidgets.QComboBox(self.gBox_Sort)
        self.comboSort.setEnabled(False)
        self.comboSort.setObjectName("comboSort")
        self.verticalLayout_6.addWidget(self.comboSort)
        self.gridLayout_4.addLayout(self.verticalLayout_6, 1, 0, 1, 1)
        self.gridLayout.addWidget(self.gBox_Sort, 2, 0, 1, 1)
        self.buttonBox = QtWidgets.QDialogButtonBox(ArcheoCAD)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close|QtWidgets.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.gridLayout.addWidget(self.buttonBox, 4, 0, 1, 1)
        # --- Group box: output shapefile options --------------------------
        self.gBox_Output = QtWidgets.QGroupBox(ArcheoCAD)
        self.gBox_Output.setObjectName("gBox_Output")
        self.gridLayout_5 = QtWidgets.QGridLayout(self.gBox_Output)
        self.gridLayout_5.setObjectName("gridLayout_5")
        self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_5.setObjectName("horizontalLayout_5")
        self.label_enconding = QtWidgets.QLabel(self.gBox_Output)
        self.label_enconding.setObjectName("label_enconding")
        self.horizontalLayout_5.addWidget(self.label_enconding)
        self.comboEncoding = QtWidgets.QComboBox(self.gBox_Output)
        self.comboEncoding.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.comboEncoding.setObjectName("comboEncoding")
        self.horizontalLayout_5.addWidget(self.comboEncoding)
        self.gridLayout_5.addLayout(self.horizontalLayout_5, 5, 0, 1, 1)
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.label_VertexNb = QtWidgets.QLabel(self.gBox_Output)
        self.label_VertexNb.setObjectName("label_VertexNb")
        self.horizontalLayout_3.addWidget(self.label_VertexNb)
        # Vertex count used when approximating circles/ellipses: 10..999,
        # default 90.
        self.spinBoxNbVertices = QtWidgets.QSpinBox(self.gBox_Output)
        self.spinBoxNbVertices.setWrapping(False)
        self.spinBoxNbVertices.setSpecialValueText("")
        self.spinBoxNbVertices.setPrefix("")
        self.spinBoxNbVertices.setMinimum(10)
        self.spinBoxNbVertices.setMaximum(999)
        self.spinBoxNbVertices.setProperty("value", 90)
        self.spinBoxNbVertices.setObjectName("spinBoxNbVertices")
        self.horizontalLayout_3.addWidget(self.spinBoxNbVertices)
        self.gridLayout_5.addLayout(self.horizontalLayout_3, 0, 0, 1, 1)
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.outFileLine = QtWidgets.QLineEdit(self.gBox_Output)
        self.outFileLine.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.outFileLine.setObjectName("outFileLine")
        self.horizontalLayout_4.addWidget(self.outFileLine)
        self.ButtonBrowse = QtWidgets.QPushButton(self.gBox_Output)
        self.ButtonBrowse.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.ButtonBrowse.setObjectName("ButtonBrowse")
        self.horizontalLayout_4.addWidget(self.ButtonBrowse)
        self.gridLayout_5.addLayout(self.horizontalLayout_4, 1, 0, 1, 1)
        self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_7.setObjectName("horizontalLayout_7")
        # Output geometry choice: polygon (default) or polyline.
        self.radioButPolyG = QtWidgets.QRadioButton(self.gBox_Output)
        self.radioButPolyG.setChecked(True)
        self.radioButPolyG.setObjectName("radioButPolyG")
        self.horizontalLayout_7.addWidget(self.radioButPolyG)
        self.radioButPolyL = QtWidgets.QRadioButton(self.gBox_Output)
        self.radioButPolyL.setObjectName("radioButPolyL")
        self.horizontalLayout_7.addWidget(self.radioButPolyL)
        self.gridLayout_5.addLayout(self.horizontalLayout_7, 2, 0, 1, 1)
        self.gridLayout.addWidget(self.gBox_Output, 3, 0, 1, 1)
        self.retranslateUi(ArcheoCAD)
        # Signal wiring: OK/Close buttons, and checkbox-enables-combo pairs.
        self.buttonBox.accepted.connect(ArcheoCAD.accept)
        self.buttonBox.rejected.connect(ArcheoCAD.reject)
        self.chkBoxFieldGroup.toggled['bool'].connect(self.comboGroup.setEnabled)
        self.chkBoxSort.toggled['bool'].connect(self.comboSort.setEnabled)
        QtCore.QMetaObject.connectSlotsByName(ArcheoCAD)
    def retranslateUi(self, ArcheoCAD):
        """Set all user-visible (translatable) strings on the widgets."""
        _translate = QtCore.QCoreApplication.translate
        ArcheoCAD.setWindowTitle(_translate("ArcheoCAD", "ArcheoCAD - Polygon-Circle-Ellipse-Polyline"))
        self.gBox_pointLayer.setTitle(_translate("ArcheoCAD", "Input point layer"))
        self.chkBoxSelected.setText(_translate("ArcheoCAD", "Create output features using only selected points"))
        self.TxtLabel_geoChoice.setText(_translate("ArcheoCAD", "Input field containing the output geometry"))
        self.TxtLable_geoChoice2.setText(_translate("ArcheoCAD", "(polygon, circle, ellipse, polyline)"))
        self.gBox_multipleTr.setTitle(_translate("ArcheoCAD", "Regrouping field"))
        self.chkBoxFieldGroup.setText(_translate("ArcheoCAD", "Regrouping points based on a specific field"))
        self.label_regroup.setText(_translate("ArcheoCAD", "Input field to be used to regroup points "))
        self.gBox_Sort.setTitle(_translate("ArcheoCAD", "Sort polygon vertices"))
        self.chkBoxSort.setText(_translate("ArcheoCAD", "Sort vertices based on a specific field"))
        self.label_sort.setText(_translate("ArcheoCAD", "Input field to be used for the sort order"))
        self.gBox_Output.setTitle(_translate("ArcheoCAD", "Output shapefile"))
        self.label_enconding.setText(_translate("ArcheoCAD", "Character encoding"))
        self.label_VertexNb.setText(_translate("ArcheoCAD", "Number of vertices (used for circles and ellipses)"))
        self.ButtonBrowse.setText(_translate("ArcheoCAD", "browse"))
        self.radioButPolyG.setText(_translate("ArcheoCAD", "Polygon"))
        self.radioButPolyL.setText(_translate("ArcheoCAD", "Polyline"))
from qgsmaplayercombobox import QgsMapLayerComboBox
|
'''Test module for ChatTextEdit widget'''
import sys
import os
from PyQt4 import QtGui
import gui
import e3
from gui.qt4ui import AvatarChooser
class SessionStub (object):
    """Minimal stand-in for a session object.

    Exposes only the ``config_dir`` attribute that AvatarChooser reads.
    """

    class ConfigDir (object):
        """Fake config dir that always resolves to one fixed avatar path."""
        def get_path(*args):
            # All arguments (including the implicit self) are ignored;
            # the path below is a hard-coded developer machine location.
            return '/home/fastfading/src/emesene/emesene2/'\
                   'messenger.hotmail.com/' \
                   'atarawhisky@hotmail.com/avatars/last'

    def __init__(self):
        self.config_dir = self.ConfigDir()
def main():
    """Entry point: show the AvatarChooser dialog with a stubbed session."""
    def test_stuff():
        """Placeholder for ad-hoc manual test code."""
        pass

    test_stuff()
    application = QtGui.QApplication(sys.argv)
    chooser = AvatarChooser.AvatarChooser(SessionStub())
    chooser.exec_()
    #qapp.exec_()
if __name__ == "__main__":
main()
|
import cherrypy
from radical.auth import AuthController, require, member_of, name_is
from radical.lib.tool import template
class ProfileHandler:
    """CherryPy controller for the profile page.

    The ``auth.require`` condition list is empty, so the auth tool runs
    with no membership conditions to enforce for this controller (and
    its subcontrollers).
    """

    _cp_config = {
        'auth.require': []
    }

    @cherrypy.expose
    @cherrypy.tools.mako(filename="profile.html")
    def index(self):
        """Render the profile template with the page title."""
        context = {'title': "Radical"}
        return context
|
""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
http://www.logilab.fr/ -- mailto:contact@logilab.fr
Raw metrics checker
"""
import tokenize
from ..logilab.common.ureports import Table
from ..interfaces import IRawChecker
from ..checkers import BaseRawChecker, EmptyReport
from ..reporters import diff_string
def report_raw_stats(sect, stats, old_stats):
    """Append a table of code / docstring / comment / empty line counts
    and percentages, with a diff against the previous run's numbers.

    Raises EmptyReport when no line has been analyzed at all.
    """
    total_lines = stats['total_lines']
    if not total_lines:
        raise EmptyReport()
    sect.description = '%s lines have been analyzed' % total_lines
    # Flat cell sequence consumed 5-at-a-time by Table (cols=5); the
    # first five cells are the header row (rheaders=1).
    cells = ['type', 'number', '%', 'previous', 'difference']
    for node_type in ('code', 'docstring', 'comment', 'empty'):
        key = node_type + '_lines'
        total = stats[key]
        percent = float(total * 100) / total_lines
        old = old_stats.get(key)
        if old is None:
            # No previous run to compare against.
            old, diff_str = 'NC', 'NC'
        else:
            diff_str = diff_string(old, total)
        cells.extend((node_type, str(total), '%.2f' % percent,
                      str(old), diff_str))
    sect.append(Table(children=tuple(cells), cols=5, rheaders=1))
class RawMetricsChecker(BaseRawChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """
    __implements__ = (IRawChecker,)
    # configuration section name
    name = 'metrics'
    # configuration options
    options = ( )
    # messages (none: this checker only reports, never warns)
    msgs = {}
    # reports: RP0701 is rendered by the module-level report_raw_stats()
    reports = ( ('RP0701', 'Raw metrics', report_raw_stats), )
    def __init__(self, linter):
        BaseRawChecker.__init__(self, linter)
        # Stats dict is created lazily in open(), once per lint run.
        self.stats = None
    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
                                           empty_lines=0, docstring_lines=0,
                                           comment_lines=0)
    def process_tokens(self, tokens):
        """update stats"""
        i = 0
        # Materialize: get_type() needs len() and random access.
        tokens = list(tokens)
        while i < len(tokens):
            # get_type() classifies one logical line and returns the index
            # where the next one starts, so i advances a line at a time.
            i, lines_number, line_type = get_type(tokens, i)
            self.stats['total_lines'] += lines_number
            self.stats[line_type] += lines_number
# Token types that by themselves do not make a line a "code" line.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)

def get_type(tokens, start_index):
    """Classify the line starting at *start_index*.

    Returns ``(next_index, physical_line_count, line_type)`` where
    line_type is one of 'docstring_lines', 'comment_lines',
    'code_lines' or 'empty_lines'. The type is decided by the first
    non-junk token found on the starting row.
    """
    idx = start_index
    last_tok_type = tokens[idx][0]
    start_pos = tokens[idx][2]
    end_pos = start_pos
    line_type = None
    # Consume every token whose start row equals the starting row.
    while idx < len(tokens) and tokens[idx][2][0] == start_pos[0]:
        last_tok_type = tokens[idx][0]
        end_pos = tokens[idx][3]
        if line_type is None:
            if last_tok_type == tokenize.STRING:
                line_type = 'docstring_lines'
            elif last_tok_type == tokenize.COMMENT:
                line_type = 'comment_lines'
            elif last_tok_type not in JUNK:
                line_type = 'code_lines'
        idx += 1
    if line_type is None:
        line_type = 'empty_lines'
    elif idx < len(tokens) and last_tok_type == tokenize.NEWLINE:
        # Step past the token following a logical-line NEWLINE.
        idx += 1
    return idx, end_pos[0] - start_pos[0] + 1, line_type
def register(linter):
    """ required method to auto register this checker """
    # Called by pylint's plugin loader; instantiates the checker bound
    # to this linter and hands it back for token processing.
    linter.register_checker(RawMetricsChecker(linter))
|
"""
Remote care specific test that tests things
that were not covered by other tests in the apps.
.. note::
Needs to be extended with tests for all core code.
:subtitle:`Class definitions:`
"""
import datetime
from django.forms import TextInput
from django.test import TestCase
from core.forms import DisplayWidget, ChoiceOtherField, YesNoChoiceField,\
FormDateField, NONE_YES_NO_CHOICES
from core.models import YesNoChoiceField as ModelYesNoChoiceField,\
CheckBoxIntegerField, CheckBoxCharField
from core.widgets import SelectDateWidget
class CoreTests(TestCase):
    """
    Tests for the helpers in the core package (forms, models, widgets).
    """

    def check_forms(self):
        """
        Exercises the widgets and fields from the forms module.
        """
        # display widget: clean() and render() must run without raising
        widget = DisplayWidget()
        widget.clean('test')
        widget.render('name', 'value')

        field = ChoiceOtherField(
            choices=(('1', 1), ('2', 2), ('other', 'other')),
            other_field=TextInput)
        field_name = 'testfield'

        # a regular choice is copied into post_data under the plain name
        post_data = {'testfield_0': '1'}
        field.compress(list(post_data))
        field.fix_value_from_post(post_data, field_name)
        self.assertIn('testfield', post_data)
        self.assertEqual(post_data['testfield'], '1')

        # the free-text value wins when 'other' is selected
        post_data = {'testfield_0': 'other', 'testfield_1': 'test'}
        field.compress(list(post_data))
        field.fix_value_from_post(post_data, field_name)
        self.assertIn('testfield', post_data)
        self.assertEqual(post_data['testfield'], 'test')

        # the three date parts collapse into a single datetime.date
        date_field = FormDateField()
        post_data = {'testfield_day': '1',
                     'testfield_month': '1',
                     'testfield_year': '1970'}
        date_field.fix_value_from_post(post_data, field_name)
        self.assertIn('testfield', post_data)
        self.assertEqual(post_data['testfield'], datetime.date(1970, 1, 1))

        yes_no_choicefield = YesNoChoiceField()
        self.assertEqual(
            yes_no_choicefield.widget.choices, NONE_YES_NO_CHOICES)

    def check_models(self):
        """
        Exercises the custom model fields from the models module.
        """
        self.assertEqual(ModelYesNoChoiceField().formfield().__class__,
                         YesNoChoiceField)
        # these only need to build their form fields without raising
        CheckBoxIntegerField().formfield()
        CheckBoxCharField().formfield()

    def check_widgets(self):
        """
        Exercises the date widget from the widgets module.
        """
        date_widget = SelectDateWidget(years=range(1970, 1980))
        # every supported (and the default None) format must render choices
        for choices_of, fmts in (
                (date_widget.month_choices, ['n', 'm', 'F', 'b', 'M', 'N', None]),
                (date_widget.day_choices, ['j', 'd', None]),
                (date_widget.year_choices, ['Y', 'y', None])):
            for fmt in fmts:
                choices_of(fmt)

    def test_core(self):
        """
        Only checks parts that are not covered by other Remote Care tests
        """
        for check in (self.check_forms, self.check_models, self.check_widgets):
            check()
|
"""
Pints the list of MIDI devices available for inpout use.
"""
import sys
import os
import pygame
import pygame.midi
from pygame.locals import *
try:  # Ensure set available for output example
    set
except NameError:
    # Python < 2.4 fallback: the builtin ``set`` did not exist yet.
    from sets import Set as set
def print_device_info():
    """Initialise the pygame MIDI subsystem, print device info, shut it down.

    ``pygame.midi.quit()`` runs in a ``finally`` block so the MIDI
    subsystem is never left initialised if printing raises.
    """
    pygame.midi.init()
    try:
        _print_device_info()
    finally:
        pygame.midi.quit()
def _print_device_info():
    """Print name and interface of every MIDI *input* device.

    Output-only devices are skipped, matching the module's stated purpose.
    """
    for device_id in range(pygame.midi.get_count()):
        # per pygame.midi.get_device_info:
        # (interface, name, is_input, is_output, is_opened)
        interf, name, is_input, _is_output, _opened = \
            pygame.midi.get_device_info(device_id)
        if is_input:
            print("\t- Name: " + str(name) + "\tInterface: " + str(interf) )
if __name__ == '__main__':
    # Stand-alone entry point: list the input-capable MIDI devices and exit.
    print("\nHere is the list of MIDI devices available for INPUT:")
    print_device_info()
    print("\n")
|
"""
mtpy/mtpy/analysis/niblettbostick.py
Contains functions for the calculation of the Niblett-Bostick transformation of
impedance tensors.
The methods follow
- Niblett
- Bostick
- Jones
- J. RODRIGUEZ, F.J. ESPARZA, E. GOMEZ-TREVINO
Niblett-Bostick transformations are possible in 1D and 2D.
Functions:
@UofA, 2013
(LK)
"""
import numpy as np
import scipy.interpolate as spi
import mtpy.core.z as MTz
import mtpy.analysis.geometry as MTge
import mtpy.utils.exceptions as MTex
import mtpy.utils.calculator as MTcc
import copy
reload(MTz)
def rhophi2rhodepth(rho, phase, period):
    """
    Convert a period-dependent pair of rho/phase into rho/depth
    (Ohm meters / meters).

    The conversion uses the simplified Niblett-Bostick transformation
    without derivatives.

    Input:
    - apparent resistivity (Ohm meters)
    - phase angle (degrees)
    - period (seconds)

    Output:
    - Niblett-Bostick resistivity estimate (Ohm meters)
    - penetration depth (meters)
    """
    # classical Bostick depth: sqrt(rho * T / (2 * pi * mu0))
    depth = np.sqrt(rho*period/2/np.pi/MTcc.mu0)
    # phase is folded into [0, 90) degrees, converted to radians:
    # rho_NB = rho * (pi / (2 * phi) - 1)
    rho_nb = rho * (np.pi/2/np.deg2rad(phase%90) - 1)
    return rho_nb, depth
def calculate_znb(z_object=None, z_array=None, periods=None):
    """
    Determine an array of Z_nb (depth dependent Niblett-Bostick transformed Z)
    from the 1D and 2D parts of an impedance tensor array Z.

    input:
    - Z (object or array)
    - periods (mandatory, if Z is just array)

    output:
    - Z_nb (two n x 2 arrays: [depth, rho] for the larger and the smaller
      of the two Niblett-Bostick resistivities per period)

    The calculation of the Z_nb needs 6 steps:
    1) Determine the dimensionality of the Z(T), discard all 3D parts
    2) Rotate all Z(T) to TE/TM setup (T_parallel/T_ortho)
    3) Transform every component individually by Niblett-Bostick
    4) collect the respective 2 components each for equal/similar depths
    5) interprete them as TE_nb/TM_nb
    6) set up Z_nb(depth)

    If 1D layers occur inbetween 2D layers, the strike angle is undefined
    therein.  We take an - arbitrarily chosen - linear interpolation of strike
    angle for these layers (linearly w.r.t. the periods).

    Note:
    No propagation of errors implemented yet!
    """
    # deal with inputs: a z object carries both z and frequencies
    if z_object is not None:
        z = z_object.z
        periods = 1./z_object.freq
    else:
        z = z_array

    dimensions = MTge.dimensionality(z)
    angles = MTge.strike_angle(z)

    # reduce actual Z by the 3D layers:
    idx_no3d = np.where(dimensions != 3)[0]
    z2 = z[idx_no3d]
    angles2 = angles[idx_no3d]
    periods2 = periods[idx_no3d]

    angles_incl1D = interpolate_strike_angles(angles2[:, 0], periods2)
    z3 = MTz.rotate_z(z2, -angles_incl1D)[0]

    # at this point we assume that the two modes are the off-diagonal
    # elements!! TE is element (1,2), TM at (2,1)
    lo_nb_max = []
    lo_nb_min = []

    # compute res/phase once (was computed twice before)
    resphi = MTz.z2resphi(z3, periods2)
    app_res = resphi[0]
    phase = resphi[1]

    # iterate over the *filtered* periods: app_res/phase only contain
    # entries for the non-3D layers, so indexing them with the full period
    # list was an off-by-N bug whenever 3D layers had been discarded
    for i, per in enumerate(periods2):
        te_rho, te_depth = rhophi2rhodepth(app_res[i][0, 1], phase[i][0, 1], per)
        tm_rho, tm_depth = rhophi2rhodepth(app_res[i][1, 0], phase[i][1, 0], per)
        if te_rho > tm_rho:
            lo_nb_max.append([te_depth, te_rho])
            lo_nb_min.append([tm_depth, tm_rho])
        else:
            lo_nb_min.append([te_depth, te_rho])
            lo_nb_max.append([tm_depth, tm_rho])

    return np.array(lo_nb_max), np.array(lo_nb_min)
def calculate_depth_nb(z_object=None, z_array=None, periods=None):
    """
    Determine an array of Z_nb (depth dependent Niblett-Bostick transformed Z)
    from the 1D and 2D parts of an impedance tensor array Z.

    The calculation of the Z_nb needs 6 steps:
    1) Determine the dimensionality of the Z(T), discard all 3D parts
    2) Rotate all Z(T) to TE/TM setup (T_parallel/T_ortho)
    3) Transform every component individually by Niblett-Bostick
    4) collect the respective 2 components each for equal/similar depths
    5) interprete them as TE_nb/TM_nb
    6) set up Z_nb(depth)

    If 1D layers occur inbetween 2D layers, the strike angle is undefined
    therein.  We take an - arbitrarily chosen - linear interpolation of strike
    angle for these layers (linearly w.r.t. the periods).

    Note:
    No propagation of errors implemented yet!

    Arguments
    -------------
        *z_object* : mtpy.core.z object

        *z_array* : np.ndarray [num_periods, 2, 2]

        *periods* : np.ndarray(num_periods)
                    only input if input z_array, otherwise periods are
                    extracted from z_object.freq

    Returns
    ------------------
        *depth_array* : np.ndarray(num_periods,
                                   dtype=['period', 'depth_min', 'depth_max',
                                          'rho_min', 'rho_max'])
                        numpy structured array with keywords.
                        - period    --> period in s
                        - depth_min --> minimum depth estimated (m)
                        - depth_max --> maximum depth estimated (m)
                        - rho_min   --> minimum resistivity estimated (Ohm-m)
                        - rho_max   --> maximum resistivity estimated (Ohm-m)

    Example
    ------------
        >>> import mtpy.analysis.niblettbostick as nb
        >>> depth_array = nb.calculate_znb(z_object=z1)
        >>> # plot the results
        >>> import matplotlib.pyplot as plt
        >>> fig = plt.figure()
        >>> ax = fig.add_subplot(1,1,1)
        >>> ax.semilogy(depth_array['depth_min'], depth_array['period'])
        >>> ax.semilogy(depth_array['depth_max'], depth_array['period'])
        >>> plt.show()
    """
    # deal with inputs
    if z_object is not None:
        z = z_object.z
        periods = 1./z_object.freq
    else:
        z = z_array

    dimensions = MTge.dimensionality(z_array=z)
    angles = MTge.strike_angle(z_array=z)

    # reduce actual Z by the 3D layers:
    angles_2d = np.nan_to_num(angles[np.where(dimensions != 3)][:, 0])
    periods_2d = periods[np.where(dimensions != 3)]

    # interpolate strike angle onto all periods:
    # make a function for strike using only the 2d angles
    strike_interp = spi.interp1d(periods_2d, angles_2d,
                                 bounds_error=False,
                                 fill_value=0)
    strike_angles = strike_interp(periods)

    # rotate z to be along the interpolated strike angles
    z_rot = MTz.rotate_z(z, strike_angles)[0]

    # at this point we assume that the two modes are the off-diagonal
    # elements!! TE is element (1,2), TM at (2,1)

    # use the builtin ``float``: ``np.float`` was only ever an alias for it
    # and has been removed in NumPy >= 1.24
    depth_array = np.zeros(periods.shape[0],
                           dtype=[('period', float),
                                  ('depth_min', float),
                                  ('depth_max', float),
                                  ('rho_min', float),
                                  ('rho_max', float)])

    app_res, app_res_err, phase, phase_err = MTz.z2resphi(z_rot, periods)
    for ii, per in enumerate(periods):
        te_rho, te_depth = rhophi2rhodepth(app_res[ii, 0, 1],
                                           phase[ii, 0, 1],
                                           per)
        tm_rho, tm_depth = rhophi2rhodepth(app_res[ii, 1, 0],
                                           phase[ii, 1, 0],
                                           per)
        depth_array[ii]['period'] = per
        depth_array[ii]['depth_min'] = min([te_depth, tm_depth])
        depth_array[ii]['depth_max'] = max([te_depth, tm_depth])
        depth_array[ii]['rho_min'] = min([te_rho, tm_rho])
        depth_array[ii]['rho_max'] = max([te_rho, tm_rho])

    return depth_array
# return arrays of the min and max depths
def calculate_rho_minmax(z_object=None, z_array=None, periods=None):
    """
    Determine 2 arrays of Niblett-Bostick transformed apparent resistivities:
    minimum and maximum values for respective periods.

    Values are calculated from the 1D and 2D parts of an impedance tensor
    array Z.

    input:
    - Z (object or array)
    - periods (mandatory, if Z is just array)

    output:
    - n x 3 array, depth/rho_nb/angle for rho_nb max
    - n x 3 array, depth/rho_nb/angle for rho_nb min

    The calculation is carried out by :
    1) Determine the dimensionality of the Z(T), discard all 3D parts
    2) loop over periods
        * rotate Z and calculate app_res_NB for off-diagonal elements
        * find maximum and minimum values
        * write out respective depths and rho values

    Note:
    No propagation of errors implemented yet!
    """
    # deal with inputs: a z object carries both z and frequencies
    if z_object is not None:
        z = z_object.z
        periods = 1./z_object.freq
    else:
        z = z_array

    dimensions = MTge.dimensionality(z)

    # reduce actual Z by the 3D layers:
    idx_no3d = np.where(dimensions != 3)[0]
    z2 = z[idx_no3d]
    periods2 = periods[idx_no3d]

    lo_nb_max = []
    lo_nb_min = []

    # scan a half-turn in 0.5 degree steps
    rotsteps = 360
    rotangles = np.arange(rotsteps)*180./rotsteps

    for i, per in enumerate(periods2):
        z_curr = z2[i]
        # columns: te_depth, te_rho, tm_depth, tm_rho per rotation angle
        temp_vals = np.zeros((rotsteps, 4))
        for jj, d in enumerate(rotangles):
            new_z = MTcc.rotatematrix_incl_errors(z_curr, d)[0]
            # compute res and phase once per rotation (was computed twice)
            resphi = MTz.z2resphi(new_z, per)
            res = resphi[0]
            phs = resphi[1]
            te_rho, te_depth = rhophi2rhodepth(res[0, 1], phs[0, 1], per)
            tm_rho, tm_depth = rhophi2rhodepth(res[1, 0], phs[1, 0], per)
            temp_vals[jj, 0] = te_depth
            temp_vals[jj, 1] = te_rho
            temp_vals[jj, 2] = tm_depth
            temp_vals[jj, 3] = tm_rho

        # pick the mode (TE or TM) with the overall larger rho as "max"
        column = (np.argmax([np.max(temp_vals[:, 1]),
                             np.max(temp_vals[:, 3])]))*2 + 1
        maxidx = np.argmax(temp_vals[:, column])
        max_rho = temp_vals[maxidx, column]
        max_depth = temp_vals[maxidx, column-1]
        max_ang = rotangles[maxidx]

        # the "min" is taken from the other mode, 90 degrees away
        min_column = (np.argmin([np.max(temp_vals[:, 1]),
                                 np.max(temp_vals[:, 3])]))*2 + 1
        if max_ang <= 90:
            min_ang = max_ang + 90
        else:
            min_ang = max_ang - 90
        minidx = np.argmin(np.abs(rotangles - min_ang))
        min_rho = temp_vals[minidx, min_column]
        min_depth = temp_vals[minidx, min_column-1]

        lo_nb_max.append([max_depth, max_rho, max_ang])
        # include the angle, so the "min" array is n x 3 as documented
        lo_nb_min.append([min_depth, min_rho, min_ang])

    return np.array(lo_nb_max), np.array(lo_nb_min)
def interpolate_strike_angles(angles, in_periods):
    """
    Fill the 'nan' entries (1D layers) of a strike-angle array by linear
    interpolation between the bounding 2D strike angles.

    Expects 2 arrays (angles in degrees, periods in seconds):
    1. sort ascending by periods
    2. loop over angles to find 'nan' values (i.e. 1D layers)
    3. determine linear interpolation between bounding 2D strike angles
    4. if 1D on top or bottom, set to 0 degrees

    Returns the filled angle array in the original (input) order.
    """
    # sort both arrays in ascending period order; remember how to undo it
    orig_sorting = np.argsort(in_periods)
    back_sorting = np.argsort(orig_sorting)
    angles = angles[orig_sorting]
    periods = in_periods[orig_sorting]

    # copy *after* sorting - the result is assembled in sorted order and
    # only re-shuffled at the very end (copying before sorting scrambled
    # the non-nan entries for unsorted input)
    new_angles = copy.copy(angles)

    in_line = 0
    while in_line < len(angles):
        if np.isnan(angles[in_line]):
            if in_line in [0, len(angles) - 1]:
                # 1D layer on top or bottom: set to 0 degrees
                new_angles[in_line] = 0.
                in_line += 1
                continue
            # use the (already interpolated) value directly above:
            ang1 = new_angles[in_line - 1]
            per1 = periods[in_line - 1]
            # find the next non-nan value below:
            ang2 = None
            per2 = None
            jj = in_line + 1
            while jj < len(angles):
                # check before indexing - incrementing first ran past the
                # end of the array and raised IndexError
                if not np.isnan(angles[jj]):
                    per2 = periods[jj]
                    ang2 = angles[jj]
                    break
                jj += 1
            if ang2 is None:
                # only nan below: interpolate towards 0 degrees at the
                # bottom, matching the boundary convention above
                ang2 = 0.
                per2 = periods[-1]
            delta_per = per2 - per1
            delta_ang = ang2 - ang1
            per_step = periods[in_line] - per1
            # NOTE(review): assumes strictly increasing periods, otherwise
            # delta_per could be zero - confirm against callers
            new_angles[in_line] = ang1 + delta_ang/delta_per * per_step
        in_line += 1

    # asserting correct order (same as input) of the angles:
    return new_angles[back_sorting]
|
"""
Request module
"""
from tools import pathjoin
class Request(object):
    """A single captured HTTP request belonging to a bin."""

    # API sub-path under which requests are exposed.
    path = 'requests'

    def __init__(self, bin=None, body=None, content_length=0, content_type='',
                 form_data=None, headers=None, id='', method='', path='/',
                 query_string=None, remote_addr='', time=None, **kwargs):
        # The dict parameters default to None instead of ``{}`` so every
        # instance gets its own dict: mutable default arguments are shared
        # across calls, so mutating one request's body/headers would have
        # leaked into all later requests.
        self.bin = bin
        self.body = {} if body is None else body
        self.content_length = content_length
        self.content_type = content_type
        self.form_data = {} if form_data is None else form_data
        self.headers = {} if headers is None else headers
        self.id = id
        self.method = method
        self.path = path
        self.query_string = {} if query_string is None else query_string
        self.remote_addr = remote_addr
        self.time = time

    @classmethod
    def from_response(cls, response, bin=None):
        """Build one Request (or a list of them) from an API response.

        Raises AssertionError with the response reason on a non-2xx/3xx
        status code.
        """
        assert 200 <= response.status_code < 400, response.reason
        data = response.json()
        if isinstance(data, list):
            return [cls(bin=bin, **item) for item in data]
        return cls(bin=bin, **data)

    @property
    def api_url(self):
        '''return the api url of this request'''
        return pathjoin(Request.path, self.id, url=self.bin.api_url)
|
import os
import subprocess
import sys
import tempfile
from textwrap import dedent
from unittest import mock
import fixtures
from testtools.matchers import Contains, Equals, Not, StartsWith
import snapcraft
from . import ProjectLoaderBaseTest
from tests.fixture_setup.os_release import FakeOsRelease
class EnvironmentTest(ProjectLoaderBaseTest):
    """Tests for the shell environment snapcraft builds for the snap, stage
    and per-part build steps."""

    def setUp(self):
        """Provide a minimal one-part snapcraft.yaml shared by most tests."""
        super().setUp()
        self.snapcraft_yaml = dedent(
            """\
            name: test
            version: "1"
            summary: test
            description: test
            confinement: strict
            grade: stable
            parts:
              part1:
                plugin: nil
            """
        )

    def test_config_snap_environment(self):
        """snap_env puts prime's bin dirs on PATH and existing lib dirs on
        LD_LIBRARY_PATH."""
        project_config = self.make_snapcraft_project(self.snapcraft_yaml)

        # create the library directories so snap_env picks them up
        lib_paths = [
            os.path.join(self.prime_dir, "lib"),
            os.path.join(self.prime_dir, "usr", "lib"),
            os.path.join(self.prime_dir, "lib", project_config.project.arch_triplet),
            os.path.join(
                self.prime_dir, "usr", "lib", project_config.project.arch_triplet
            ),
        ]
        for lib_path in lib_paths:
            os.makedirs(lib_path)

        environment = project_config.snap_env()
        self.assertThat(
            environment,
            Contains(
                'PATH="{0}/usr/sbin:{0}/usr/bin:{0}/sbin:{0}/bin:$PATH"'.format(
                    self.prime_dir
                )
            ),
        )

        # Ensure that LD_LIBRARY_PATH is present and it contains only the
        # basics.
        paths = []
        for variable in environment:
            if "LD_LIBRARY_PATH" in variable:
                these_paths = variable.split("=")[1].strip()
                paths.extend(these_paths.replace('"', "").split(":"))

        self.assertTrue(len(paths) > 0, "Expected LD_LIBRARY_PATH to be in environment")

        expected = (
            os.path.join(self.prime_dir, i)
            for i in [
                "lib",
                os.path.join("usr", "lib"),
                os.path.join("lib", project_config.project.arch_triplet),
                os.path.join("usr", "lib", project_config.project.arch_triplet),
            ]
        )
        for item in expected:
            self.assertTrue(
                item in paths,
                "Expected LD_LIBRARY_PATH in {!r} to include {!r}".format(paths, item),
            )

    def test_config_snap_environment_with_no_library_paths(self):
        """Without lib dirs on disk, snap_env sets PATH but no LD_LIBRARY_PATH."""
        project_config = self.make_snapcraft_project(self.snapcraft_yaml)

        environment = project_config.snap_env()
        self.assertTrue(
            'PATH="{0}/usr/sbin:{0}/usr/bin:{0}/sbin:{0}/bin:$PATH"'.format(
                self.prime_dir
            )
            in environment,
            "Current PATH is {!r}".format(environment),
        )
        for e in environment:
            self.assertFalse(
                "LD_LIBRARY_PATH" in e, "Current environment is {!r}".format(e)
            )

    @mock.patch.object(
        snapcraft.internal.pluginhandler.PluginHandler, "get_primed_dependency_paths"
    )
    def test_config_snap_environment_with_dependencies(self, mock_get_dependencies):
        """Primed dependency paths reported by plugins end up on
        LD_LIBRARY_PATH."""
        library_paths = {
            os.path.join(self.prime_dir, "lib1"),
            os.path.join(self.prime_dir, "lib2"),
        }
        mock_get_dependencies.return_value = library_paths
        project_config = self.make_snapcraft_project(self.snapcraft_yaml)

        for lib_path in library_paths:
            os.makedirs(lib_path)

        # Ensure that LD_LIBRARY_PATH is present and it contains the
        # extra dependency paths.
        paths = []
        for variable in project_config.snap_env():
            if "LD_LIBRARY_PATH" in variable:
                these_paths = variable.split("=")[1].strip()
                paths.extend(these_paths.replace('"', "").split(":"))

        self.assertTrue(len(paths) > 0, "Expected LD_LIBRARY_PATH to be in environment")

        expected = (os.path.join(self.prime_dir, i) for i in ["lib1", "lib2"])
        for item in expected:
            self.assertTrue(
                item in paths,
                "Expected LD_LIBRARY_PATH ({!r}) to include {!r}".format(paths, item),
            )

    @mock.patch.object(
        snapcraft.internal.pluginhandler.PluginHandler, "get_primed_dependency_paths"
    )
    def test_config_snap_environment_with_dependencies_but_no_paths(
        self, mock_get_dependencies
    ):
        """Dependency paths that do not exist on disk are dropped from
        LD_LIBRARY_PATH."""
        library_paths = {
            os.path.join(self.prime_dir, "lib1"),
            os.path.join(self.prime_dir, "lib2"),
        }
        mock_get_dependencies.return_value = library_paths
        project_config = self.make_snapcraft_project(self.snapcraft_yaml)

        # Ensure that LD_LIBRARY_PATH is present, but is completey empty since
        # no library paths actually exist.
        for variable in project_config.snap_env():
            self.assertFalse(
                "LD_LIBRARY_PATH" in variable,
                "Expected no LD_LIBRARY_PATH (got {!r})".format(variable),
            )

    def test_config_runtime_environment_ld(self):
        """Paths from ld.so.conf files under prime/ are added to
        LD_LIBRARY_PATH (comment lines in the conf files are ignored)."""
        # Place a few ld.so.conf files in supported locations. We expect the
        # contents of these to make it into the LD_LIBRARY_PATH.
        mesa_dir = os.path.join(self.prime_dir, "usr", "lib", "my_arch", "mesa")
        os.makedirs(mesa_dir)
        with open(os.path.join(mesa_dir, "ld.so.conf"), "w") as f:
            f.write("/mesa")

        mesa_egl_dir = os.path.join(self.prime_dir, "usr", "lib", "my_arch", "mesa-egl")
        os.makedirs(mesa_egl_dir)
        with open(os.path.join(mesa_egl_dir, "ld.so.conf"), "w") as f:
            f.write("# Standalone comment\n")
            f.write("/mesa-egl")

        project_config = self.make_snapcraft_project(self.snapcraft_yaml)
        environment = project_config.snap_env()

        # Ensure that the LD_LIBRARY_PATH includes all the above paths
        paths = []
        for variable in environment:
            if "LD_LIBRARY_PATH" in variable:
                these_paths = variable.split("=")[1].strip()
                paths.extend(these_paths.replace('"', "").split(":"))

        self.assertTrue(len(paths) > 0, "Expected LD_LIBRARY_PATH to be in environment")

        expected = (os.path.join(self.prime_dir, i) for i in ["mesa", "mesa-egl"])
        for item in expected:
            self.assertTrue(
                item in paths, 'Expected LD_LIBRARY_PATH to include "{}"'.format(item)
            )

    def test_config_env_dedup(self):
        """Regression test for LP: #1767625.
        Verify that the use of after with multiple parts does not produce
        duplicate exports.
        """
        snapcraft_yaml = dedent(
            """\
            name: test
            version: "1"
            summary: test
            description: test
            confinement: strict
            grade: stable
            parts:
              main:
                plugin: nil
                after: [part1, part2, part3]
              part1:
                plugin: nil
              part2:
                plugin: nil
              part3:
                plugin: nil
            """
        )
        project_config = self.make_snapcraft_project(snapcraft_yaml)
        part = project_config.parts.get_part("main")
        environment = project_config.parts.build_env_for_part(part, root_part=True)
        # We sort here for equality checking but they should not be sorted
        # for a real case scenario.
        environment.sort()
        self.assertThat(
            environment,
            Equals(
                [
                    (
                        'PATH="{0}/parts/main/install/usr/sbin:'
                        "{0}/parts/main/install/usr/bin:"
                        "{0}/parts/main/install/sbin:"
                        '{0}/parts/main/install/bin:$PATH"'
                    ).format(self.path),
                    (
                        'PATH="{0}/stage/usr/sbin:'
                        "{0}/stage/usr/bin:"
                        "{0}/stage/sbin:"
                        '{0}/stage/bin:$PATH"'
                    ).format(self.path),
                    'PERL5LIB="{0}/stage/usr/share/perl5/"'.format(self.path),
                    'SNAPCRAFT_ARCH_TRIPLET="{}"'.format(
                        project_config.project.arch_triplet
                    ),
                    'SNAPCRAFT_PARALLEL_BUILD_COUNT="2"',
                    'SNAPCRAFT_PART_BUILD="{}/parts/main/build"'.format(self.path),
                    'SNAPCRAFT_PART_INSTALL="{}/parts/main/install"'.format(self.path),
                    'SNAPCRAFT_PART_SRC="{}/parts/main/src"'.format(self.path),
                    'SNAPCRAFT_PRIME="{}/prime"'.format(self.path),
                    'SNAPCRAFT_PROJECT_GRADE="stable"',
                    'SNAPCRAFT_PROJECT_NAME="test"',
                    'SNAPCRAFT_PROJECT_VERSION="1"',
                    'SNAPCRAFT_STAGE="{}/stage"'.format(self.path),
                ]
            ),
        )

    def test_config_stage_environment_confinement_classic(self):
        """Classic confinement adds the base snap's lib dirs to
        LD_LIBRARY_PATH."""
        self.useFixture(FakeOsRelease())
        snapcraft_yaml = dedent(
            """\
            name: test
            version: "1"
            summary: test
            description: test
            confinement: classic
            grade: stable
            base: core
            parts:
              part1:
                plugin: nil
            """
        )
        project_config = self.make_snapcraft_project(snapcraft_yaml)
        part = project_config.parts.get_part("part1")
        environment = project_config.parts.build_env_for_part(part, root_part=True)
        self.assertIn(
            'LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{base_core_path}/lib:'
            "{base_core_path}/usr/lib:{base_core_path}/lib/{arch_triplet}:"
            '{base_core_path}/usr/lib/{arch_triplet}"'.format(
                base_core_path=self.base_environment.core_path,
                arch_triplet=project_config.project.arch_triplet,
            ),
            environment,
        )

    def test_stage_environment_confinement_classic_with_incompat_host(self):
        """On an incompatible host release, classic confinement sets no
        LD_LIBRARY_PATH."""
        self.useFixture(FakeOsRelease(version_codename="incompatible-fake"))
        snapcraft_yaml = dedent(
            """\
            name: test
            version: "1"
            summary: test
            description: test
            confinement: classic
            grade: stable
            base: core
            parts:
              part1:
                plugin: nil
            """
        )
        project_config = self.make_snapcraft_project(snapcraft_yaml)
        part = project_config.parts.get_part("part1")
        environment = project_config.parts.build_env_for_part(part, root_part=True)
        for env_item in environment:
            self.assertThat(env_item, Not(StartsWith("LD_LIBRARY_PATH")))

    def test_stage_environment_confinement_classic_with_incompat_base(self):
        """With an unknown base, classic confinement sets no LD_LIBRARY_PATH."""
        # NOTE(review): this literal is not dedent()ed like the others, so
        # the YAML content must stay flush-left.
        snapcraft_yaml = """\
name: test
version: "1"
summary: test
description: test
confinement: classic
grade: stable
base: fake-core
parts:
  part1:
    plugin: nil
"""
        project_config = self.make_snapcraft_project(snapcraft_yaml)
        part = project_config.parts.get_part("part1")
        environment = project_config.parts.build_env_for_part(part, root_part=True)
        for env_item in environment:
            self.assertThat(env_item, Not(StartsWith("LD_LIBRARY_PATH")))

    def test_config_stage_environment(self):
        """stage_env exports PATH, LD_LIBRARY_PATH, C*/LD FLAGS and PERL5LIB
        all rooted at the stage directory."""
        arch_triplet = snapcraft.ProjectOptions().arch_triplet
        # create the staged lib/include dirs so they appear in the env
        paths = [
            os.path.join(self.stage_dir, "lib"),
            os.path.join(self.stage_dir, "lib", arch_triplet),
            os.path.join(self.stage_dir, "usr", "lib"),
            os.path.join(self.stage_dir, "usr", "lib", arch_triplet),
            os.path.join(self.stage_dir, "include"),
            os.path.join(self.stage_dir, "usr", "include"),
            os.path.join(self.stage_dir, "include", arch_triplet),
            os.path.join(self.stage_dir, "usr", "include", arch_triplet),
        ]
        for path in paths:
            os.makedirs(path)

        project_config = self.make_snapcraft_project(self.snapcraft_yaml)
        environment = project_config.stage_env()

        self.assertTrue(
            'PATH="{0}/usr/sbin:{0}/usr/bin:{0}/sbin:{0}/bin:$PATH"'.format(
                self.stage_dir
            )
            in environment
        )
        self.assertTrue(
            'LD_LIBRARY_PATH="$LD_LIBRARY_PATH:{stage_dir}/lib:'
            "{stage_dir}/usr/lib:{stage_dir}/lib/{arch_triplet}:"
            '{stage_dir}/usr/lib/{arch_triplet}"'.format(
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
            in environment,
            "Current environment is {!r}".format(environment),
        )
        self.assertTrue(
            'CFLAGS="$CFLAGS -I{stage_dir}/include -I{stage_dir}/usr/include '
            "-I{stage_dir}/include/{arch_triplet} "
            '-I{stage_dir}/usr/include/{arch_triplet}"'.format(
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
            in environment,
            "Current environment is {!r}".format(environment),
        )
        self.assertTrue(
            'CPPFLAGS="$CPPFLAGS -I{stage_dir}/include '
            "-I{stage_dir}/usr/include "
            "-I{stage_dir}/include/{arch_triplet} "
            '-I{stage_dir}/usr/include/{arch_triplet}"'.format(
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
            in environment,
            "Current environment is {!r}".format(environment),
        )
        self.assertTrue(
            'CXXFLAGS="$CXXFLAGS -I{stage_dir}/include '
            "-I{stage_dir}/usr/include "
            "-I{stage_dir}/include/{arch_triplet} "
            '-I{stage_dir}/usr/include/{arch_triplet}"'.format(
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
            in environment,
            "Current environment is {!r}".format(environment),
        )
        self.assertTrue(
            'LDFLAGS="$LDFLAGS -L{stage_dir}/lib -L{stage_dir}/usr/lib '
            "-L{stage_dir}/lib/{arch_triplet} "
            '-L{stage_dir}/usr/lib/{arch_triplet}"'.format(
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
            in environment,
            "Current environment is {!r}".format(environment),
        )
        self.assertTrue(
            'PERL5LIB="{}/usr/share/perl5/"'.format(self.stage_dir) in environment
        )

    def test_parts_build_env_ordering_with_deps(self):
        """Part-local install dirs come first in the flags/paths, after any
        user-provided values from the calling shell."""
        snapcraft_yaml = dedent(
            """\
            name: test
            version: "1"
            summary: test
            description: test
            confinement: strict
            grade: stable
            parts:
              part1:
                plugin: nil
              part2:
                plugin: nil
                after: [part1]
            """
        )

        self.useFixture(fixtures.EnvironmentVariable("PATH", "/bin"))

        arch_triplet = snapcraft.ProjectOptions().arch_triplet
        self.maxDiff = None
        paths = [
            os.path.join(self.stage_dir, "lib"),
            os.path.join(self.stage_dir, "lib", arch_triplet),
            os.path.join(self.stage_dir, "usr", "lib"),
            os.path.join(self.stage_dir, "usr", "lib", arch_triplet),
            os.path.join(self.stage_dir, "include"),
            os.path.join(self.stage_dir, "usr", "include"),
            os.path.join(self.stage_dir, "include", arch_triplet),
            os.path.join(self.stage_dir, "usr", "include", arch_triplet),
            os.path.join(self.parts_dir, "part1", "install", "include"),
            os.path.join(self.parts_dir, "part1", "install", "lib"),
            os.path.join(self.parts_dir, "part2", "install", "include"),
            os.path.join(self.parts_dir, "part2", "install", "lib"),
        ]
        for path in paths:
            os.makedirs(path)

        project_config = self.make_snapcraft_project(snapcraft_yaml)
        part2 = [
            part for part in project_config.parts.all_parts if part.name == "part2"
        ][0]
        env = project_config.parts.build_env_for_part(part2)
        env_lines = "\n".join(["export {}\n".format(e) for e in env])

        shell_env = {
            "CFLAGS": "-I/user-provided",
            "CXXFLAGS": "-I/user-provided",
            "CPPFLAGS": "-I/user-provided",
            "LDFLAGS": "-L/user-provided",
            "LD_LIBRARY_PATH": "/user-provided",
        }

        def get_envvar(envvar):
            # run the generated exports through a real shell and echo back
            # the resulting value of *envvar*
            with tempfile.NamedTemporaryFile(mode="w+") as f:
                f.write(env_lines)
                f.write("echo ${}".format(envvar))
                f.flush()
                output = subprocess.check_output(["/bin/sh", f.name], env=shell_env)
            return output.decode(sys.getfilesystemencoding()).strip()

        expected_cflags = (
            "-I/user-provided "
            "-I{parts_dir}/part2/install/include -I{stage_dir}/include "
            "-I{stage_dir}/usr/include "
            "-I{stage_dir}/include/{arch_triplet} "
            "-I{stage_dir}/usr/include/{arch_triplet}".format(
                parts_dir=self.parts_dir,
                stage_dir=self.stage_dir,
                arch_triplet=project_config.project.arch_triplet,
            )
        )
        self.assertThat(get_envvar("CFLAGS"), Equals(expected_cflags))
        self.assertThat(get_envvar("CXXFLAGS"), Equals(expected_cflags))
        self.assertThat(get_envvar("CPPFLAGS"), Equals(expected_cflags))

        self.assertThat(
            get_envvar("LDFLAGS"),
            Equals(
                "-L/user-provided "
                "-L{parts_dir}/part2/install/lib -L{stage_dir}/lib "
                "-L{stage_dir}/usr/lib -L{stage_dir}/lib/{arch_triplet} "
                "-L{stage_dir}/usr/lib/{arch_triplet}".format(
                    parts_dir=self.parts_dir,
                    stage_dir=self.stage_dir,
                    arch_triplet=project_config.project.arch_triplet,
                )
            ),
        )

        self.assertThat(
            get_envvar("LD_LIBRARY_PATH"),
            Equals(
                "/user-provided:"
                "{parts_dir}/part2/install/lib:"
                "{stage_dir}/lib:"
                "{stage_dir}/usr/lib:"
                "{stage_dir}/lib/{arch_triplet}:"
                "{stage_dir}/usr/lib/{arch_triplet}".format(
                    parts_dir=self.parts_dir,
                    stage_dir=self.stage_dir,
                    arch_triplet=project_config.project.arch_triplet,
                )
            ),
        )

    @mock.patch("multiprocessing.cpu_count", return_value=42)
    def test_parts_build_env_contains_parallel_build_count(self, cpu_mock):
        """SNAPCRAFT_PARALLEL_BUILD_COUNT mirrors multiprocessing.cpu_count()."""
        project_config = self.make_snapcraft_project(self.snapcraft_yaml)
        part1 = [
            part for part in project_config.parts.all_parts if part.name == "part1"
        ][0]
        env = project_config.parts.build_env_for_part(part1)
        self.assertThat(env, Contains('SNAPCRAFT_PARALLEL_BUILD_COUNT="42"'))
|
from rest_framework import viewsets, mixins
from .models import Article
from .serializers import ArticleSerializer, ArticleDetailSerializer
class ArticlesViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoint over every article."""
    serializer_class = ArticleSerializer
    queryset = Article.objects

    def list(self, request):
        # zero-argument super() resolves via the __class__ cell, exactly
        # like the original super(__class__, self) spelling
        response = super().list(request)
        response['Access-Control-Allow-Origin'] = 'http://192.168.18.107:4200'  # TODO: REMOVE, TESTING ONLY
        return response
class PublishedArticlesViewSet(mixins.ListModelMixin,
                               viewsets.GenericViewSet):
    """List endpoint restricted to published articles, newest first."""
    serializer_class = ArticleSerializer
    queryset = Article.objects.filter(published=True).order_by('-published_date')

    def list(self, request):
        # zero-argument super() is equivalent to super(__class__, self)
        response = super().list(request)
        response['Access-Control-Allow-Origin'] = 'http://192.168.18.107:4200'  # TODO: REMOVE, TESTING ONLY
        return response
class PublishedArticlesDetailViewSet(mixins.RetrieveModelMixin,
                                     viewsets.GenericViewSet):
    """Detail endpoint restricted to published articles, newest first."""
    serializer_class = ArticleDetailSerializer
    queryset = Article.objects.filter(published=True).order_by('-published_date')

    def retrieve(self, request, *args, **kwargs):
        # zero-argument super() is equivalent to super(__class__, self)
        response = super().retrieve(request, *args, **kwargs)
        response['Access-Control-Allow-Origin'] = 'http://192.168.18.107:4200'  # TODO: REMOVE, TESTING ONLY
        return response
|
def _is_iter(val):
"Checks if value is a list or tuple"
return isinstance(val, tuple) or isinstance(val, list)
def _iter_join(val):
"Joins values of iterable parameters for the fancy view, unless it equals None, then blank"
return "(" + ", ".join(["{:6g}".format(v) for v in val]) + ")" if val is not None else ""
def _non_iter(val):
"Returns formatted string for a value unless it equals None, then blank"
return "{:6g}".format(val) if val is not None else ""
class current_component_values():
"""Convenience class that makes use of __repr__ methods for nice printing in the notebook
of the properties of parameters of a component
Parameters
----------
component : hyperspy component instance
only_free : bool, default False
If True: Only include the free parameters in the view
only_active : bool, default False
If True: Helper for current_model_values. Only include active components in the view.
Always shows values if used on an individual component.
"""
def __init__(self, component, only_free=False, only_active=False):
self.name = component.name
self.active = component.active
self.parameters = component.parameters
self._id_name = component._id_name
self.only_free = only_free
self.only_active = only_active
def __repr__(self):
# Number of digits for each label for the terminal-style view.
size = {
'name': 14,
'free': 5,
'value': 10,
'std': 10,
'bmin': 10,
'bmax': 10,
}
# Using nested string formatting for flexibility in future updates
signature = "{{:>{name}}} | {{:>{free}}} | {{:>{value}}} | {{:>{std}}} | {{:>{bmin}}} | {{:>{bmax}}}".format(
**size)
if self.only_active:
text = "{0}: {1}".format(self.__class__.__name__, self.name)
else:
text = "{0}: {1}\nActive: {2}".format(
self.__class__.__name__, self.name, self.active)
text += "\n"
text += signature.format("Parameter Name",
"Free", "Value", "Std", "Min", "Max")
text += "\n"
text += signature.format("=" * size['name'], "=" * size['free'], "=" *
size['value'], "=" * size['std'], "=" * size['bmin'], "=" * size['bmax'],)
text += "\n"
for para in self.parameters:
if not self.only_free or self.only_free and para.free:
if _is_iter(para.value):
# iterables (polynomial.value) must be handled separately
# `blank` results in a column of spaces
blank = len(para.value) * ['']
std = para.std if _is_iter(para.std) else blank
bmin = para.bmin if _is_iter(para.bmin) else blank
bmax = para.bmax if _is_iter(para.bmax) else blank
for i, (v, s, bn, bx) in enumerate(
zip(para.value, std, bmin, bmax)):
if i == 0:
text += signature.format(para.name[:size['name']], str(para.free)[:size['free']], str(
v)[:size['value']], str(s)[:size['std']], str(bn)[:size['bmin']], str(bx)[:size['bmax']])
else:
text += signature.format("", "", str(v)[:size['value']], str(
s)[:size['std']], str(bn)[:size['bmin']], str(bx)[:size['bmax']])
text += "\n"
else:
text += signature.format(para.name[:size['name']], str(para.free)[:size['free']], str(para.value)[
:size['value']], str(para.std)[:size['std']], str(para.bmin)[:size['bmin']], str(para.bmax)[:size['bmax']])
text += "\n"
return text
def _repr_html_(self):
    """Render the component's parameters as an HTML table (notebook display)."""
    if self.only_active:
        text = "<p><b>{0}: {1}</b></p>".format(self.__class__.__name__, self.name)
    else:
        text = "<p><b>{0}: {1}</b><br />Active: {2}</p>".format(
            self.__class__.__name__, self.name, self.active)
    para_head = """<table style="width:100%"><tr><th>Parameter Name</th><th>Free</th>
    <th>Value</th><th>Std</th><th>Min</th><th>Max</th></tr>"""
    text += para_head
    for para in self.parameters:
        if not self.only_free or self.only_free and para.free:
            if _is_iter(para.value):
                # iterables (polynomial.value) must be handled separately
                # This should be removed with hyperspy 2.0 as Polynomial
                # has been replaced.
                value = _iter_join(para.value)
                std = _iter_join(para.std)
                bmin = _iter_join(para.bmin)
                bmax = _iter_join(para.bmax)
            else:
                value = _non_iter(para.value)
                std = _non_iter(para.std)
                bmin = _non_iter(para.bmin)
                bmax = _non_iter(para.bmax)
            text += """<tr><td>{0}</td><td>{1}</td><td>{2}</td>
            <td>{3}</td><td>{4}</td><td>{5}</td></tr>""".format(
                para.name, para.free, value, std, bmin, bmax)
    text += "</table>"
    return text
class current_model_values():
    """Convenience class that makes use of __repr__ methods for nice printing in the notebook
    of the properties of parameters in components in a model

    Parameters
    ----------
    model : hyperspy model instance
        The model whose components are rendered.
    only_free : bool, default False
        If True: Only include the free parameters in the view
    only_active : bool, default False
        If True: Only include active parameters in the view
    component_list : list, optional
        Subset of components to render; defaults to every component in
        the model.
    """

    def __init__(self, model, only_free=False, only_active=False,
                 component_list=None):
        self.model = model
        self.only_free = only_free
        self.only_active = only_active
        # Identity check instead of the original `component_list == None`:
        # `==` is non-idiomatic for None tests and can trigger element-wise
        # comparison on array-like containers.
        self.component_list = model if component_list is None else component_list
        # e.g. "<class 'hyperspy.models.model1d.Model1D'>" -> "Model1D"
        self.model_type = str(self.model.__class__).split("'")[1].split('.')[-1]

    def __repr__(self):
        """Plain-text summary: one block per selected component."""
        text = "{}: {}\n".format(
            self.model_type, self.model.signal.metadata.General.title)
        for comp in self.component_list:
            if not self.only_active or self.only_active and comp.active:
                if not self.only_free or comp.free_parameters and self.only_free:
                    text += current_component_values(
                        component=comp,
                        only_free=self.only_free,
                        only_active=self.only_active
                    ).__repr__() + "\n"
        return text

    def _repr_html_(self):
        """HTML summary for notebook display: one table per selected component."""
        html = "<h4>{}: {}</h4>".format(self.model_type,
                                        self.model.signal.metadata.General.title)
        for comp in self.component_list:
            if not self.only_active or self.only_active and comp.active:
                if not self.only_free or comp.free_parameters and self.only_free:
                    html += current_component_values(
                        component=comp,
                        only_free=self.only_free,
                        only_active=self.only_active
                    )._repr_html_()
        return html
|
# Smoke test for the otfftw FFTW binding: forward-transform a small complex
# series, compare against OpenTURNS' built-in KissFFT, then round-trip
# through the inverse transform.
from __future__ import print_function
from openturns import *
from otfftw import *
from time import *

myFFTW = FFTW()
print("myFFTW=", myFFTW)
size = 8
data = ComplexCollection(size)
# Ramp with a proportional negative imaginary part: (k+1) - 0.2j*(k+1).
for i in range(size):
    data[i] = (i + 1.0) - 0.2j * (i + 1.0)
print("data=", data)
result = myFFTW.transform(data)
print("result=", result)
# Reference result from the KissFFT implementation for visual comparison.
print("ref =", KissFFT().transform(data))
# The inverse transform should reproduce the original data.
print("back=", myFFTW.inverseTransform(result))
|
from flask import render_template, request, Blueprint, session, url_for
from sqlalchemy import func
from app import mod_public, db, api
from app.mod_public.forms import ContactForm, SearchForm
from app.models import Book, Bid, Auction
mod_public = Blueprint('public', __name__, url_prefix='/')
@mod_public.route('/')
@mod_public.route('home/', methods=['GET', 'POST'])
def home():
    """Render the home page with the search form and all books.

    The previous implementation branched on ``request.method`` but both
    branches returned the identical response, so the dead branch was
    removed; behavior is unchanged.
    """
    form = SearchForm()
    books = db.session.query(Book).all()
    return render_template('public/home.html', form=form, books=books)
@mod_public.route('about/')
def about():
    """Render the static About page."""
    return render_template('public/about.html')
@mod_public.route('contact/', methods=['GET', 'POST'])
def contact():
    """Show the contact form; acknowledge a valid POST submission."""
    form = ContactForm()
    if request.method == 'GET':
        return render_template('public/contact.html', form=form)
    if request.method == 'POST':
        if form.validate():
            # << Mail method not implemented >>
            return 'Form posted.'
        return render_template('public/contact.html', form=form)
@mod_public.route('search/', methods=['GET', 'POST'])
def search():
    """Search books by exact title match.

    Method names are normalized to upper case for consistency with the
    other routes in this blueprint (Werkzeug upper-cases them internally,
    so routing behavior is unchanged).
    """
    form = SearchForm()
    if request.method == 'POST':
        books = Book.query.filter(Book.title == form.title.data)
        return render_template('public/search.html', books=books, form=form)
    return render_template('public/search.html', form=form)
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
from cmsplugin_youtube.models import YouTube as YouTubeModel
import re
class YouTubePlugin(CMSPluginBase):
    """CMS plugin that renders an embedded YouTube player for a video URL."""
    model = YouTubeModel
    name = _("YouTube")
    render_template = "cmsplugin_youtube/embed_jwp.html"

    def render(self, context, instance, placeholder):
        """Add the plugin instance, placeholder and parsed video id to the context."""
        context.update({
            'object': instance,
            'placeholder': placeholder,
            'video_id': self.extract_video_id(instance.video_url)
        })
        return context

    def extract_video_id(self, video_url):
        """Return the ``v=`` id from a youtube.com URL, or None when absent.

        The previous implementation used Python-2-only ``except Exception, e``
        syntax and printed the AttributeError raised when no match was found;
        an explicit match check avoids the exception entirely and is
        version-agnostic.
        """
        m = re.search(r"youtube\.com/.*v=([^&]*)", video_url)
        return m.group(1) if m else None
plugin_pool.register_plugin(YouTubePlugin)
|
# Constants mirroring the RPi.GPIO public API so code written for real
# hardware can import this simulator module unchanged.
BCM = "123"
IN = 0
OUT = 1
HIGH = 1
LOW = 0
# Parenthesized call: the bare `print "..."` statement was Python-2-only
# syntax; this form works on both Python 2 and 3.
print("GPIO Simulate")
def setmode(a):
    """No-op stand-in for RPi.GPIO.setmode; the mode argument is ignored."""
    pass
def setup(a, b):
    """No-op stand-in for RPi.GPIO.setup; pin and direction are ignored."""
    pass
def input(a):
    """Simulated pin read; always reports a low (0) level.

    NOTE: intentionally shadows the builtin `input` to match the
    RPi.GPIO API surface.
    """
    return 0
def output(a, b):
    """No-op stand-in for RPi.GPIO.output; pin and level are ignored."""
    pass
class PWM:
    """Inert stand-in for RPi.GPIO.PWM; every operation is a no-op."""

    def __init__(self, a, b):
        # Mirrors RPi.GPIO.PWM(pin, frequency) — presumably; both ignored here.
        pass

    def start(self, a):
        # Initial duty cycle; ignored.
        pass

    def ChangeDutyCycle(self, a):
        # New duty cycle; ignored.
        pass
|
import numpy as np
from pysisyphus.calculators.AnaPotBase import AnaPotBase
class Rosenbrock(AnaPotBase):
    """2-D Rosenbrock "banana" potential on a fixed plotting window."""

    def __init__(self):
        """Configure the analytical potential with the classic (a=1, b=100) form."""
        pot_kwargs = {
            "V_str": "(1-x)**2 + 100*(y - x**2)**2",
            "xlim": (-2.5, 2.5),
            "ylim": (-1.5, 3.5),
            # Logarithmically spaced contour levels for plotting.
            "levels": np.logspace(-5, 10, 50, base=2),
        }
        super().__init__(**pot_kwargs)

    def __str__(self):
        return "Rosenbrock calculator"
|
import unittest
import unittest.mock
from kobato.prompt import Prompt, PromptException, confirm
class TestPrompt(unittest.TestCase):
    """Behavioral tests for kobato.prompt.Prompt and the confirm() helper."""

    def test_basic_input(self):
        # Single-choice prompt with case-insensitive matching.
        p = Prompt(allow_multiple=False, case_sensitive=False)
        p.add_action('y', help='Yes, submit current post to point.im and remove draft')
        p.add_action('n', help='No, save draft and exit')
        p.add_action('e', help='Run editor to edit post')
        # p.add_help()
        with unittest.mock.patch('builtins.input', lambda: 'y'):
            for i in range(0, 10):
                self.assertEqual(p.input(), ['y'])

        # Stub that pops entries from the END of the list, so the invalid
        # entries ('D', '+', ...) are consumed first and the valid 'n' is
        # reached before the list empties.
        def helper(t=['n', 'd', 'h', '123', '', 'asdasdasdasdasd', '\n\n\n', '``', '\'', '+', 'D']):
            if not len(t):
                raise Exception('This should not happen')
            return t.pop()
        with unittest.mock.patch('builtins.input', helper):
            self.assertEqual(p.input(), ['n'])
        # Case-insensitive: 'N' resolves to the 'n' action.
        with unittest.mock.patch('builtins.input', lambda: 'N'):
            self.assertEqual(p.input(), ['n'])

    def test_multiple_input(self):
        # Multi-choice prompt, case-sensitive, with conflicting action pairs.
        p = Prompt(allow_multiple=True, case_sensitive=True)
        p.add_action('r', help='Recommend without comment', conflicts=['R'])
        p.add_action('R', help='Recommend with comment', conflicts=['r'])
        p.add_action('f', help='Favorite post')
        p.add_action('u', help='After all actions unsub from the post')
        # p.add_help()
        with unittest.mock.patch('builtins.input', lambda: 'rfu'):
            res = p.input()
            for c in 'rfu':
                self.assertIn(c, res)

        # Pops from the end: garbage and conflicting combinations are
        # rejected until the valid combination 'Rfu' is reached.
        def helper(t=['Rfu', '+', '"', '213454524', '', 'ASDASD', 'RrRrRrR', 'rR', '']):
            if not len(t):
                raise Exception('This should not happen')
            return t.pop()
        with unittest.mock.patch('builtins.input', helper):
            res = p.input()
            for c in 'Rfu':
                self.assertIn(c, res)

    def test_default_option(self):
        p = Prompt(allow_multiple=False, case_sensitive=False)
        p.add_action('y', help='Yes')
        p.add_action('n', help='No', default=True)
        # Empty input falls back to the default action.
        with unittest.mock.patch('builtins.input', lambda: ''):
            res = p.input()
            self.assertEqual(res, ['n'])
        with unittest.mock.patch('builtins.input', lambda: 'Y'):
            res = p.input()
            self.assertEqual(res, ['y'])
        # Registering a duplicate action (case-insensitively) must raise.
        with self.assertRaises(PromptException):
            p.add_action('N', help='NOOOO')
        with self.assertRaises(PromptException):
            p.add_action('y', help='still nooooo')
        with self.assertRaises(PromptException):
            p.add_action('n', help='yep, still no')

    def test_inline_help(self):
        p = Prompt(allow_multiple=False, case_sensitive=False, text='Please confirm')
        p.add_action('y')
        p.add_action('n', default=True)
        # The default action appears upper-cased in the inline hint.
        self.assertEqual(p._inline_help(), "y|N|?")

    @unittest.mock.patch('builtins.input')
    def test_confirm_action(self, input_):
        # Empty input takes the given default; explicit input wins otherwise.
        input_.return_value = ''
        self.assertEqual(confirm(default='yes'), ['y'])
        input_.return_value = 'n'
        self.assertEqual(confirm(default='yes'), ['n'])
        input_.return_value = 'y'
        self.assertEqual(confirm(), ['y'])
        input_.return_value = 'n'
        self.assertEqual(confirm(), ['n'])
        # `additional` extends the action set beyond y/n.
        input_.return_value = 'y'
        self.assertEqual(confirm(additional={'e': 'Edit'}), ['y'])
        input_.return_value = 'e'
        self.assertEqual(confirm(additional={'e': 'Edit'}), ['e'])
        input_.return_value = ''
        self.assertEqual(confirm(additional={'e': 'Edit'}, default='e'), ['e'])
|
from omsdk.sdkcenum import EnumWrapper, TypeHelper
# Lifecycle states for the type-management machinery; each member maps to
# its own name as the enum value.
TypeState = EnumWrapper('TMS', {
    'UnInitialized' : 'UnInitialized',
    'Initializing' : 'Initializing',
    'Precommit' : 'Precommit',
    'Committed' : 'Committed',
    'Changing' : 'Changing',
}).enum_type
class TypeBase(object):
    """Root of the type hierarchy; carries no state of its own."""

    def __init__(self):
        """Intentionally empty constructor."""
        pass
|
import traceback
import sys
import ConfigParser
import re
import os
import netifaces
from IPy import IP
MANAGED_INTERFACE_NAMES = ('eth', 'br', 'ens', 'enp', 'eno')
def formatExceptionInfo(maxTBlevel=5):
    """Return a one-string summary of the exception currently being handled.

    Must be called from inside an ``except`` block. ``maxTBlevel`` limits the
    number of traceback frames formatted (only the first is included).
    """
    cla, exc, trbk = sys.exc_info()
    excName = cla.__name__
    try:
        excArgs = exc.__dict__["args"]
    except KeyError:
        excArgs = "<no args>"
    excTb = traceback.format_tb(trbk, maxTBlevel)
    # str() around the args: excArgs is usually a tuple, so the original
    # naive `excName + " " + excArgs + ...` concatenation raised TypeError.
    return str(excName) + " " + str(excArgs) + " : \n" + excTb[0]
def getINIoption(section, option, ini):
    """Return `option` from `section` of the INI file at path `ini`.

    NOTE: uses the Python-2 `ConfigParser` module imported at the top of
    this file; missing sections/options raise the ConfigParser errors.
    """
    config = ConfigParser.SafeConfigParser()
    config.read(ini)
    return config.get(section, option)
def get_config_option(file, option):
    """
    Returns the option value of a config file formatted like:
    OPTION=value

    Returns "" when the option is absent or empty; raises OSError when
    the file does not exist.
    """
    if not os.path.exists(file):
        raise OSError("File not found")
    with open(file) as f:
        string = f.read()
    # Raw string avoids invalid-escape warnings for \s; re.escape() keeps
    # option names containing regex metacharacters (e.g. "a.b") from being
    # silently misinterpreted as patterns.
    expr = re.compile(r"\s*%s\s*=\s*(.*)" % re.escape(option), re.M)
    match = expr.search(string)
    if match and match.group(1):
        return match.group(1).strip()
    return ""
def grep(search, file):
    """Return True when the regex `search` matches the contents of `file`
    (multi-line mode); False when it does not match or the file is missing."""
    if not os.path.exists(file):
        return False
    with open(file) as handle:
        contents = handle.read()
    return bool(re.search(search, contents, re.M))
def ethernet_ifs():
    """Collect configured managed network interfaces.

    Returns a list of [name, ipv4_address, network, netmask] entries for
    every interface whose name starts with one of MANAGED_INTERFACE_NAMES
    and that has an IPv4 address assigned.
    """
    ifs = []
    for interface in netifaces.interfaces():
        if interface.startswith(MANAGED_INTERFACE_NAMES):
            if_detail = netifaces.ifaddresses(interface)
            # check if interface is configured
            if netifaces.AF_INET in if_detail:
                addr = if_detail[netifaces.AF_INET][0]['addr']
                netmask = if_detail[netifaces.AF_INET][0]['netmask']
                # strNormal(0) presumably yields the bare network address
                # without a prefix length — confirm with the IPy docs.
                network = IP(addr).make_net(netmask).strNormal(0)
                ifs.append([interface, addr, network, netmask])
    return ifs
def get_domain():
    """
    Return the current domain
    (everything after the first dot of the FQDN), or "" when it cannot
    be determined.
    """
    from socket import getfqdn
    try:
        domain = '.'.join(getfqdn().split('.')[1:])
    except Exception:
        # The original bare `except:` fell into a handler that called the
        # undefined name `_`, which itself raised NameError; fall back to
        # an empty domain instead.
        domain = ""
    return domain
class Singleton(type):
    """Metaclass that caches the first instance of a class and returns it
    for every subsequent construction."""

    def __init__(cls, name, bases, dict):
        super(Singleton, cls).__init__(name, bases, dict)
        # No instance exists until the class is first called.
        cls.instance = None

    def __call__(cls, *args, **kw):
        existing = cls.instance
        if existing is not None:
            return existing
        cls.instance = super(Singleton, cls).__call__(*args, **kw)
        return cls.instance
|
"""
A module containing definitions for base plugin classes for swk.
swk - A tiny extendable utility for running commands against multiple hosts.
Copyright (C) 2016 Pavel "trueneu" Gurkov
see swk/main.py for more information on License and contacts
"""
import abc
import shlex
import logging
class SWKPlugin(object):
    """
    A base class in SWK plugins hierarchy.

    Never used directly; concrete plugins derive from the command/parser
    subclasses below.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        """
        Constructor:
        stores every keyword argument as a `_<name>` attribute.
        """
        for name, value in kwargs.items():
            setattr(self, "_{0}".format(name), value)
class SWKCommandPlugin(SWKPlugin):
    """
    Base class for command plugins.

    Derive from this class for your plugin to be recognized as a command
    executer.

    _commands is a class attr, a dict of dicts:
        {"command_name": {"requires_hostlist": True/False, "help": str}, ...}
    where "requires_hostlist" says whether the command works with hosts and
    "help" is shown by the shell's 'help <command_name>'.

    _commands_help_message is a class attr, a str holding the plugin's name
    and a brief description of all its commands.
    """
    _commands = dict()  #: this
    _commands_help_message = ""

    @staticmethod
    def _shlex_quoting_split(string):
        """Split `string` shell-style, honoring single quotes only and
        treating nothing as a comment."""
        splitter = shlex.shlex(string)
        splitter.quotes = "'"
        splitter.whitespace_split = True
        splitter.commenters = ''
        return list(splitter)

    def __init__(self, *args, **kwargs):
        """
        Constructor.

        Attributes supplied by the framework on construction:
        self._command — string with the command invoked;
        self._hostlist — the expanded hostlist, when the command needs one;
        self._command_args — remainder of the command line (re-split here
        with quote-aware shlex splitting);
        plus every key of the plugin's config-file section (named after the
        plugin class) as self._key, and the special values self._swk_dir,
        self._swk_path, self._cwd and self._cache_directory.
        """
        super(SWKCommandPlugin, self).__init__(*args, **kwargs)
        has_args = (hasattr(self, "_command_args")
                    and len(self._command_args) > 0
                    and (self._command_args[0] is not None))
        if has_args:
            logging.debug("self._command_args received: {cmdargs}".format(
                cmdargs=self._command_args))
            tokens = list()
            for raw_arg in self._command_args:
                tokens.extend(self._shlex_quoting_split(raw_arg))
            self._command_args = tokens

    @classmethod
    def get_commands(cls):
        """Return the command names this plugin provides."""
        return cls._commands.keys()

    @classmethod
    def get_command_help(cls, command):
        """Return the help string registered for `command`."""
        return cls._commands.get(command).get('help', 'no help provided\n')

    @classmethod
    def requires_hostlist(cls, command):
        """Tell whether `command` expects an expanded hostlist (default True)."""
        return cls._commands.get(command).get('requires_hostlist', True)

    @classmethod
    def commands_help(cls):
        """Return the plugin-wide commands help blurb."""
        return cls._commands_help_message

    @abc.abstractmethod
    def run_command(self):
        """
        Called when SWK has done all the preparations and needs the plugin
        to actually execute the command. Redefine it; determine which
        command was invoked and process it.
        """
class SWKParserPlugin(SWKPlugin):
    """
    Base class for parser plugins.

    Derive from this class for your plugin to be recognized as a hostlist
    parser.

    _parsers is a class attr, a dict of dicts:
        {"hostlist_modifier": {"help": str}, ...}
    where "help" is shown by the shell's 'help <hostlist_modifier>'.

    _parsers_help_message is a class attr, a str with a brief description
    of all the hostlist modifiers available.
    """
    _parsers = dict()
    _parsers_help_message = ""

    def __init__(self, *args, **kwargs):
        """
        Constructor.

        Attributes supplied by the framework on construction:
        self._hostgroup — the hostgroup string to be parsed;
        self._hostgroup_modifier — the single-character parser specifier;
        plus every key of the plugin's config-file section (named after the
        plugin class) as self._key.
        """
        super(SWKParserPlugin, self).__init__(*args, **kwargs)

    @classmethod
    def get_parsers(cls):
        """Return the hostlist-modifier names this plugin handles."""
        return cls._parsers.keys()

    @classmethod
    def get_parser_help(cls, parser):
        """Return the help string registered for `parser`."""
        return cls._parsers.get(parser).get('help', 'no help provided\n')

    @abc.abstractmethod
    def parse(self):
        """
        Called when SWK needs a hostlist expression parsed. Redefine it;
        determine which modifier is used and act accordingly.
        :return: Must return list() of hostnames.
        """

    @classmethod
    def parsers_help(cls):
        """Return the plugin-wide parsers help blurb."""
        return cls._parsers_help_message
class SWKParsingError(Exception):
    """
    Base for parsing errors raised by parser plugins.

    Raise it on an unrecoverable error while parsing a host expression to
    stop the command from executing.
    """

    def __init__(self, message):
        """`message` is the text displayed to the user."""
        super(SWKParsingError, self).__init__(message)
class SWKCommandError(Exception):
    """
    Base for command-execution errors raised by command plugins.

    Raise it on an unrecoverable error while executing a command.
    """

    def __init__(self, message):
        """`message` is the text displayed to the user."""
        super(SWKCommandError, self).__init__(message)
|
from bs4 import BeautifulSoup
import os
import re
import requests
import subprocess
import sys
import tabulate
class OutColors:
    """ANSI escape sequences used to color terminal output."""
    DEFAULT = '\033[0m'          # reset attributes
    BW = '\033[1m'               # bold
    LG = '\033[0m\033[32m'       # green (reset first)
    LR = '\033[0m\033[31m'       # red (reset first)
    SEEDER = '\033[1m\033[32m'   # bold green
    LEECHER = '\033[1m\033[31m'  # bold red
def helper():
    """Print the search banner."""
    print(OutColors.DEFAULT + "\nSearch torrents from Kickass.to ;)")
def select_torrent():
    """Prompt for a torrent selection and return the raw input string."""
    torrent = input('>> ')
    return torrent
def download_torrent(url):
    """Download a .torrent file from a protocol-relative `url` into the CWD.

    Returns the path of the saved file; exits the program on request errors.
    """
    # Local filename derived from the URL's `title=` query fragment.
    fname = os.getcwd() + '/' + url.split('title=')[-1] + '.torrent'
    try:
        schema = ('https:')
        headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0' }
        r = requests.get(schema + url, headers=headers, stream=True)
        # Stream to disk in 1 KiB chunks to avoid holding the file in memory.
        with open(fname, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    f.flush()
    except requests.exceptions.RequestException as e:
        print('\n' + OutColors.LR + str(e))
        sys.exit(1)
    return fname
def aksearch():
    """Interactive search loop: query the site, list results, download a pick.

    Recurses into itself to return to the search prompt after each action.
    """
    helper()
    tmp_url = 'https://kickasstorrents.to/usearch/'
    query = input('Type query: ')
    url = tmp_url + query + '/'
    try:
        cont = requests.get(url)
    except requests.exceptions.RequestException as e:
        raise SystemExit('\n' + OutColors.LR + str(e))
    # check if no torrents found
    if not re.findall(r'Download torrent file', str(cont.content)):
        print('Torrents found: 0')
        aksearch()
    else:
        soup = BeautifulSoup(cont.content, 'lxml')
        # to use by age, seeders, and leechers
        # sample:
        # 700.46 MB
        # 5
        # 2 years
        # 1852
        # 130
        al = [s.get_text() for s in soup.find_all('td', {'class':'center'})]
        href = [a.get('href') for a in soup.find_all('a', {'title':'Download torrent file'})]
        size = [t.get_text() for t in soup.find_all('td', {'class':'nobr'}) ]
        title = [ti.get_text() for ti in soup.find_all('a', {'class':'cellMainLink'})]
        # Every 5 'center' cells belong to one row; slice out the columns.
        age = al[2::5]
        seeders = al[3::5]
        leechers = al[4::5]
        # for table printing: alternate rows are bold; seeders green,
        # leechers red.
        table = [[OutColors.BW + str(i+1) + OutColors.DEFAULT if (i+1) % 2 == 0 else i+1,
                  OutColors.BW + title[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else title[i],
                  OutColors.BW + size[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else size[i],
                  OutColors.BW + age[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else age[i],
                  OutColors.SEEDER + seeders[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else OutColors.LG + seeders[i] + OutColors.DEFAULT,
                  OutColors.LEECHER + leechers[i] + OutColors.DEFAULT if (i+1) % 2 == 0 else OutColors.LR + leechers[i] + OutColors.DEFAULT] for i in range(len(href))]
        print()
        print(tabulate.tabulate(table, headers=['No', 'Title', 'Size', 'Age', 'Seeders', 'Leechers']))
        # torrent selection
        if len(href) == 1:
            torrent = 1
        else:
            print('\nSelect torrent: [ 1 - ' + str(len(href)) + ' ] or [ M ] to go back to main menu or [ Q ] to quit')
            torrent = select_torrent()
        if torrent == 'Q' or torrent == 'q':
            sys.exit(0)
        elif torrent == 'M' or torrent == 'm':
            aksearch()
        else:
            if int(torrent) <= 0 or int(torrent) > len(href):
                print('Use eyeglasses...')
            else:
                print('Download >> ' + href[int(torrent)-1].split('title=')[-1] + '.torrent')
                fname = download_torrent(href[int(torrent)-1])
                subprocess.Popen(['xdg-open', fname], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            # Back to the search prompt after handling the selection.
            aksearch()
if __name__ == '__main__':
    try:
        aksearch()
    except KeyboardInterrupt:
        # Ctrl-C exits with a friendly message instead of a traceback.
        print('\nHuha!')
|
from flask import Flask, make_response, request, current_app, jsonify, json
from functools import update_wrapper
from os import path
FILEDIR = ''
def corsDecorator(f):
    """View decorator: disable caching and allow cross-origin access on the
    wrapped view's response."""
    def new_func(*args, **kwargs):
        resp = make_response(f(*args, **kwargs))
        resp.cache_control.no_cache = True  # Turn off caching
        resp.headers['Access-Control-Allow-Origin'] = '*'  # Add header to allow CORS
        return resp
    # update_wrapper preserves the view's name so Flask endpoints stay unique.
    return update_wrapper(new_func, f)
app = Flask(__name__, static_folder=FILEDIR)
@app.route('/')
def home():
    """Plain-text landing page identifying the service."""
    return "RGBWW Update Server"
@app.route('/version.json')
@corsDecorator
def versioninfo():
    """Describe the available firmware/webapp images as a JSON document.

    Uses None as the "not available" sentinel. The original compared ints
    with `is not 0`, which relies on CPython small-int interning and emits
    a SyntaxWarning on modern Pythons; the None sentinel makes the intent
    explicit without changing the produced JSON.
    """
    rom = None
    spiffs = None
    if path.isfile(path.join(FILEDIR, "rom0.bin")):
        rom = {}
        rom["fw_version"] = "unknown"
        rom["url"] = "http://" + request.host + "/rom0.bin"
    if path.isfile(path.join(FILEDIR, "spiff_rom.bin")):
        spiffs = {}
        spiffs["webapp_version"] = "unknown"
        spiffs["url"] = "http://" + request.host + "/spiff_rom.bin"
    if rom is not None and spiffs is not None:
        resp = {"rom": rom, "spiffs": spiffs}
    elif rom is not None:
        resp = {"rom": rom}
    elif spiffs is not None:
        resp = {"spiffs": spiffs}
    else:
        resp = {}
    return json.dumps(resp)
@app.route('/<path:path>')
@corsDecorator
def static_proxy(path):
    """Serve any other path from the static folder (the image binaries)."""
    return app.send_static_file(path)
if __name__ == '__main__':
    # Listen on all interfaces; port 80 typically requires root privileges.
    app.run(debug=True, host="0.0.0.0", port=80)
|
"""
Defines the behavior of Psychopy's Builder view window
Part of the PsychoPy library
Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
Distributed under the terms of the GNU General Public License (GPL).
"""
from __future__ import absolute_import, division, print_function
import os, sys
import glob
import copy
import traceback
import codecs
import numpy
from pkg_resources import parse_version
import wx.stc
from wx.lib import scrolledpanel
from wx.lib import platebtn
import wx.lib.agw.aui as aui # some versions of phoenix
try:
from wx.adv import PseudoDC
except ImportError:
from wx import PseudoDC
if parse_version(wx.__version__) < parse_version('4.0.3'):
wx.NewIdRef = wx.NewId
try:
from queue import Queue, Empty
except ImportError:
from Queue import Queue, Empty # python 2.x
from psychopy.localization import _translate
from ... import experiment, prefs
from .. import dialogs, icons
from ..themes import IconCache, ThemeMixin
from ..themes._themes import PsychopyDockArt, PsychopyTabArt, ThemeSwitcher
from psychopy import logging, constants, data
from psychopy.tools.filetools import mergeFolder
from .dialogs import (DlgComponentProperties, DlgExperimentProperties,
DlgCodeComponentProperties, DlgLoopProperties)
from ..utils import (PsychopyToolbar, PsychopyPlateBtn, WindowFrozen,
FileDropTarget, FrameSwitcher)
from psychopy.experiment import components
from builtins import str
from psychopy.app import pavlovia_ui
from psychopy.projects import pavlovia
from psychopy.scripts.psyexpCompile import generateScript
# Canonical (English) builder UI labels mapped to their translated forms;
# keys are the identifiers used throughout this module.
_localized = {
    'Field': _translate('Field'),
    'Default': _translate('Default'),
    'Favorites': _translate('Favorites'),
    'Stimuli': _translate('Stimuli'),
    'Responses': _translate('Responses'),
    'Custom': _translate('Custom'),
    'I/O': _translate('I/O'),
    'Add to favorites': _translate('Add to favorites'),
    'Remove from favorites': _translate('Remove from favorites'),
    # contextMenuLabels
    'edit': _translate('edit'),
    'remove': _translate('remove'),
    'copy': _translate('copy'),
    'move to top': _translate('move to top'),
    'move up': _translate('move up'),
    'move down': _translate('move down'),
    'move to bottom': _translate('move to bottom')
}
# Shortcut to the app-wide color scheme used by the builder widgets.
cs = ThemeMixin.appColors
class BuilderFrame(wx.Frame, ThemeMixin):
"""Defines construction of the Psychopy Builder Frame"""
def __init__(self, parent, id=-1, title='PsychoPy (Experiment Builder)',
pos=wx.DefaultPosition, fileName=None, frameData=None,
style=wx.DEFAULT_FRAME_STYLE, app=None):
if (fileName is not None) and (type(fileName) == bytes):
fileName = fileName.decode(sys.getfilesystemencoding())
self.app = app
self.dpi = self.app.dpi
# things the user doesn't set like winsize etc:
self.appData = self.app.prefs.appData['builder']
# things about the builder that the user can set:
self.prefs = self.app.prefs.builder
self.appPrefs = self.app.prefs.app
self.paths = self.app.prefs.paths
self.frameType = 'builder'
self.filename = fileName
self.htmlPath = None
self.project = None # type: pavlovia.PavloviaProject
self.btnHandles = {} # stores toolbar buttons so they can be altered
self.scriptProcess = None
self.stdoutBuffer = None
self.generateScript = generateScript
if fileName in self.appData['frames']:
self.frameData = self.appData['frames'][fileName]
else: # work out a new frame size/location
dispW, dispH = self.app.getPrimaryDisplaySize()
default = self.appData['defaultFrame']
default['winW'] = int(dispW * 0.75)
default['winH'] = int(dispH * 0.75)
if default['winX'] + default['winW'] > dispW:
default['winX'] = 5
if default['winY'] + default['winH'] > dispH:
default['winY'] = 5
self.frameData = dict(self.appData['defaultFrame']) # copy
# increment default for next frame
default['winX'] += 10
default['winY'] += 10
# we didn't have the key or the win was minimized / invalid
if self.frameData['winH'] == 0 or self.frameData['winW'] == 0:
self.frameData['winX'], self.frameData['winY'] = (0, 0)
if self.frameData['winY'] < 20:
self.frameData['winY'] = 20
wx.Frame.__init__(self, parent=parent, id=id, title=title,
pos=(int(self.frameData['winX']), int(
self.frameData['winY'])),
size=(int(self.frameData['winW']), int(
self.frameData['winH'])),
style=style)
self.Bind(wx.EVT_CLOSE, self.closeFrame)
#self.panel = wx.Panel(self)
# detect retina displays (then don't use double-buffering)
self.isRetina = self.GetContentScaleFactor() != 1
self.SetDoubleBuffered(not self.isRetina)
# create icon
if sys.platform != 'darwin':
# doesn't work on darwin and not necessary: handled by app bundle
iconFile = os.path.join(self.paths['resources'], 'builder.ico')
if os.path.isfile(iconFile):
self.SetIcon(wx.Icon(iconFile, wx.BITMAP_TYPE_ICO))
# create our panels
self.flowPanel = FlowPanel(frame=self)
self.routinePanel = RoutinesNotebook(self)
self.componentButtons = ComponentsPanel(self)
# menus and toolbars
self.toolbar = PsychopyToolbar(frame=self)
self.SetToolBar(self.toolbar)
self.makeMenus()
self.CreateStatusBar()
self.SetStatusText("")
# setup universal shortcuts
accelTable = self.app.makeAccelTable()
self.SetAcceleratorTable(accelTable)
# setup a default exp
if fileName is not None and os.path.isfile(fileName):
self.fileOpen(filename=fileName, closeCurrent=False)
else:
self.lastSavedCopy = None
# don't try to close before opening
self.fileNew(closeCurrent=False)
self.updateReadme()
# control the panes using aui manager
self._mgr = aui.AuiManager(self)
#self._mgr.SetArtProvider(PsychopyDockArt())
#self._art = self._mgr.GetArtProvider()
# Create panels
self._mgr.AddPane(self.routinePanel,
aui.AuiPaneInfo().
Name("Routines").Caption("Routines").CaptionVisible(True).
CloseButton(False).MaximizeButton(True).PaneBorder(False).
Center()) # 'center panes' expand
rtPane = self._mgr.GetPane('Routines')
self._mgr.AddPane(self.componentButtons,
aui.AuiPaneInfo().
Name("Components").Caption("Components").CaptionVisible(True).
RightDockable(True).LeftDockable(True).
CloseButton(False).PaneBorder(False).
Right())
compPane = self._mgr.GetPane('Components')
self._mgr.AddPane(self.flowPanel,
aui.AuiPaneInfo().
Name("Flow").Caption("Flow").CaptionVisible(True).
BestSize((8 * self.dpi, 2 * self.dpi)).
RightDockable(True).LeftDockable(True).
CloseButton(False).PaneBorder(False).
Bottom())
flowPane = self._mgr.GetPane('Flow')
# Arrange panes
if self.prefs['topFlow']:
flowPane.Top()
compPane.Left()
rtPane.CenterPane()
rtPane.CaptionVisible(True)
# tell the manager to 'commit' all the changes just made
self._mgr.Update()
# self.SetSizer(self.mainSizer) # not necessary for aui type controls
if self.frameData['auiPerspective']:
self._mgr.LoadPerspective(self.frameData['auiPerspective'])
self.SetMinSize(wx.Size(600, 400)) # min size for the whole window
self.SetSize(
(int(self.frameData['winW']), int(self.frameData['winH'])))
self.SendSizeEvent()
self._mgr.Update()
# self.SetAutoLayout(True)
self.Bind(wx.EVT_CLOSE, self.closeFrame)
self.app.trackFrame(self)
self.SetDropTarget(FileDropTarget(targetFrame=self))
self._applyAppTheme()
# def _applyAppTheme(self, target=None):
# # self.SetArtProvider(PsychopyDockArt())
# for c in self.GetChildren():
# if hasattr(c, '_applyAppTheme'):
# c._applyAppTheme()
# self.Refresh()
# self.Update()
# Synonymise Aui manager for use with theme mixin
def GetAuiManager(self):
return self._mgr
def makeMenus(self):
"""
Produces Menus for the Builder Frame
"""
# ---Menus---#000000#FFFFFF-------------------------------------------
menuBar = wx.MenuBar()
# ---_file---#000000#FFFFFF-------------------------------------------
self.fileMenu = wx.Menu()
menuBar.Append(self.fileMenu, _translate('&File'))
# create a file history submenu
self.fileHistoryMaxFiles = 10
self.fileHistory = wx.FileHistory(maxFiles=self.fileHistoryMaxFiles)
self.recentFilesMenu = wx.Menu()
self.fileHistory.UseMenu(self.recentFilesMenu)
for filename in self.appData['fileHistory']:
if os.path.exists(filename):
self.fileHistory.AddFileToHistory(filename)
self.Bind(wx.EVT_MENU_RANGE, self.OnFileHistory,
id=wx.ID_FILE1, id2=wx.ID_FILE9)
keys = self.app.keys
menu = self.fileMenu
menu.Append(
wx.ID_NEW,
_translate("&New\t%s") % keys['new'])
menu.Append(
wx.ID_OPEN,
_translate("&Open...\t%s") % keys['open'])
menu.AppendSubMenu(
self.recentFilesMenu,
_translate("Open &Recent"))
menu.Append(
wx.ID_SAVE,
_translate("&Save\t%s") % keys['save'],
_translate("Save current experiment file"))
menu.Append(
wx.ID_SAVEAS,
_translate("Save &as...\t%s") % keys['saveAs'],
_translate("Save current experiment file as..."))
exportMenu = menu.Append(
-1,
_translate("Export HTML...\t%s") % keys['exportHTML'],
_translate("Export experiment to html/javascript file"))
menu.Append(
wx.ID_CLOSE,
_translate("&Close file\t%s") % keys['close'],
_translate("Close current experiment"))
self.Bind(wx.EVT_MENU, self.app.newBuilderFrame, id=wx.ID_NEW)
self.Bind(wx.EVT_MENU, self.fileExport, id=exportMenu.GetId())
self.Bind(wx.EVT_MENU, self.fileSave, id=wx.ID_SAVE)
menu.Enable(wx.ID_SAVE, False)
self.Bind(wx.EVT_MENU, self.fileSaveAs, id=wx.ID_SAVEAS)
self.Bind(wx.EVT_MENU, self.fileOpen, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.commandCloseFrame, id=wx.ID_CLOSE)
self.fileMenu.AppendSeparator()
item = menu.Append(
wx.ID_PREFERENCES,
_translate("&Preferences\t%s") % keys['preferences'])
self.Bind(wx.EVT_MENU, self.app.showPrefs, item)
# item = menu.Append(wx.NewId(), "Plug&ins")
# self.Bind(wx.EVT_MENU, self.pluginManager, item)
menu.AppendSeparator()
msg = _translate("Close PsychoPy Builder")
item = menu.Append(wx.ID_ANY, msg)
self.Bind(wx.EVT_MENU, self.closeFrame, id=item.GetId())
self.fileMenu.AppendSeparator()
self.fileMenu.Append(wx.ID_EXIT,
_translate("&Quit\t%s") % keys['quit'],
_translate("Terminate the program"))
self.Bind(wx.EVT_MENU, self.quit, id=wx.ID_EXIT)
# ------------- edit ------------------------------------
self.editMenu = wx.Menu()
menuBar.Append(self.editMenu, _translate('&Edit'))
menu = self.editMenu
self._undoLabel = menu.Append(wx.ID_UNDO,
_translate("Undo\t%s") % keys['undo'],
_translate("Undo last action"),
wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.undo, id=wx.ID_UNDO)
self._redoLabel = menu.Append(wx.ID_REDO,
_translate("Redo\t%s") % keys['redo'],
_translate("Redo last action"),
wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.redo, id=wx.ID_REDO)
menu.Append(wx.ID_PASTE, _translate("&Paste\t%s") % keys['paste'])
self.Bind(wx.EVT_MENU, self.paste, id=wx.ID_PASTE)
# ---_view---#000000#FFFFFF-------------------------------------------
self.viewMenu = wx.Menu()
menuBar.Append(self.viewMenu, _translate('&View'))
menu = self.viewMenu
item = menu.Append(wx.ID_ANY,
_translate("Open Coder view"),
_translate("Open a new Coder view"))
self.Bind(wx.EVT_MENU, self.app.showCoder, item)
item = menu.Append(wx.ID_ANY,
_translate("Open Runner view"),
_translate("Open the Runner view"))
self.Bind(wx.EVT_MENU, self.app.showRunner, item)
menu.AppendSeparator()
item = menu.Append(wx.ID_ANY,
_translate("&Toggle readme\t%s") % self.app.keys[
'toggleReadme'],
_translate("Toggle Readme"))
self.Bind(wx.EVT_MENU, self.toggleReadme, item)
item = menu.Append(wx.ID_ANY,
_translate("&Flow Larger\t%s") % self.app.keys[
'largerFlow'],
_translate("Larger flow items"))
self.Bind(wx.EVT_MENU, self.flowPanel.increaseSize, item)
item = menu.Append(wx.ID_ANY,
_translate("&Flow Smaller\t%s") % self.app.keys[
'smallerFlow'],
_translate("Smaller flow items"))
self.Bind(wx.EVT_MENU, self.flowPanel.decreaseSize, item)
item = menu.Append(wx.ID_ANY,
_translate("&Routine Larger\t%s") % keys[
'largerRoutine'],
_translate("Larger routine items"))
self.Bind(wx.EVT_MENU, self.routinePanel.increaseSize, item)
item = menu.Append(wx.ID_ANY,
_translate("&Routine Smaller\t%s") % keys[
'smallerRoutine'],
_translate("Smaller routine items"))
self.Bind(wx.EVT_MENU, self.routinePanel.decreaseSize, item)
menu.AppendSeparator()
# Add Theme Switcher
self.themesMenu = ThemeSwitcher(self)
menu.AppendSubMenu(self.themesMenu,
_translate("Themes"))
# ---_tools ---#000000#FFFFFF-----------------------------------------
self.toolsMenu = wx.Menu()
menuBar.Append(self.toolsMenu, _translate('&Tools'))
menu = self.toolsMenu
item = menu.Append(wx.ID_ANY,
_translate("Monitor Center"),
_translate("To set information about your monitor"))
self.Bind(wx.EVT_MENU, self.app.openMonitorCenter, item)
item = menu.Append(wx.ID_ANY,
_translate("Compile\t%s") % keys['compileScript'],
_translate("Compile the exp to a script"))
self.Bind(wx.EVT_MENU, self.compileScript, item)
self.bldrRun = menu.Append(wx.ID_ANY,
_translate("Run\t%s") % keys['runScript'],
_translate("Run the current script"))
self.Bind(wx.EVT_MENU, self.runFile, self.bldrRun, id=self.bldrRun)
item = menu.Append(wx.ID_ANY,
_translate("Send to runner\t%s") % keys['runnerScript'],
_translate("Send current script to runner"))
self.Bind(wx.EVT_MENU, self.runFile, item)
menu.AppendSeparator()
item = menu.Append(wx.ID_ANY,
_translate("PsychoPy updates..."),
_translate("Update PsychoPy to the latest, or a "
"specific, version"))
self.Bind(wx.EVT_MENU, self.app.openUpdater, item)
if hasattr(self.app, 'benchmarkWizard'):
item = menu.Append(wx.ID_ANY,
_translate("Benchmark wizard"),
_translate("Check software & hardware, generate "
"report"))
self.Bind(wx.EVT_MENU, self.app.benchmarkWizard, item)
# ---_experiment---#000000#FFFFFF-------------------------------------
self.expMenu = wx.Menu()
menuBar.Append(self.expMenu, _translate('&Experiment'))
menu = self.expMenu
item = menu.Append(wx.ID_ANY,
_translate("&New Routine\t%s") % keys['newRoutine'],
_translate("Create a new routine (e.g. the trial "
"definition)"))
self.Bind(wx.EVT_MENU, self.addRoutine, item)
item = menu.Append(wx.ID_ANY,
_translate("&Copy Routine\t%s") % keys[
'copyRoutine'],
_translate("Copy the current routine so it can be "
"used in another exp"),
wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.onCopyRoutine, item)
item = menu.Append(wx.ID_ANY,
_translate("&Paste Routine\t%s") % keys[
'pasteRoutine'],
_translate("Paste the Routine into the current "
"experiment"),
wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.onPasteRoutine, item)
item = menu.Append(wx.ID_ANY,
_translate("&Rename Routine\t%s") % keys[
'renameRoutine'],
_translate("Change the name of this routine"))
self.Bind(wx.EVT_MENU, self.renameRoutine, item)
item = menu.Append(wx.ID_ANY,
_translate("Paste Component\t%s") % keys[
'pasteCompon'],
_translate(
"Paste the Component at bottom of the current "
"Routine"),
wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.onPasteCompon, item)
menu.AppendSeparator()
item = menu.Append(wx.ID_ANY,
_translate("Insert Routine in Flow"),
_translate(
"Select one of your routines to be inserted"
" into the experiment flow"))
self.Bind(wx.EVT_MENU, self.flowPanel.onInsertRoutine, item)
item = menu.Append(wx.ID_ANY,
_translate("Insert Loop in Flow"),
_translate("Create a new loop in your flow window"))
self.Bind(wx.EVT_MENU, self.flowPanel.insertLoop, item)
# ---_demos---#000000#FFFFFF------------------------------------------
# for demos we need a dict where the event ID will correspond to a
# filename
self.demosMenu = wx.Menu()
# unpack demos option
menu = self.demosMenu
item = menu.Append(wx.ID_ANY,
_translate("&Unpack Demos..."),
_translate(
"Unpack demos to a writable location (so that"
" they can be run)"))
self.Bind(wx.EVT_MENU, self.demosUnpack, item)
menu.AppendSeparator()
# add any demos that are found in the prefs['demosUnpacked'] folder
self.updateDemosMenu()
menuBar.Append(self.demosMenu, _translate('&Demos'))
# ---_onlineStudies---#000000#FFFFFF-------------------------------------------
self.pavloviaMenu = pavlovia_ui.menu.PavloviaMenu(parent=self)
menuBar.Append(self.pavloviaMenu, _translate("Pavlovia.org"))
# ---_window---#000000#FFFFFF-----------------------------------------
self.windowMenu = FrameSwitcher(self)
menuBar.Append(self.windowMenu,
_translate("Window"))
# ---_help---#000000#FFFFFF-------------------------------------------
self.helpMenu = wx.Menu()
menuBar.Append(self.helpMenu, _translate('&Help'))
menu = self.helpMenu
item = menu.Append(wx.ID_ANY,
_translate("&PsychoPy Homepage"),
_translate("Go to the PsychoPy homepage"))
self.Bind(wx.EVT_MENU, self.app.followLink, item)
self.app.urls[item.GetId()] = self.app.urls['psychopyHome']
item = menu.Append(wx.ID_ANY,
_translate("&PsychoPy Builder Help"),
_translate(
"Go to the online documentation for PsychoPy"
" Builder"))
self.Bind(wx.EVT_MENU, self.app.followLink, item)
self.app.urls[item.GetId()] = self.app.urls['builderHelp']
menu.AppendSeparator()
item = menu.Append(wx.ID_ANY,
_translate("&System Info..."),
_translate("Get system information."))
self.Bind(wx.EVT_MENU, self.app.showSystemInfo, id=item.GetId())
menu.AppendSeparator()
menu.Append(wx.ID_ABOUT, _translate(
"&About..."), _translate("About PsychoPy"))
self.Bind(wx.EVT_MENU, self.app.showAbout, id=wx.ID_ABOUT)
item = menu.Append(wx.ID_ANY,
_translate("&News..."),
_translate("News"))
self.Bind(wx.EVT_MENU, self.app.showNews, id=item.GetId())
self.SetMenuBar(menuBar)
    def commandCloseFrame(self, event):
        """Handle the Close menu command by closing the window.

        Actual teardown happens in closeFrame(), which is bound to
        wx.EVT_CLOSE and so is triggered by self.Close().
        """
        self.Close()
def closeFrame(self, event=None, checkSave=True):
"""Defines Frame closing behavior, such as checking for file
saving"""
# close file first (check for save) but no need to update view
okToClose = self.fileClose(updateViews=False, checkSave=checkSave)
if not okToClose:
if hasattr(event, 'Veto'):
event.Veto()
return
else:
# as of wx3.0 the AUI manager needs to be uninitialised explicitly
self._mgr.UnInit()
# is it the last frame?
lastFrame = len(self.app.getAllFrames()) == 1
quitting = self.app.quitting
if lastFrame and sys.platform != 'darwin' and not quitting:
self.app.quit(event)
else:
self.app.forgetFrame(self)
self.Destroy() # required
# Show Runner if hidden
if self.app.runner is not None:
self.app.showRunner()
self.app.updateWindowMenu()
    def quit(self, event=None):
        """Quit the whole PsychoPy application (delegates to the app)."""
        self.app.quit(event)
def fileNew(self, event=None, closeCurrent=True):
"""Create a default experiment (maybe an empty one instead)
"""
# Note: this is NOT the method called by the File>New menu item.
# That calls app.newBuilderFrame() instead
if closeCurrent: # if no exp exists then don't try to close it
if not self.fileClose(updateViews=False):
# close the existing (and prompt for save if necess)
return False
self.filename = 'untitled.psyexp'
self.exp = experiment.Experiment(prefs=self.app.prefs)
defaultName = 'trial'
# create the trial routine as an example
self.exp.addRoutine(defaultName)
self.exp.flow.addRoutine(
self.exp.routines[defaultName], pos=1) # add it to flow
# add it to user's namespace
self.exp.namespace.add(defaultName, self.exp.namespace.user)
routine = self.exp.routines[defaultName]
## add an ISI component by default
# components = self.componentButtons.components
# Static = components['StaticComponent']
# ISI = Static(self.exp, parentName=defaultName, name='ISI',
# startType='time (s)', startVal=0.0,
# stopType='duration (s)', stopVal=0.5)
# routine.addComponent(ISI)
self.resetUndoStack()
self.setIsModified(False)
self.updateAllViews()
self.app.updateWindowMenu()
    def fileOpen(self, event=None, filename=None, closeCurrent=True):
        """Open a FileDialog, then load the file if possible.

        Returns 0 if the open dialog is cancelled, False if closing the
        current experiment is cancelled, otherwise None after loading.
        If a .py file is chosen it is opened in Coder instead.
        """
        if filename is None:
            _wld = "PsychoPy experiments (*.psyexp)|*.psyexp|Any file (*.*)|*"
            dlg = wx.FileDialog(self, message=_translate("Open file ..."),
                                style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST,
                                wildcard=_translate(_wld))
            if dlg.ShowModal() != wx.ID_OK:
                return 0
            filename = dlg.GetPath()
        # did user try to open a script in Builder?
        if filename.endswith('.py'):
            self.app.showCoder()  # ensures that a coder window exists
            self.app.coder.setCurrentDoc(filename)
            self.app.coder.setFileModified(False)
            return
        with WindowFrozen(ctrl=self):
            # try to pause rendering until all panels updated
            if closeCurrent:
                if not self.fileClose(updateViews=False):
                    # close the existing (and prompt for save if necess)
                    return False
            self.exp = experiment.Experiment(prefs=self.app.prefs)
            try:
                self.exp.loadFromXML(filename)
            except Exception:
                # deliberately non-fatal: report and continue with whatever
                # part of the experiment was loaded
                print(u"Failed to load {}. Please send the following to"
                      u" the PsychoPy user list".format(filename))
                traceback.print_exc()
                logging.flush()
            self.resetUndoStack()
            self.setIsModified(False)
            self.filename = filename
            # routinePanel.addRoutinePage() is done in
            # routinePanel.redrawRoutines(), called by self.updateAllViews()
            # update the views
            self.updateAllViews()  # if frozen effect will be visible on thaw
        self.updateReadme()
        self.fileHistory.AddFileToHistory(filename)
        self.htmlPath = None  # so we won't accidentally save to other html exp
        if self.app.runner:
            self.app.runner.addTask(fileName=self.filename)  # Add to Runner
        try:
            self.project = pavlovia.getProject(filename)
        except Exception as e:  # failed for
            # best-effort: missing/broken Pavlovia project must not block open
            self.project = None
            print(e)
        self.app.updateWindowMenu()
def fileSave(self, event=None, filename=None):
"""Save file, revert to SaveAs if the file hasn't yet been saved
"""
if filename is None:
filename = self.filename
if filename.startswith('untitled'):
if not self.fileSaveAs(filename):
return False # the user cancelled during saveAs
else:
filename = self.exp.saveToXML(filename)
self.fileHistory.AddFileToHistory(filename)
self.setIsModified(False)
# if export on save then we should have an html file to update
if self._getExportPref('on save') and os.path.split(filename)[0]:
self.filename = filename
self.fileExport(htmlPath=self.htmlPath)
return True
def fileSaveAs(self, event=None, filename=None):
"""Defines Save File as Behavior
"""
shortFilename = self.getShortFilename()
expName = self.exp.getExpName()
if (not expName) or (shortFilename == expName):
usingDefaultName = True
else:
usingDefaultName = False
if filename is None:
filename = self.filename
initPath, filename = os.path.split(filename)
_w = "PsychoPy experiments (*.psyexp)|*.psyexp|Any file (*.*)|*"
if sys.platform != 'darwin':
_w += '.*'
wildcard = _translate(_w)
returnVal = False
dlg = wx.FileDialog(
self, message=_translate("Save file as ..."), defaultDir=initPath,
defaultFile=filename, style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT,
wildcard=wildcard)
if dlg.ShowModal() == wx.ID_OK:
newPath = dlg.GetPath()
# update exp name
# if user has not manually renamed experiment
if usingDefaultName:
newShortName = os.path.splitext(
os.path.split(newPath)[1])[0]
self.exp.setExpName(newShortName)
# actually save
self.fileSave(event=None, filename=newPath)
self.filename = newPath
returnVal = 1
dlg.Destroy()
self.updateWindowTitle()
return returnVal
def fileExport(self, event=None, htmlPath=None):
"""Exports the script as an HTML file (PsychoJS library)
"""
# get path if not given one
expPath, expName = os.path.split(self.filename)
if htmlPath is None:
htmlPath = self._getHtmlPath(self.filename)
if not htmlPath:
return
dlg = ExportFileDialog(self, wx.ID_ANY,
title=_translate("Export HTML file"),
filePath=htmlPath,
exp=self.exp)
export = dlg.exportOnSave
if self.exp.settings.params['exportHTML'].val == 'manually':
retVal = dlg.ShowModal()
self.exp.settings.params['exportHTML'].val = export.GetString(export.GetCurrentSelection())
if retVal != wx.ID_OK: # User cancelled export
return False
exportPath = os.path.join(htmlPath, expName.replace('.psyexp', '.js'))
self.generateScript(experimentPath=exportPath,
exp=self.exp,
target="PsychoJS")
def getShortFilename(self):
"""returns the filename without path or extension
"""
return os.path.splitext(os.path.split(self.filename)[1])[0]
# def pluginManager(self, evt=None, value=True):
# """Show the plugin manger frame."""
# PluginManagerFrame(self).ShowModal()
def updateReadme(self):
"""Check whether there is a readme file in this folder and try to show
"""
# create the frame if we don't have one yet
if not hasattr(self, 'readmeFrame') or self.readmeFrame is None:
self.readmeFrame = ReadmeFrame(parent=self)
# look for a readme file
if self.filename and self.filename != 'untitled.psyexp':
dirname = os.path.dirname(self.filename)
possibles = glob.glob(os.path.join(dirname, 'readme*'))
if len(possibles) == 0:
possibles = glob.glob(os.path.join(dirname, 'Readme*'))
possibles.extend(glob.glob(os.path.join(dirname, 'README*')))
# still haven't found a file so use default name
if len(possibles) == 0:
self.readmeFilename = os.path.join(
dirname, 'readme.txt') # use this as our default
else:
self.readmeFilename = possibles[0] # take the first one found
else:
self.readmeFilename = None
self.readmeFrame.setFile(self.readmeFilename)
content = self.readmeFrame.ctrl.GetValue()
if content and self.prefs['alwaysShowReadme']:
self.showReadme()
def showReadme(self, evt=None, value=True):
"""Shows Readme file
"""
if not self.readmeFrame.IsShown():
self.readmeFrame.Show(value)
def toggleReadme(self, evt=None):
"""Toggles visibility of Readme file
"""
if self.readmeFrame is None:
self.updateReadme()
self.showReadme()
else:
self.readmeFrame.toggleVisible()
def OnFileHistory(self, evt=None):
"""get the file based on the menu ID
"""
fileNum = evt.GetId() - wx.ID_FILE1
path = self.fileHistory.GetHistoryFile(fileNum)
self.fileOpen(filename=path)
# add it back to the history so it will be moved up the list
self.fileHistory.AddFileToHistory(path)
def checkSave(self):
"""Check whether we need to save before quitting
"""
if hasattr(self, 'isModified') and self.isModified:
self.Show(True)
self.Raise()
self.app.SetTopWindow(self)
msg = _translate('Experiment %s has changed. Save before '
'quitting?') % self.filename
dlg = dialogs.MessageDialog(self, msg, type='Warning')
resp = dlg.ShowModal()
if resp == wx.ID_CANCEL:
return False # return, don't quit
elif resp == wx.ID_YES:
if not self.fileSave():
return False # user might cancel during save
elif resp == wx.ID_NO:
pass # don't save just quit
return True
    def fileClose(self, event=None, checkSave=True, updateViews=True):
        """Close the current experiment, persisting frame layout and history.

        Returns False if the user cancelled the save check, otherwise 1.
        The frame itself stays open with a fresh 'untitled.psyexp'.
        """
        if checkSave:
            ok = self.checkSave()
            if not ok:
                return False  # user cancelled
        # snapshot per-file frame data (copy the default so we don't mutate it)
        if self.filename is None:
            frameData = self.appData['defaultFrame']
        else:
            frameData = dict(self.appData['defaultFrame'])
            self.appData['prevFiles'].append(self.filename)
            # get size and window layout info
        if self.IsIconized():
            self.Iconize(False)  # will return to normal mode to get size info
            frameData['state'] = 'normal'
        elif self.IsMaximized():
            # will briefly return to normal mode to get size info
            self.Maximize(False)
            frameData['state'] = 'maxim'
        else:
            frameData['state'] = 'normal'
        frameData['auiPerspective'] = self._mgr.SavePerspective()
        frameData['winW'], frameData['winH'] = self.GetSize()
        frameData['winX'], frameData['winY'] = self.GetPosition()
        # truncate history to the recent-most last N unique files, where
        # N = self.fileHistoryMaxFiles, as defined in makeMenus()
        for ii in range(self.fileHistory.GetCount()):
            self.appData['fileHistory'].append(
                self.fileHistory.GetHistoryFile(ii))
        # fileClose gets calls multiple times, so remove redundancy
        # while preserving order; end of the list is recent-most:
        tmp = []
        fhMax = self.fileHistoryMaxFiles
        for f in self.appData['fileHistory'][-3 * fhMax:]:
            if f not in tmp:
                tmp.append(f)
        self.appData['fileHistory'] = copy.copy(tmp[-fhMax:])
        # assign the data to this filename
        self.appData['frames'][self.filename] = frameData
        # save the display data only for those frames in the history:
        tmp2 = {}
        for f in self.appData['frames']:
            if f in self.appData['fileHistory']:
                tmp2[f] = self.appData['frames'][f]
        self.appData['frames'] = copy.copy(tmp2)
        # close self
        self.routinePanel.removePages()
        self.filename = 'untitled.psyexp'
        # add the current exp as the start point for undo:
        self.resetUndoStack()
        if updateViews:
            self.updateAllViews()
        return 1
    def updateAllViews(self):
        """Updates Flow Panel, Routine Panel, and Window Title simultaneously
        """
        self.flowPanel.draw()  # redraw the flow diagram
        self.routinePanel.redrawRoutines()  # rebuild each routine's page
        self.updateWindowTitle()  # title reflects current filename
def updateWindowTitle(self, newTitle=None):
"""Defines behavior to update window Title
"""
if newTitle is None:
shortName = os.path.split(self.filename)[-1]
newTitle = '%s - PsychoPy Builder' % (shortName)
self.SetTitle(newTitle)
def setIsModified(self, newVal=None):
"""Sets current modified status and updates save icon accordingly.
This method is called by the methods fileSave, undo, redo,
addToUndoStack and it is usually preferably to call those
than to call this directly.
Call with ``newVal=None``, to only update the save icon(s)
"""
if newVal is None:
newVal = self.getIsModified()
else:
self.isModified = newVal
if hasattr(self, 'bldrBtnSave'):
self.toolbar.EnableTool(self.bldrBtnSave.Id, newVal)
self.fileMenu.Enable(wx.ID_SAVE, newVal)
    def getIsModified(self):
        """Return True if the experiment has unsaved changes."""
        return self.isModified
    def resetUndoStack(self):
        """Reset the undo stack. do *immediately after* creating a new exp.

        Implicitly calls addToUndoStack() using the current exp as the state
        """
        self.currentUndoLevel = 1  # 1 is current, 2 is back one step...
        self.currentUndoStack = []
        self.addToUndoStack()
        self.updateUndoRedo()
        self.setIsModified(newVal=False)  # update save icon if needed
def addToUndoStack(self, action="", state=None):
"""Add the given ``action`` to the currentUndoStack, associated
with the @state@. ``state`` should be a copy of the exp
from *immediately after* the action was taken.
If no ``state`` is given the current state of the experiment is used.
If we are at end of stack already then simply append the action. If
not (user has done an undo) then remove orphan actions and append.
"""
if state is None:
state = copy.deepcopy(self.exp)
# remove actions from after the current level
if self.currentUndoLevel > 1:
self.currentUndoStack = self.currentUndoStack[
:-(self.currentUndoLevel - 1)]
self.currentUndoLevel = 1
# append this action
self.currentUndoStack.append({'action': action, 'state': state})
self.setIsModified(newVal=True) # update save icon if needed
self.updateUndoRedo()
def undo(self, event=None):
"""Step the exp back one level in the @currentUndoStack@ if possible,
and update the windows.
Returns the final undo level (1=current, >1 for further in past)
or -1 if redo failed (probably can't undo)
"""
if self.currentUndoLevel >= len(self.currentUndoStack):
return -1 # can't undo
self.currentUndoLevel += 1
state = self.currentUndoStack[-self.currentUndoLevel]['state']
self.exp = copy.deepcopy(state)
self.updateAllViews()
self.setIsModified(newVal=True) # update save icon if needed
self.updateUndoRedo()
return self.currentUndoLevel
def redo(self, event=None):
"""Step the exp up one level in the @currentUndoStack@ if possible,
and update the windows.
Returns the final undo level (0=current, >0 for further in past)
or -1 if redo failed (probably can't redo)
"""
if self.currentUndoLevel <= 1:
return -1 # can't redo, we're already at latest state
self.currentUndoLevel -= 1
self.exp = copy.deepcopy(
self.currentUndoStack[-self.currentUndoLevel]['state'])
self.updateUndoRedo()
self.updateAllViews()
self.setIsModified(newVal=True) # update save icon if needed
return self.currentUndoLevel
def paste(self, event=None):
"""This receives paste commands for all child dialog boxes as well
"""
foc = self.FindFocus()
if hasattr(foc, 'Paste'):
foc.Paste()
def updateUndoRedo(self):
"""Defines Undo and Redo commands for the window
"""
undoLevel = self.currentUndoLevel
# check undo
if undoLevel >= len(self.currentUndoStack):
# can't undo if we're at top of undo stack
label = _translate("Undo\t%s") % self.app.keys['undo']
enable = False
else:
action = self.currentUndoStack[-undoLevel]['action']
txt = _translate("Undo %(action)s\t%(key)s")
fmt = {'action': action, 'key': self.app.keys['undo']}
label = txt % fmt
enable = True
self._undoLabel.SetItemLabel(label)
if hasattr(self, 'bldrBtnUndo'):
self.toolbar.EnableTool(self.bldrBtnUndo.Id, enable)
self.editMenu.Enable(wx.ID_UNDO, enable)
# check redo
if undoLevel == 1:
label = _translate("Redo\t%s") % self.app.keys['redo']
enable = False
else:
action = self.currentUndoStack[-undoLevel + 1]['action']
txt = _translate("Redo %(action)s\t%(key)s")
fmt = {'action': action, 'key': self.app.keys['redo']}
label = txt % fmt
enable = True
self._redoLabel.SetItemLabel(label)
if hasattr(self, 'bldrBtnRedo'):
self.toolbar.EnableTool(self.bldrBtnRedo.Id, enable)
self.editMenu.Enable(wx.ID_REDO, enable)
def demosUnpack(self, event=None):
"""Get a folder location from the user and unpack demos into it."""
# choose a dir to unpack in
dlg = wx.DirDialog(parent=self, message=_translate(
"Location to unpack demos"))
if dlg.ShowModal() == wx.ID_OK:
unpackFolder = dlg.GetPath()
else:
return -1 # user cancelled
# ensure it's an empty dir:
if os.listdir(unpackFolder) != []:
unpackFolder = os.path.join(unpackFolder, 'PsychoPy3 Demos')
if not os.path.isdir(unpackFolder):
os.mkdir(unpackFolder)
mergeFolder(os.path.join(self.paths['demos'], 'builder'),
unpackFolder)
self.prefs['unpackedDemosDir'] = unpackFolder
self.app.prefs.saveUserPrefs()
self.updateDemosMenu()
def demoLoad(self, event=None):
"""Defines Demo Loading Event."""
fileDir = self.demos[event.GetId()]
files = glob.glob(os.path.join(fileDir, '*.psyexp'))
if len(files) == 0:
print("Found no psyexp files in %s" % fileDir)
else:
self.fileOpen(event=None, filename=files[0], closeCurrent=True)
def updateDemosMenu(self):
"""Update Demos menu as needed."""
unpacked = self.prefs['unpackedDemosDir']
if not unpacked:
return
# list available demos
demoList = sorted(glob.glob(os.path.join(unpacked, '*')))
self.demos = {wx.NewIdRef(): demoList[n]
for n in range(len(demoList))}
for thisID in self.demos:
junk, shortname = os.path.split(self.demos[thisID])
if (shortname.startswith('_') or
shortname.lower().startswith('readme.')):
continue # ignore 'private' or README files
self.demosMenu.Append(thisID, shortname)
self.Bind(wx.EVT_MENU, self.demoLoad, id=thisID)
def runFile(self, event=None):
"""Open Runner for running the psyexp file."""
if not os.path.exists(self.filename):
ok = self.fileSave(self.filename)
if not ok:
return # save file before compiling script
if self.getIsModified():
ok = self.fileSave(self.filename)
if not ok:
return # save file before compiling script
self.app.showRunner()
self.stdoutFrame.addTask(fileName=self.filename)
self.app.runner.Raise()
if event:
if event.Id in [self.bldrBtnRun.Id, self.bldrRun.Id]:
self.app.runner.panel.runLocal(event)
else:
self.app.showRunner()
def onCopyRoutine(self, event=None):
"""copy the current routine from self.routinePanel
to self.app.copiedRoutine.
"""
r = copy.deepcopy(self.routinePanel.getCurrentRoutine())
if r is not None:
self.app.copiedRoutine = r
def onPasteRoutine(self, event=None):
"""Paste the current routine from self.app.copiedRoutine to a new page
in self.routinePanel after promting for a new name.
"""
if self.app.copiedRoutine is None:
return -1
origName = self.app.copiedRoutine.name
defaultName = self.exp.namespace.makeValid(origName)
msg = _translate('New name for copy of "%(copied)s"? [%(default)s]')
vals = {'copied': origName, 'default': defaultName}
message = msg % vals
dlg = wx.TextEntryDialog(self, message=message,
caption=_translate('Paste Routine'))
if dlg.ShowModal() == wx.ID_OK:
routineName = dlg.GetValue()
newRoutine = copy.deepcopy(self.app.copiedRoutine)
if not routineName:
routineName = defaultName
newRoutine.name = self.exp.namespace.makeValid(routineName)
newRoutine.params['name'] = newRoutine.name
self.exp.namespace.add(newRoutine.name)
# add to the experiment
self.exp.addRoutine(newRoutine.name, newRoutine)
for newComp in newRoutine: # routine == list of components
newName = self.exp.namespace.makeValid(newComp.params['name'])
self.exp.namespace.add(newName)
newComp.params['name'].val = newName
# could do redrawRoutines but would be slower?
self.routinePanel.addRoutinePage(newRoutine.name, newRoutine)
self.addToUndoStack("PASTE Routine `%s`" % newRoutine.name)
dlg.Destroy()
def onPasteCompon(self, event=None):
"""
Paste the copied Component (if there is one) into the current
Routine
"""
routinePage = self.routinePanel.getCurrentPage()
routinePage.pasteCompon()
def onURL(self, evt):
"""decompose the URL of a file and line number"""
# "C:\Program Files\wxPython...\samples\hangman\hangman.py"
filename = evt.GetString().split('"')[1]
lineNumber = int(evt.GetString().split(',')[1][5:])
self.app.showCoder()
self.app.coder.gotoLine(filename, lineNumber)
def setExperimentSettings(self, event=None, timeout=None):
"""Defines ability to save experiment settings
"""
component = self.exp.settings
# does this component have a help page?
if hasattr(component, 'url'):
helpUrl = component.url
else:
helpUrl = None
title = '%s Properties' % self.exp.getExpName()
dlg = DlgExperimentProperties(frame=self, title=title,
params=component.params,
helpUrl=helpUrl, order=component.order,
timeout=timeout)
if dlg.OK:
self.addToUndoStack("EDIT experiment settings")
self.setIsModified(True)
    def addRoutine(self, event=None):
        """Defines ability to add routine in the routine panel
        """
        # routine panel prompts for the new routine's name
        self.routinePanel.createNewRoutine()
def renameRoutine(self, name, event=None, returnName=True):
"""Defines ability to rename routine in the routine panel
"""
# get notebook details
currentRoutine = self.routinePanel.getCurrentPage()
currentRoutineIndex = self.routinePanel.GetPageIndex(currentRoutine)
routine = self.routinePanel.GetPage(
self.routinePanel.GetSelection()).routine
oldName = routine.name
msg = _translate("What is the new name for the Routine?")
dlg = wx.TextEntryDialog(self, message=msg, value=oldName,
caption=_translate('Rename'))
exp = self.exp
if dlg.ShowModal() == wx.ID_OK:
name = dlg.GetValue()
# silently auto-adjust the name to be valid, and register in the
# namespace:
name = exp.namespace.makeValid(
name, prefix='routine')
if oldName in self.exp.routines:
# Swap old with new names
self.exp.routines[oldName].name = name
self.exp.routines[name] = self.exp.routines.pop(oldName)
for comp in self.exp.routines[name]:
comp.parentName = name
self.exp.namespace.rename(oldName, name)
self.routinePanel.renameRoutinePage(currentRoutineIndex, name)
self.addToUndoStack("`RENAME Routine `%s`" % oldName)
dlg.Destroy()
self.flowPanel.draw()
def compileScript(self, event=None):
"""Defines compile script button behavior"""
fullPath = self.filename.replace('.psyexp', '.py')
self.generateScript(experimentPath=fullPath, exp=self.exp)
self.app.showCoder() # make sure coder is visible
self.app.coder.fileNew(filepath=fullPath)
self.app.coder.fileReload(event=None, filename=fullPath)
    @property
    def stdoutFrame(self):
        """Return the Runner frame that receives stdout, creating it lazily."""
        # spawn the Runner on first access if it doesn't exist yet
        if not self.app.runner:
            self.app.runner = self.app.showRunner()
        return self.app.runner
def _getHtmlPath(self, filename):
expPath = os.path.split(filename)[0]
if not os.path.isdir(expPath):
retVal = self.fileSave()
if retVal:
return self._getHtmlPath(self.filename)
else:
return False
htmlPath = os.path.join(expPath, self.exp.htmlFolder)
return htmlPath
def _getExportPref(self, pref):
"""Returns True if pref matches exportHTML preference"""
if pref.lower() not in [prefs.lower() for prefs in self.exp.settings.params['exportHTML'].allowedVals]:
raise ValueError("'{}' is not an allowed value for {}".format(pref, 'exportHTML'))
exportHtml = str(self.exp.settings.params['exportHTML'].val).lower()
if exportHtml == pref.lower():
return True
def onPavloviaSync(self, evt=None):
if self._getExportPref('on sync'):
htmlPath = self._getHtmlPath(self.filename)
if htmlPath:
self.fileExport(htmlPath=htmlPath)
else:
return
self.enablePavloviaButton(['pavloviaSync', 'pavloviaRun'], False)
try:
retVal = pavlovia_ui.syncProject(parent=self, project=self.project)
pavlovia.knownProjects.save() # update projects.json
self.gitFeedback(retVal)
finally:
self.enablePavloviaButton(['pavloviaSync', 'pavloviaRun'], True)
    def onPavloviaRun(self, evt=None):
        """Sync the experiment to Pavlovia and open its run page in the
        default browser.

        The save/export/sync steps depend on the 'exportHTML' preference:
        'on save' saves first (export happens as part of saving),
        'on sync' exports HTML explicitly, and 'manually' only
        exports/syncs when the HTML folder or project record is missing.
        """
        if self._getExportPref('on save'):
            self.fileSave()
            retVal = pavlovia_ui.syncProject(parent=self, project=self.project,
                                             closeFrameWhenDone=False)
            self.gitFeedback(retVal)
        elif self._getExportPref('on sync'):
            self.fileExport(htmlPath=self._getHtmlPath(self.filename))
            retVal = pavlovia_ui.syncProject(parent=self, project=self.project,
                                             closeFrameWhenDone=False)
            self.gitFeedback(retVal)
        elif self._getExportPref('manually'):
            # Check htmlpath and projects exists
            noHtmlFolder = not os.path.isdir(self._getHtmlPath(self.filename))
            noProject = not bool(pavlovia.getProject(self.filename))
            if noHtmlFolder:
                self.fileExport()
            if noProject or noHtmlFolder:
                retVal = pavlovia_ui.syncProject(parent=self, project=self.project,
                                                 closeFrameWhenDone=False)
                self.gitFeedback(retVal)
        if self.project:
            # mark active and launch the run URL in the system browser
            htmlPath = self.exp.settings.params['HTML path'].val
            self.project.pavloviaStatus = 'ACTIVATED'
            url = "https://pavlovia.org/run/{}/{}".format(self.project.id, htmlPath)
            wx.LaunchDefaultBrowser(url)
def enablePavloviaButton(self, buttons, enable):
"""
Enables or disables Pavlovia buttons.
Parameters
----------
name: string, list
Takes single buttons 'pavloviaSync', 'pavloviaRun', 'pavloviaSearch', 'pavloviaUser',
or multiple buttons in string 'pavloviaSync, pavloviaRun',
or comma separated list of strings ['pavloviaSync', 'pavloviaRun', ...].
enable: bool
True enables and False disables the button
"""
if isinstance(buttons, str):
buttons = buttons.split(',')
for button in buttons:
self.toolbar.EnableTool(self.btnHandles[button.strip(' ')].GetId(), enable)
    def setPavloviaUser(self, user):
        """Update the UI for the given Pavlovia user. Currently a no-op
        placeholder.

        Parameters
        ----------
        user : object
            The logged-in Pavlovia user.
        """
        # TODO: update user icon on button to user avatar
        pass
    def gitFeedback(self, val):
        """
        Set feedback color for the Pavlovia Sync toolbar button.

        Flashes a coloured globe on the sync button for ~1.5s, then
        restores the original bitmap via delayed wx.CallLater calls.

        Parameters
        ----------
        val: int
            Status of git sync. 1 for SUCCESS (green), 0 or -1 for FAIL (RED)
        """
        feedbackTime = 1500  # ms before the button reverts
        colour = {0: "red", -1: "red", 1: "green"}
        toolbarSize = 32
        # Store original
        origBtn = self.btnHandles['pavloviaSync'].NormalBitmap
        # Create new feedback bitmap
        feedbackBmp = self.app.iconCache.getBitmap(
            name='{}globe.png'.format(colour[val]),
            size=toolbarSize)
        # Set feedback button
        self.btnHandles['pavloviaSync'].SetNormalBitmap(feedbackBmp)
        self.toolbar.Realize()
        self.toolbar.Refresh()
        # Reset button to default state after time; Realize/Refresh run
        # 50ms later so the bitmap swap has already happened
        wx.CallLater(feedbackTime, self.btnHandles['pavloviaSync'].SetNormalBitmap, origBtn)
        wx.CallLater(feedbackTime + 50, self.toolbar.Realize)
        wx.CallLater(feedbackTime + 50, self.toolbar.Refresh)
@property
def project(self):
"""A PavloviaProject object if one is known for this experiment
"""
if 'project' in self.__dict__ and self.__dict__['project']:
return self.__dict__['project']
elif self.filename and pavlovia.getProject(self.filename):
return pavlovia.getProject(self.filename)
else:
return None
@project.setter
def project(self, project):
self.__dict__['project'] = project
class RoutinesNotebook(aui.AuiNotebook, ThemeMixin):
    """A notebook that stores one or more routines.

    Each page is a RoutineCanvas; pages are kept in sync with the
    experiment's `routines` dict and its namespace.
    """
    def __init__(self, frame, id=-1):
        # parent Builder frame and running app, kept for callbacks
        self.frame = frame
        self.app = frame.app
        self.routineMaxSize = 2  # upper bound for appData['routineSize']
        self.appData = self.app.prefs.appData
        aui.AuiNotebook.__init__(self, frame, id)
        self.Bind(aui.EVT_AUINOTEBOOK_PAGE_CLOSE, self.onClosePane)
        # double buffered better rendering except if retina
        self.SetDoubleBuffered(self.frame.IsDoubleBuffered())
        self._applyAppTheme()
        if not hasattr(self.frame, 'exp'):
            return  # we haven't yet added an exp
    def _applyAppTheme(self, target=None):
        """Apply the current app theme to the notebook and all its pages."""
        self.SetArtProvider(PsychopyTabArt())
        self.GetAuiManager().SetArtProvider(PsychopyDockArt())
        for index in range(self.GetPageCount()):
            page = self.GetPage(index)
            # double buffered better rendering except if retina
            self.SetDoubleBuffered(self.frame.IsDoubleBuffered())
            page._applyAppTheme()
        self.Refresh()
    def getCurrentRoutine(self):
        """Return the Routine of the currently-selected page, or None."""
        routinePage = self.getCurrentPage()
        if routinePage:
            return routinePage.routine
        return None  # no routine page currently selected
    def setCurrentRoutine(self, routine):
        """Select the notebook page showing the given routine (by identity)."""
        for ii in range(self.GetPageCount()):
            if routine is self.GetPage(ii).routine:
                self.SetSelection(ii)
    def getCurrentPage(self):
        """Return the currently-selected page, or None if nothing selected."""
        if self.GetSelection() >= 0:
            return self.GetPage(self.GetSelection())
        return None
    def addRoutinePage(self, routineName, routine):
        """Add a new RoutineCanvas page for the given routine."""
        # routinePage = RoutinePage(parent=self, routine=routine)
        routinePage = RoutineCanvas(notebook=self, routine=routine)
        self.AddPage(routinePage, routineName)
    def renameRoutinePage(self, index, newName, ):
        """Update the tab label at `index` to `newName`."""
        self.SetPageText(index, newName)
    def removePages(self):
        """Delete every page (one per routine) from the notebook."""
        for ii in range(self.GetPageCount()):
            currId = self.GetSelection()
            self.DeletePage(currId)
    def createNewRoutine(self, returnName=False):
        """Ask the user for a name and add a new routine to the experiment.

        Returns the (auto-validated) routine name if `returnName` is True.
        """
        msg = _translate("What is the name for the new Routine? "
                         "(e.g. instr, trial, feedback)")
        dlg = wx.TextEntryDialog(self, message=msg,
                                 caption=_translate('New Routine'))
        exp = self.frame.exp
        routineName = None
        if dlg.ShowModal() == wx.ID_OK:
            routineName = dlg.GetValue()
            # silently auto-adjust the name to be valid, and register in the
            # namespace:
            routineName = exp.namespace.makeValid(
                routineName, prefix='routine')
            exp.namespace.add(routineName)  # add to the namespace
            exp.addRoutine(routineName)  # add to the experiment
            # then to the notebook:
            self.addRoutinePage(routineName, exp.routines[routineName])
            self.frame.addToUndoStack("NEW Routine `%s`" % routineName)
        dlg.Destroy()
        if returnName:
            return routineName
    def onClosePane(self, event=None):
        """Close the pane and remove the routine from the exp
        """
        routine = self.GetPage(event.GetSelection()).routine
        name = routine.name
        # update experiment object, namespace, and flow window (if this is
        # being used)
        if name in self.frame.exp.routines:
            # remove names of the routine and its components from namespace
            _nsp = self.frame.exp.namespace
            for c in self.frame.exp.routines[name]:
                _nsp.remove(c.params['name'].val)
            _nsp.remove(self.frame.exp.routines[name].name)
            del self.frame.exp.routines[name]
        if routine in self.frame.exp.flow:
            self.frame.exp.flow.removeComponent(routine)
            self.frame.flowPanel.draw()
        self.frame.addToUndoStack("REMOVE Routine `%s`" % (name))
    def increaseSize(self, event=None):
        """Increase the routine drawing size (capped at routineMaxSize)."""
        self.appData['routineSize'] = min(
            self.routineMaxSize, self.appData['routineSize'] + 1)
        with WindowFrozen(self):
            self.redrawRoutines()
    def decreaseSize(self, event=None):
        """Decrease the routine drawing size (floored at 0)."""
        self.appData['routineSize'] = max(0, self.appData['routineSize'] - 1)
        with WindowFrozen(self):
            self.redrawRoutines()
    def redrawRoutines(self):
        """Removes all the routines, adds them back (alphabetical order),
        sets current back to orig
        """
        currPage = self.GetSelection()
        self.removePages()
        displayOrder = sorted(self.frame.exp.routines.keys())  # alphabetical
        for routineName in displayOrder:
            self.addRoutinePage(
                routineName, self.frame.exp.routines[routineName])
        if currPage > -1:
            self.SetSelection(currPage)
class RoutineCanvas(wx.ScrolledWindow):
    """Represents a single routine (used as page in RoutinesNotebook).

    Draws each component of the routine as an icon + name + time bar on a
    time grid, using a wx.PseudoDC so that drawn regions double as
    clickable buttons (left-click edits, right-click shows a menu).
    """
    def __init__(self, notebook, id=wx.ID_ANY, routine=None):
        """This window is based heavily on the PseudoDC demo of wxPython
        """
        wx.ScrolledWindow.__init__(
            self, notebook, id, (0, 0), style=wx.BORDER_NONE | wx.VSCROLL)
        self.frame = notebook.frame
        self.app = self.frame.app
        self.dpi = self.app.dpi
        self.lines = []
        self.maxWidth = self.GetSize().GetWidth()
        self.maxHeight = 15 * self.dpi
        self.x = self.y = 0
        self.curLine = []
        self.drawing = False
        # drawSize indexes the 3-tuples of layout constants below (0..2)
        self.drawSize = self.app.prefs.appData['routineSize']
        # auto-rescale based on number of components and window size is jumpy
        # when switch between routines of diff drawing sizes
        self.iconSize = (24, 24, 48)[self.drawSize]  # only 24, 48 so far
        self.fontBaseSize = (1100, 1200, 1300)[self.drawSize]  # depends on OS?
        #self.scroller = PsychopyScrollbar(self, wx.VERTICAL)
        self.SetVirtualSize((self.maxWidth, self.maxHeight))
        self.SetScrollRate(self.dpi / 4, self.dpi / 4)
        self.routine = routine
        self.yPositions = None
        self.yPosTop = (25, 40, 60)[self.drawSize]
        # the step in Y between each component
        self.componentStep = (25, 32, 50)[self.drawSize]
        self.timeXposStart = (150, 150, 200)[self.drawSize]
        # the left hand edge of the icons:
        _scale = (1.3, 1.5, 1.5)[self.drawSize]
        self.iconXpos = self.timeXposStart - self.iconSize * _scale
        self.timeXposEnd = self.timeXposStart + 400  # onResize() overrides
        # create a PseudoDC to record our drawing
        self.pdc = PseudoDC()
        self.pen_cache = {}
        self.brush_cache = {}
        # vars for handling mouse clicks
        self.dragid = -1
        self.lastpos = (0, 0)
        # use the ID of the drawn icon to retrieve component name:
        self.componentFromID = {}
        self.contextMenuItems = ['copy', 'edit', 'remove',
                                 'move to top', 'move up',
                                 'move down', 'move to bottom']
        # labels are only for display, and allow localization
        self.contextMenuLabels = {k: _localized[k]
                                  for k in self.contextMenuItems}
        self.contextItemFromID = {}
        self.contextIDFromItem = {}
        for item in self.contextMenuItems:
            id = wx.NewIdRef()
            self.contextItemFromID[id] = item
            self.contextIDFromItem[item] = id
        self._applyAppTheme()
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_ERASE_BACKGROUND, lambda x: None)
        self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
        self.Bind(wx.EVT_MOUSEWHEEL, self.OnScroll)
        self.Bind(wx.EVT_SIZE, self.onResize)
        # crashes if drop on OSX:
        # self.SetDropTarget(FileDropTarget(builder = self.frame))
    def _applyAppTheme(self, target=None):
        """Synonymise app theme method with redraw method"""
        return self.redrawRoutine()
    def onResize(self, event):
        """Recompute the time-axis extent for the new window size."""
        self.sizePix = event.GetSize()
        self.timeXposStart = (150, 150, 200)[self.drawSize]
        self.timeXposEnd = self.sizePix[0] - (60, 80, 100)[self.drawSize]
        self.redrawRoutine()  # then redraw visible
    def ConvertEventCoords(self, event):
        """Convert a mouse event's coords from window space to scrolled
        canvas space (accounting for the current scroll position)."""
        xView, yView = self.GetViewStart()
        xDelta, yDelta = self.GetScrollPixelsPerUnit()
        return (event.GetX() + (xView * xDelta),
                event.GetY() + (yView * yDelta))
    def OffsetRect(self, r):
        """Offset the rectangle, r, to appear in the given pos in the window
        """
        xView, yView = self.GetViewStart()
        xDelta, yDelta = self.GetScrollPixelsPerUnit()
        r.OffsetXY(-(xView * xDelta), -(yView * yDelta))
    def OnMouse(self, event):
        """Dispatch mouse events: left-click edits a component,
        right-click shows its context menu, hover updates the status bar."""
        if event.LeftDown():
            x, y = self.ConvertEventCoords(event)
            icons = self.pdc.FindObjectsByBBox(x, y)
            if len(icons):
                self.editComponentProperties(
                    component=self.componentFromID[icons[0]])
        elif event.RightDown():
            x, y = self.ConvertEventCoords(event)
            icons = self.pdc.FindObjectsByBBox(x, y)
            menuPos = event.GetPosition()
            if self.app.prefs.builder['topFlow']:
                # width of components panel
                menuPos[0] += self.frame.componentButtons.GetSize()[0]
                # height of flow panel
                menuPos[1] += self.frame.flowPanel.GetSize()[1]
            if len(icons):
                self._menuComponent = self.componentFromID[icons[0]]
                self.showContextMenu(self._menuComponent, xy=menuPos)
        elif event.Dragging() or event.LeftUp():
            # dragging of components is not implemented; placeholders only
            if self.dragid != -1:
                pass
            if event.LeftUp():
                pass
        elif event.Moving():
            try:
                x, y = self.ConvertEventCoords(event)
                id = self.pdc.FindObjectsByBBox(x, y)[0]
                component = self.componentFromID[id]
                self.frame.SetStatusText("Component: "+component.params['name'].val)
            except IndexError:
                # not hovering over any drawn object
                self.frame.SetStatusText("")
    def OnScroll(self, event):
        """Scroll the canvas vertically with the mouse wheel."""
        xy = self.GetViewStart()
        multiplier = self.dpi / 1600
        self.Scroll(xy[0], xy[1] - event.WheelRotation*multiplier)
    def showContextMenu(self, component, xy):
        """Pop up the per-component context menu at position `xy`."""
        menu = wx.Menu()
        for item in self.contextMenuItems:
            id = self.contextIDFromItem[item]
            menu.Append(id, self.contextMenuLabels[item])
            menu.Bind(wx.EVT_MENU, self.onContextSelect, id=id)
        self.frame.PopupMenu(menu, xy)
        menu.Destroy()  # destroy to avoid mem leak
    def onContextSelect(self, event):
        """Perform a given action on the component chosen
        """
        op = self.contextItemFromID[event.GetId()]
        component = self._menuComponent
        r = self.routine
        if op == 'edit':
            self.editComponentProperties(component=component)
        elif op == 'copy':
            self.copyCompon(component=component)
        elif op == 'remove':
            r.removeComponent(component)
            self.frame.addToUndoStack(
                "REMOVE `%s` from Routine" % (component.params['name'].val))
            self.frame.exp.namespace.remove(component.params['name'].val)
        elif op.startswith('move'):
            # remove then re-insert at the requested position
            lastLoc = r.index(component)
            r.remove(component)
            if op == 'move to top':
                r.insert(0, component)
            if op == 'move up':
                r.insert(lastLoc - 1, component)
            if op == 'move down':
                r.insert(lastLoc + 1, component)
            if op == 'move to bottom':
                r.append(component)
            self.frame.addToUndoStack("MOVED `%s`" %
                                      component.params['name'].val)
        self.redrawRoutine()
        self._menuComponent = None
    def OnPaint(self, event):
        """Blit the recorded PseudoDC drawing to the screen, clipped to
        the update region."""
        # Create a buffered paint DC.  It will create the real
        # wx.PaintDC and then blit the bitmap to it when dc is
        # deleted.
        dc = wx.GCDC(wx.BufferedPaintDC(self))
        # we need to clear the dc BEFORE calling PrepareDC
        bg = wx.Brush(self.GetBackgroundColour())
        dc.SetBackground(bg)
        dc.Clear()
        # use PrepareDC to set position correctly
        self.PrepareDC(dc)
        # create a clipping rect from our position and size
        # and the Update Region
        xv, yv = self.GetViewStart()
        dx, dy = self.GetScrollPixelsPerUnit()
        x, y = (xv * dx, yv * dy)
        rgn = self.GetUpdateRegion()
        rgn.Offset(x, y)
        r = rgn.GetBox()
        # draw to the dc using the calculated clipping rect
        self.pdc.DrawToDCClipped(dc, r)
    def redrawRoutine(self):
        """Rebuild the PseudoDC drawing of the whole routine: static
        components, the time grid, then one row per normal component."""
        self.pdc.Clear()  # clear the screen
        self.pdc.RemoveAll()  # clear all objects (icon buttons)
        self.SetBackgroundColour(ThemeMixin.appColors['tab_bg'])
        # work out where the component names and icons should be from name
        # lengths
        self.setFontSize(self.fontBaseSize // self.dpi, self.pdc)
        longest = 0
        w = 50
        for comp in self.routine:
            name = comp.params['name'].val
            if len(name) > longest:
                longest = len(name)
                w = self.GetFullTextExtent(name)[0]
        self.timeXpos = w + (50, 50, 90)[self.drawSize]
        # separate components according to whether they are drawn in separate
        # row
        rowComponents = []
        staticCompons = []
        for n, component in enumerate(self.routine):
            if component.type == 'Static':
                staticCompons.append(component)
            else:
                rowComponents.append(component)
        # draw static, time grid, normal (row) comp:
        yPos = self.yPosTop
        yPosBottom = yPos + len(rowComponents) * self.componentStep
        # draw any Static Components first (below the grid)
        for component in staticCompons:
            bottom = max(yPosBottom, self.GetSize()[1])
            self.drawStatic(self.pdc, component, yPos, bottom)
        self.drawTimeGrid(self.pdc, yPos, yPosBottom)
        # normal components, one per row
        for component in rowComponents:
            self.drawComponent(self.pdc, component, yPos)
            yPos += self.componentStep
        # the 50 allows space for labels below the time axis
        self.SetVirtualSize((self.maxWidth, yPos + 50))
        self.Refresh()  # refresh the visible window after drawing (OnPaint)
        #self.scroller.Resize()
    def getMaxTime(self):
        """Return the max time to be drawn in the window
        """
        maxTime, nonSlip = self.routine.getMaxTime()
        if self.routine.hasOnlyStaticComp():
            maxTime = int(maxTime) + 1.0
        return maxTime
    def drawTimeGrid(self, dc, yPosTop, yPosBottom, labelAbove=True):
        """Draws the grid of lines and labels the time axes
        """
        tMax = self.getMaxTime() * 1.1
        xScale = self.getSecsPerPixel()
        xSt = self.timeXposStart
        xEnd = self.timeXposEnd
        # dc.SetId(wx.NewIdRef())
        dc.SetPen(wx.Pen(ThemeMixin.appColors['rt_timegrid']))
        dc.SetTextForeground(wx.Colour(ThemeMixin.appColors['rt_timegrid']))
        # draw horizontal lines on top and bottom
        dc.DrawLine(x1=xSt, y1=yPosTop,
                    x2=xEnd, y2=yPosTop)
        dc.DrawLine(x1=xSt, y1=yPosBottom,
                    x2=xEnd, y2=yPosBottom)
        # draw vertical time points
        # gives roughly 1/10 the width, but in rounded to base 10 of
        # 0.1,1,10...
        unitSize = 10 ** numpy.ceil(numpy.log10(tMax * 0.8)) / 10.0
        if tMax / unitSize < 3:
            # gives units of 2 (0.2,2,20)
            unitSize = 10 ** numpy.ceil(numpy.log10(tMax * 0.8)) / 50.0
        elif tMax / unitSize < 6:
            # gives units of 5 (0.5,5,50)
            unitSize = 10 ** numpy.ceil(numpy.log10(tMax * 0.8)) / 20.0
        for lineN in range(int(numpy.floor((tMax / unitSize)))):
            # vertical line:
            dc.DrawLine(xSt + lineN * unitSize / xScale, yPosTop - 4,
                        xSt + lineN * unitSize / xScale, yPosBottom + 4)
            # label above:
            dc.DrawText('%.2g' % (lineN * unitSize), xSt + lineN *
                        unitSize / xScale - 4, yPosTop - 30)
            if yPosBottom > 300:
                # if bottom of grid is far away then draw labels here too
                dc.DrawText('%.2g' % (lineN * unitSize), xSt + lineN *
                            unitSize / xScale - 4, yPosBottom + 10)
        # add a label
        self.setFontSize(self.fontBaseSize // self.dpi, dc)
        # y is y-half height of text
        dc.DrawText('t (sec)', xEnd + 5,
                    yPosTop - self.GetFullTextExtent('t')[1] / 2.0)
        # or draw bottom labels only if scrolling is turned on, virtual size >
        # available size?
        if yPosBottom > 300:
            # if bottom of grid is far away then draw labels there too
            # y is y-half height of text
            dc.DrawText('t (sec)', xEnd + 5,
                        yPosBottom - self.GetFullTextExtent('t')[1] / 2.0)
        dc.SetTextForeground(ThemeMixin.appColors['text'])
    def setFontSize(self, size, dc):
        """Set the given point size on both the window's font and `dc`."""
        font = self.GetFont()
        font.SetPointSize(size)
        dc.SetFont(font)
        self.SetFont(font)
    def drawStatic(self, dc, component, yPosTop, yPosBottom):
        """draw a static (ISI) component box"""
        # set an id for the region of this component (so it can
        # act as a button). see if we created this already.
        id = None
        for key in self.componentFromID:
            if self.componentFromID[key] == component:
                id = key
        if not id:  # then create one and add to the dict
            id = wx.NewIdRef()
            self.componentFromID[id] = component
        dc.SetId(id)
        # deduce start and stop times if possible
        startTime, duration, nonSlipSafe = component.getStartAndDuration()
        # ensure static comps are clickable (even if $code start or duration)
        unknownTiming = False
        if startTime is None:
            startTime = 0
            unknownTiming = True
        if duration is None:
            duration = 0  # minimal extent ensured below
            unknownTiming = True
        # calculate rectangle for component
        xScale = self.getSecsPerPixel()
        if component.params['disabled'].val:
            dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_static_disabled']))
            dc.SetPen(wx.Pen(ThemeMixin.appColors['rt_static_disabled']))
        else:
            dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_static']))
            dc.SetPen(wx.Pen(ThemeMixin.appColors['rt_static']))
        xSt = self.timeXposStart + startTime // xScale
        w = duration // xScale + 1  # +1 b/c border alpha=0 in dc.SetPen
        w = max(min(w, 10000), 2)  # ensure 2..10000 pixels
        h = yPosBottom - yPosTop
        # name label, position:
        name = component.params['name'].val  # "ISI"
        if unknownTiming:
            # flag it as not literally represented in time, e.g., $code
            # duration
            name += ' ???'
        nameW, nameH = self.GetFullTextExtent(name)[0:2]
        x = xSt + w // 2
        staticLabelTop = (0, 50, 60)[self.drawSize]
        y = staticLabelTop - nameH * 3
        fullRect = wx.Rect(x - 20, y, nameW, nameH)
        # draw the rectangle, draw text on top:
        dc.DrawRectangle(xSt, yPosTop - nameH * 4, w, h + nameH * 5)
        dc.DrawText(name, x - nameW // 2, y)
        # update bounds to include time bar
        fullRect.Union(wx.Rect(xSt, yPosTop, w, h))
        dc.SetIdBounds(id, fullRect)
    def drawComponent(self, dc, component, yPos):
        """Draw the timing of one component on the timeline"""
        # set an id for the region of this component (so it
        # can act as a button). see if we created this already
        id = None
        for key in self.componentFromID:
            if self.componentFromID[key] == component:
                id = key
        if not id:  # then create one and add to the dict
            id = wx.NewIdRef()
            self.componentFromID[id] = component
        dc.SetId(id)
        iconYOffset = (6, 6, 0)[self.drawSize]
        icons = self.app.iconCache
        thisIcon = icons.getComponentBitmap(component, self.iconSize)
        dc.DrawBitmap(thisIcon, self.iconXpos, yPos + iconYOffset, True)
        fullRect = wx.Rect(self.iconXpos, yPos,
                           thisIcon.GetWidth(), thisIcon.GetHeight())
        self.setFontSize(self.fontBaseSize // self.dpi, dc)
        name = component.params['name'].val
        # get size based on text
        w, h = self.GetFullTextExtent(name)[0:2]
        if w > self.iconXpos - self.dpi/5:
            # If width is greater than space available, split word at point calculated by average letter width
            maxLen = int(
                (self.iconXpos - self.GetFullTextExtent("...")[0] - self.dpi/5)
                / (w/len(name))
            )
            splitAt = int(maxLen/2)
            name = name[:splitAt] + "..." + name[-splitAt:]
            w = self.iconXpos - self.dpi/5
        # draw text
        # + x position of icon (left side)
        # - half width of icon (including whitespace around it)
        # - FULL width of text
        # + slight adjustment for whitespace
        x = self.iconXpos - thisIcon.GetWidth()/2 - w + thisIcon.GetWidth()/3
        _adjust = (5, 5, -2)[self.drawSize]
        y = yPos + thisIcon.GetHeight() // 2 - h // 2 + _adjust
        dc.DrawText(name, x, y)
        fullRect.Union(wx.Rect(x - 20, y, w, h))
        # deduce start and stop times if possible
        startTime, duration, nonSlipSafe = component.getStartAndDuration()
        # draw entries on timeline (if they have some time definition)
        if startTime is not None and duration is not None:
            # then we can draw a sensible time bar!
            dc.SetPen(wx.Pen(ThemeMixin.appColors['rt_comp'],
                             style=wx.TRANSPARENT))
            if component.params['disabled'].val:
                # Grey bar if comp is disabled
                dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_comp_disabled']))
                dc.DrawBitmap(thisIcon.ConvertToDisabled(), self.iconXpos, yPos + iconYOffset, True)
            elif 'forceEndRoutine' in component.params \
                    or 'forceEndRoutineOnPress' in component.params:
                if any(component.params[key].val
                       for key in ['forceEndRoutine', 'forceEndRoutineOnPress']
                       if key in component.params):
                    # Orange bar if component has forceEndRoutine or forceEndRoutineOnPress and either are true
                    dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_comp_force']))
                else:
                    # Blue bar if component has forceEndRoutine or forceEndRoutineOnPress but none are true
                    dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_comp']))
                dc.DrawBitmap(thisIcon, self.iconXpos, yPos + iconYOffset, True)
            else:
                # Blue bar otherwise
                dc.SetBrush(wx.Brush(ThemeMixin.appColors['rt_comp']))
                dc.DrawBitmap(thisIcon, self.iconXpos, yPos + iconYOffset, True)
            xScale = self.getSecsPerPixel()
            yOffset = (3.5, 3.5, 0.5)[self.drawSize]
            h = self.componentStep // (4, 3.25, 2.5)[self.drawSize]
            xSt = self.timeXposStart + startTime // xScale
            w = duration // xScale + 1
            if w > 10000:
                w = 10000  # limit width to 10000 pixels!
            if w < 2:
                w = 2  # make sure at least one pixel shows
            # NOTE(review): reuses `y` from the name-label position above,
            # not `yPos` - presumably so the bar aligns with the label; confirm
            dc.DrawRectangle(xSt, y + yOffset, w, h)
            # update bounds to include time bar
            fullRect.Union(wx.Rect(xSt, y + yOffset, w, h))
        dc.SetIdBounds(id, fullRect)
    def copyCompon(self, event=None, component=None):
        """This is easy - just take a copy of the component into memory
        """
        self.app.copiedCompon = copy.deepcopy(component)
    def pasteCompon(self, event=None, component=None):
        """Paste the app-level copied component into this routine, asking
        the user for a (validated) new name. Returns -1 if nothing copied."""
        if not self.app.copiedCompon:
            return -1  # not possible to paste if nothing copied
        exp = self.frame.exp
        origName = self.app.copiedCompon.params['name'].val
        defaultName = exp.namespace.makeValid(origName)
        msg = _translate('New name for copy of "%(copied)s"?  [%(default)s]')
        vals = {'copied': origName, 'default': defaultName}
        message = msg % vals
        dlg = wx.TextEntryDialog(self, message=message,
                                 caption=_translate('Paste Component'))
        if dlg.ShowModal() == wx.ID_OK:
            newName = dlg.GetValue()
            newCompon = copy.deepcopy(self.app.copiedCompon)
            if not newName:
                newName = defaultName
            newName = exp.namespace.makeValid(newName)
            newCompon.params['name'].val = newName
            if 'name' in dir(newCompon):
                newCompon.name = newName
            self.routine.addComponent(newCompon)
            self.frame.exp.namespace.user.append(newName)
            # could do redrawRoutines but would be slower?
            self.redrawRoutine()
            self.frame.addToUndoStack("PASTE Component `%s`" % newName)
        dlg.Destroy()
    def editComponentProperties(self, event=None, component=None):
        """Open the properties dialog for a component and apply changes,
        redrawing whichever views are affected."""
        # we got here from a wx.button press (rather than our own drawn icons)
        if event:
            componentName = event.EventObject.GetName()
            component = self.routine.getComponentFromName(componentName)
        # does this component have a help page?
        if hasattr(component, 'url'):
            helpUrl = component.url
        else:
            helpUrl = None
        old_name = component.params['name'].val
        old_disabled = component.params['disabled'].val
        # check current timing settings of component (if it changes we
        # need to update views)
        initialTimings = component.getStartAndDuration()
        if 'forceEndRoutine' in component.params \
                or 'forceEndRoutineOnPress' in component.params:
            # If component can force end routine, check if it did before
            initialForce = [component.params[key].val
                            for key in ['forceEndRoutine', 'forceEndRoutineOnPress']
                            if key in component.params]
        else:
            initialForce = False
        # create the dialog
        if hasattr(component, 'type') and component.type.lower() == 'code':
            _Dlg = DlgCodeComponentProperties
        else:
            _Dlg = DlgComponentProperties
        dlg = _Dlg(frame=self.frame,
                   title=component.params['name'].val + ' Properties',
                   params=component.params,
                   order=component.order, helpUrl=helpUrl, editing=True,
                   depends=component.depends)
        if dlg.OK:
            # Redraw if force end routine has changed
            if 'forceEndRoutine' in component.params \
                    or 'forceEndRoutineOnPress' in component.params:
                newForce = [component.params[key].val
                            for key in ['forceEndRoutine', 'forceEndRoutineOnPress']
                            if key in component.params]
                if initialForce != newForce:
                    self.redrawRoutine()  # need to refresh timings section
                    self.Refresh()  # then redraw visible
                    self.frame.flowPanel.draw()
            # Redraw if timings have changed
            if component.getStartAndDuration() != initialTimings:
                self.redrawRoutine()  # need to refresh timings section
                self.Refresh()  # then redraw visible
                self.frame.flowPanel.draw()
                # self.frame.flowPanel.Refresh()
            elif component.params['name'].val != old_name:
                self.redrawRoutine()  # need to refresh name
            elif component.params['disabled'].val != old_disabled:
                self.redrawRoutine()  # need to refresh color
            self.frame.exp.namespace.remove(old_name)
            self.frame.exp.namespace.add(component.params['name'].val)
            self.frame.addToUndoStack("EDIT `%s`" %
                                      component.params['name'].val)
    def getSecsPerPixel(self):
        """Return the time (secs) represented by one horizontal pixel."""
        pixels = float(self.timeXposEnd - self.timeXposStart)
        return self.getMaxTime() / pixels
class ComponentsPanel(scrolledpanel.ScrolledPanel):
    def __init__(self, frame, id=-1):
        """A panel that displays available components.

        Builds one collapsible section per category (Favorites first),
        each containing a grid of component buttons.
        """
        self.frame = frame
        self.app = frame.app
        self.dpi = self.app.dpi
        panelWidth = 3 * 48 + 50
        scrolledpanel.ScrolledPanel.__init__(self,
                                             frame,
                                             id,
                                             size=(panelWidth, 10 * self.dpi),
                                             style=wx.BORDER_NONE)
        self._maxBtnWidth = 0  # will store width of widest button
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.componentButtons = []
        self.components = experiment.getAllComponents(
            self.app.prefs.builder['componentsFolders'])
        categories = ['Favorites']
        categories.extend(components.getAllCategories(
            self.app.prefs.builder['componentsFolders']))
        # get rid of hidden components
        for hiddenComp in self.frame.prefs['hiddenComponents']:
            if hiddenComp in self.components:
                del self.components[hiddenComp]
        # also remove settings - that's in toolbar not components panel
        del self.components['SettingsComponent']
        # get favorites
        self.favorites = FavoriteComponents(componentsPanel=self)
        # create labels and sizers for each category
        self.componentFromID = {}
        self.panels = {}
        # to keep track of the objects (sections and section labels)
        # within the main sizer
        self.sizerList = []
        for categ in categories:
            # use the localized label where one exists
            if categ in _localized:
                label = _localized[categ]
            else:
                label = categ
            _style = platebtn.PB_STYLE_DROPARROW | platebtn.PB_STYLE_SQUARE
            sectionBtn = PsychopyPlateBtn(self, -1, label, style=_style, name=categ)
            # Link to onclick functions
            sectionBtn.Bind(wx.EVT_LEFT_DOWN, self.onSectionBtn)
            sectionBtn.Bind(wx.EVT_RIGHT_DOWN, self.onSectionBtn)
            # Set button background and link to onhover functions
            #sectionBtn.Bind(wx.EVT_ENTER_WINDOW, self.onHover)
            #sectionBtn.Bind(wx.EVT_LEAVE_WINDOW, self.offHover)
            self.panels[categ] = wx.FlexGridSizer(cols=1)
            self.sizer.Add(sectionBtn, flag=wx.EXPAND)
            self.sizerList.append(sectionBtn)
            self.sizer.Add(self.panels[categ], flag=wx.ALIGN_CENTER)
            self.sizerList.append(self.panels[categ])
        # NOTE(review): makeComponentButtons() returns None, so maxWidth
        # is unused here (button widths are tracked in self._maxBtnWidth)
        maxWidth = self.makeComponentButtons()
        self._rightClicked = None
        # start all except for Favorites collapsed
        for section in categories[1:]:
            self.toggleSection(self.panels[section])
        self.Bind(wx.EVT_SIZE, self.on_resize)
        self.SetSizer(self.sizer)
        self.SetAutoLayout(True)
        self.SetupScrolling()
        # double buffered better rendering except if retina
        self.SetDoubleBuffered(self.frame.IsDoubleBuffered())
        self._applyAppTheme()  # bitmaps only just loaded
def _applyAppTheme(self, target=None):
cs = ThemeMixin.appColors
# Style component panel
self.SetForegroundColour(cs['text'])
self.SetBackgroundColour(cs['panel_bg'])
# Style component buttons
for btn in self.componentButtons:
btn.SetForegroundColour(cs['text'])
btn.SetBackgroundColour(cs['panel_bg'])
# then apply to all children as well
for c in self.GetChildren():
if hasattr(c, '_applyAppTheme'):
# if the object understands themes then request that
c._applyAppTheme()
self.Refresh()
self.Update()
def on_resize(self, event):
cols = self.GetClientSize()[0] // self._maxBtnWidth
for category in list(self.panels.values()):
category.SetCols(max(1, cols))
def makeFavoriteButtons(self):
# add a copy of each favorite to that panel first
for thisName in self.favorites.getFavorites():
self.addComponentButton(thisName, self.panels['Favorites'])
def makeComponentButtons(self):
"""Make all the components buttons, including favorites
"""
self.makeFavoriteButtons()
# then add another copy for each category that the component itself
# lists
componentNames = list(self.components.keys())
componentNames.sort()
for thisName in componentNames:
thisComp = self.components[thisName]
# NB thisComp is a class - we can't use its methods/attribs until
# it is an instance
for category in thisComp.categories:
panel = self.panels[category]
self.addComponentButton(thisName, panel)
    def addComponentButton(self, name, panel):
        """Create a component button and add it to a specific panel's sizer

        Parameters
        ----------
        name : str
            The component class name (e.g. 'MouseComponent').
        panel : wx.Sizer
            The category sizer to add the button to.
        """
        iconCache = self.app.iconCache
        # get a shorter name too (without "Component")
        shortName = name
        for redundant in ['component', 'Component', "ButtonBox"]:
            shortName = shortName.replace(redundant, "")
        # Convert from CamelCase to Title Case for button label
        label = ""
        for i, c in enumerate(shortName):
            # break the label at each interior capital letter
            if c.isupper() and i > 0:
                label += "\n"
            label += c
        # set size
        size = 48
        # get tooltip
        if name in components.tooltips:
            thisTip = components.tooltips[name]
        else:
            thisTip = shortName
        btn = iconCache.getComponentButton(
            parent=self,
            name=name,
            label=label,
            size=size,
            tip=thisTip,
        )
        # btn will be none if a favorite is not found (e.g. user has multiple
        # versions of psychopy installed
        if btn is None:
            return
        # then set up positioning etc
        btn.SetBitmapPosition(wx.TOP)
        self.componentFromID[btn.GetId()] = name
        # use btn.bind instead of self.Bind in oder to trap event here
        btn.Bind(wx.EVT_RIGHT_DOWN, self.onRightClick)
        self.Bind(wx.EVT_BUTTON, self.onClick, btn)
        # ,wx.EXPAND|wx.ALIGN_CENTER )
        panel.Add(btn, proportion=0, flag=wx.ALIGN_RIGHT)
        # track the widest button so on_resize can compute column counts
        self._maxBtnWidth = max(btn.GetSize()[0], self._maxBtnWidth)
        self.componentButtons.append(btn)
def onSectionBtn(self, evt):
if hasattr(evt, 'GetString'):
buttons = self.panels[evt.GetString()]
else:
btn = evt.GetEventObject()
buttons = self.panels[btn.GetName()]
self.toggleSection(buttons)
def toggleSection(self, section):
ii = self.sizerList.index(section)
self.sizer.Show(ii, not self.sizer.IsShown(ii)) # ie toggle this item
self.sizer.Layout()
self.SetupScrolling()
def getIndexInSizer(self, obj, sizer):
"""Find index of an item within a sizer (to see if it's there
or to toggle visibility)
WX sizers don't (as of v2.8.11) have a way to find the index of
their contents. This method helps get around that.
"""
# if the obj is itself a sizer (e.g. within the main sizer then
# we can't even use sizer.Children (as far as I can work out)
# so we keep a list to track the contents.
# for the main sizer we kept track of everything with a list:
if sizer == self.sizer:
return self.sizerList.index(obj)
index = None
for ii, child in enumerate(sizer.Children):
if child.GetWindow() == obj:
index = ii
break
return index
def onRightClick(self, evt):
"""
Defines rightclick behavior within builder view's
components panel
"""
btn = evt.GetEventObject()
self._rightClicked = btn
index = self.getIndexInSizer(btn, self.panels['Favorites'])
if index is None:
# not currently in favs
msg = "Add to favorites"
clickFunc = self.onAddToFavorites
else:
# is currently in favs
msg = "Remove from favorites"
clickFunc = self.onRemFromFavorites
menu = wx.Menu()
id = wx.NewIdRef()
menu.Append(id, _localized[msg])
menu.Bind(wx.EVT_MENU, clickFunc, id=id)
# where to put the context menu
x, y = evt.GetPosition() # this is position relative to object
xBtn, yBtn = evt.GetEventObject().GetPosition()
self.PopupMenu(menu, (x + xBtn, y + yBtn))
menu.Destroy() # destroy to avoid mem leak
    def onClick(self, evt, timeout=None):
        """
        Defines left-click behavior for builder views components panel

        Creates a new component of the clicked type, shows its properties
        dialog and (if accepted) adds it to the current routine, then
        promotes the component's favourites level.

        :param: evt can be a wx.Event OR a component class name (MouseComponent)

        Returns False if there is no current routine, True otherwise.
        """
        # get name of current routine
        currRoutinePage = self.frame.routinePanel.getCurrentPage()
        if not currRoutinePage:
            msg = _translate("Create a routine (Experiment menu) "
                             "before adding components")
            dialogs.MessageDialog(self, msg, type='Info',
                                  title=_translate('Error')).ShowModal()
            return False
        currRoutine = self.frame.routinePanel.getCurrentRoutine()
        # get component name
        if hasattr(evt, "GetId"):
            newClassStr = self.componentFromID[evt.GetId()]
        else:
            # evt was the class name itself (programmatic call)
            newClassStr = evt
        newCompClass = self.components[newClassStr]
        newComp = newCompClass(parentName=currRoutine.name,
                               exp=self.frame.exp)
        # does this component have a help page?
        if hasattr(newComp, 'url'):
            helpUrl = newComp.url
        else:
            helpUrl = None
        # create component template
        if newClassStr == 'CodeComponent':
            _Dlg = DlgCodeComponentProperties
        else:
            _Dlg = DlgComponentProperties
        dlg = _Dlg(frame=self.frame,
                   title='{} Properties'.format(newComp.params['name']),
                   params=newComp.params, order=newComp.order,
                   helpUrl=helpUrl,
                   depends=newComp.depends,
                   timeout=timeout)
        compName = newComp.params['name']
        if dlg.OK:
            currRoutine.addComponent(newComp)  # add to the actual routing
            namespace = self.frame.exp.namespace
            # make the chosen name legal and unique within the experiment
            newComp.params['name'].val = namespace.makeValid(
                newComp.params['name'].val)
            namespace.add(newComp.params['name'].val)
            # update the routine's view with the new component too
            currRoutinePage.redrawRoutine()
            self.frame.addToUndoStack(
                "ADD `%s` to `%s`" % (compName, currRoutine.name))
            wasNotInFavs = (newClassStr not in self.favorites.getFavorites())
            self.favorites.promoteComponent(newClassStr, 1)
            # was that promotion enough to be a favorite?
            if wasNotInFavs and newClassStr in self.favorites.getFavorites():
                self.addComponentButton(newClassStr, self.panels['Favorites'])
                self.sizer.Layout()
        return True
def onAddToFavorites(self, evt=None, btn=None):
"""Defines Add To Favorites Menu Behavior"""
if btn is None:
btn = self._rightClicked
if btn.Name not in self.favorites.getFavorites():
# check we aren't duplicating
self.favorites.makeFavorite(btn.Name)
self.addComponentButton(btn.Name, self.panels['Favorites'])
self.sizer.Layout()
self._rightClicked = None
def onRemFromFavorites(self, evt=None, btn=None):
"""Defines Remove from Favorites Menu Behavior"""
if btn is None:
btn = self._rightClicked
index = self.getIndexInSizer(btn, self.panels['Favorites'])
if index is None:
pass
else:
self.favorites.setLevel(btn.Name, -100)
btn.Destroy()
self.sizer.Layout()
self._rightClicked = None
def onHover(self, evt):
cs = ThemeMixin.appColors
btn = evt.GetEventObject()
btn.SetBackgroundColour(cs['bmpbutton_bg_hover'])
btn.SetForegroundColour(cs['bmpbutton_fg_hover'])
def offHover(self, evt):
cs = ThemeMixin.appColors
btn = evt.GetEventObject()
btn.SetBackgroundColour(cs['panel_bg'])
btn.SetForegroundColour(cs['text'])
class FavoriteComponents(object):
    """Defines the Favorite Components Object class, meant for dealing with
    the user's frequently accessed components.

    Each component has an integer usage level stored in the app prefs;
    levels at or above `threshold` qualify for the Favorites section and
    10000 marks an explicit user favourite (see makeFavorite).
    """

    def __init__(self, componentsPanel, threshold=20, neutral=0):
        """
        :param componentsPanel: the panel owning the component buttons
        :param threshold: level at/above which a component is a favourite
        :param neutral: starting level for unseen components
        """
        super(FavoriteComponents, self).__init__()
        # bug fix: the constructor previously ignored these parameters and
        # always assigned the literals 20 and 0 (defaults are unchanged,
        # so default behaviour is identical)
        self.threshold = threshold
        self.neutral = neutral
        self.panel = componentsPanel
        self.frame = componentsPanel.frame
        self.app = self.frame.app
        self.prefs = self.app.prefs
        # persisted mapping: component class name -> usage level
        self.currentLevels = self.prefs.appDataCfg['builder']['favComponents']
        self.setDefaults()

    def setDefaults(self):
        """Defines Default Favorite Components"""
        # set those that are favorites by default
        for comp in ('ImageComponent', 'KeyboardComponent',
                     'SoundComponent', 'TextComponent',
                     'MouseComponent', 'SliderComponent',
                     ):
            if comp not in self.currentLevels or self.currentLevels[comp] != 0:
                self.currentLevels[comp] = self.threshold
        # every other known component starts at the neutral level
        for comp in self.panel.components:
            if comp not in self.currentLevels:
                self.currentLevels[comp] = self.neutral

    def makeFavorite(self, compName):
        """Set the value of this component to an arbitrary high value (10000)
        """
        self.currentLevels[compName] = 10000

    def promoteComponent(self, compName, value=1):
        """Promote this component by a certain value (negative to demote)
        """
        self.currentLevels[compName] += value

    def setLevel(self, compName, value=0):
        """Set the level to neutral (0) favourite (20?) or banned (-1000?)
        """
        self.currentLevels[compName] = value

    def getFavorites(self):
        """Returns a list of favorite components. Each must have level greater
        than the threshold and there will be not more than
        max length prefs['builder']['maxFavorites']
        """
        sortedVals = sorted(list(self.currentLevels.items()),
                            key=lambda x: x[1], reverse=True)
        favorites = []
        maxFav = self.prefs.builder['maxFavorites']
        for name, level in sortedVals:
            # this has been explicitly requested (or REALLY liked!)
            if level >= 10000:
                favorites.append(name)
            elif level >= self.threshold and len(favorites) < maxFav:
                favorites.append(name)
            else:
                # either we've run out of levels>10000 or exceeded maxFavs or
                # run out of level >= thresh
                break
        return favorites
class ReadmeFrame(wx.Frame):
    """Defines construction of the Readme Frame

    A hideable frame for presenting/loading/saving the readme file that
    accompanies a builder experiment.
    """

    def __init__(self, parent):
        """
        A frame for presenting/loading/saving readme files

        :param parent: the builder frame owning this readme window
        """
        self.parent = parent
        title = "%s readme" % (parent.exp.name)
        # mtime of the loaded file; used by fileSave to detect external edits
        self._fileLastModTime = None
        pos = wx.Point(parent.Position[0] + 80, parent.Position[1] + 80)
        _style = wx.DEFAULT_FRAME_STYLE | wx.FRAME_FLOAT_ON_PARENT
        wx.Frame.__init__(self, parent, title=title,
                          size=(600, 500), pos=pos, style=_style)
        self.Bind(wx.EVT_CLOSE, self.onClose)
        self.Hide()
        self.makeMenus()
        self.ctrl = wx.TextCtrl(self, style=wx.TE_MULTILINE)

    def onClose(self, evt=None):
        """
        Defines behavior on close of the Readme Frame
        """
        # clear the parent's reference so a new frame is created next time
        self.parent.readmeFrame = None
        self.Destroy()

    def makeMenus(self):
        """Produces menus for the Readme Frame"""
        # ---Menus---#000000#FFFFFF-------------------------------------------
        menuBar = wx.MenuBar()
        # ---_file---#000000#FFFFFF-------------------------------------------
        self.fileMenu = wx.Menu()
        menuBar.Append(self.fileMenu, _translate('&File'))
        menu = self.fileMenu
        keys = self.parent.app.keys
        menu.Append(wx.ID_SAVE, _translate("&Save\t%s") % keys['save'])
        menu.Append(wx.ID_CLOSE,
                    _translate("&Close readme\t%s") % keys['close'])
        item = menu.Append(-1,
                           _translate("&Toggle readme\t%s") % keys[
                               'toggleReadme'],
                           _translate("Toggle Readme"))
        self.Bind(wx.EVT_MENU, self.toggleVisible, item)
        self.Bind(wx.EVT_MENU, self.fileSave, id=wx.ID_SAVE)
        # 'close' only hides the frame, so the contents persist
        self.Bind(wx.EVT_MENU, self.toggleVisible, id=wx.ID_CLOSE)
        self.SetMenuBar(menuBar)

    def setFile(self, filename):
        """Sets the readme file found with current builder experiment

        Returns False when the file is missing or unreadable; on success
        the contents are loaded into the text control (and, as written,
        None is returned implicitly).
        """
        self.filename = filename
        self.expName = self.parent.exp.getExpName()
        # check we can read
        if filename is None:  # check if we can write to the directory
            return False
        elif not os.path.exists(filename):
            self.filename = None
            return False
        elif not os.access(filename, os.R_OK):
            msg = "Found readme file (%s) no read permissions"
            logging.warning(msg % filename)
            return False
        # attempt to open
        try:
            # utf-8-sig transparently strips a BOM if present
            f = codecs.open(filename, 'r', 'utf-8-sig')
        except IOError as err:
            msg = ("Found readme file for %s and appear to have"
                   " permissions, but can't open")
            logging.warning(msg % self.expName)
            logging.warning(err)
            return False
        # attempt to read
        try:
            readmeText = f.read().replace("\r\n", "\n")
        except Exception:
            msg = ("Opened readme file for %s it but failed to read it "
                   "(not text/unicode?)")
            logging.error(msg % self.expName)
            return False
        f.close()
        # remember mtime so fileSave can warn about external edits
        self._fileLastModTime = os.path.getmtime(filename)
        self.ctrl.SetValue(readmeText)
        self.SetTitle("%s readme (%s)" % (self.expName, filename))

    def fileSave(self, evt=None):
        """Defines save behavior for readme frame"""
        mtime = os.path.getmtime(self.filename)
        if self._fileLastModTime and mtime > self._fileLastModTime:
            # file changed on disk since it was loaded; warn but still save
            logging.warning(
                'readme file has been changed by another programme?')
        txt = self.ctrl.GetValue()
        with codecs.open(self.filename, 'w', 'utf-8-sig') as f:
            f.write(txt)

    def toggleVisible(self, evt=None):
        """Defines visibility toggle for readme frame"""
        if self.IsShown():
            self.Hide()
        else:
            self.Show()
class ExportFileDialog(wx.Dialog):
    """Dialog shown before exporting an experiment to HTML.

    Displays the (possibly shortened) output file path and lets the user
    choose when HTML exports should happen ('on Save', 'on Sync' or
    'manually'). OK proceeds with the export; Cancel aborts.
    """

    def __init__(self, parent, ID, title, size=wx.DefaultSize,
                 pos=wx.DefaultPosition, style=wx.DEFAULT_DIALOG_STYLE,
                 filePath=None, exp=None):
        wx.Dialog.__init__(self, parent, ID, title,
                           size=size, pos=pos, style=style)
        # Now continue with the normal construction of the dialog
        # contents
        # NOTE(review): `exp` is read below (exp.settings.params), so the
        # None default works only for signature compatibility - confirm
        # all callers pass an experiment.
        self.exp = exp
        sizer = wx.BoxSizer(wx.VERTICAL)
        box = wx.BoxSizer(wx.HORIZONTAL)
        label = wx.StaticText(self, wx.ID_ANY, _translate("Filepath:"))
        box.Add(label, 0, wx.ALIGN_CENTRE | wx.ALL, 5)
        if filePath is None:
            # bug fix: the default filePath=None previously crashed on
            # len(None); show an empty path instead
            filePath = ''
        elif len(filePath) > 70:
            # shorten very long paths so the dialog keeps a sane width
            filePath = filePath[:20] + "....." + filePath[-40:]
        self.filePath = wx.StaticText(self, wx.ID_ANY, filePath, size=(500, -1))
        box.Add(self.filePath, 1, wx.ALIGN_CENTRE | wx.ALL, 5)
        sizer.Add(box, 0, wx.GROW | wx.ALL, 5)
        # Set save on export HTML choice
        box = wx.BoxSizer(wx.HORIZONTAL)
        choices = ['on Save', 'on Sync', 'manually']
        exportLabel = _translate("Select 'manually' to receive this alert when exporting HTML.\n"
                                 "Click 'OK' to export HTML, or click 'Cancel' to return.")
        self.exportOnSave = wx.Choice(self, wx.ID_ANY,
                                      size=wx.DefaultSize,
                                      choices=choices)
        # pre-select the experiment's current exportHTML setting
        self.exportOnSave.SetSelection(choices.index(self.exp.settings.params['exportHTML']))
        self.exportText = wx.StaticText(self, wx.ID_ANY, exportLabel)
        self.exportOnSave.SetHelpText(exportLabel)
        box.Add(self.exportOnSave, .5, wx.ALIGN_CENTRE | wx.ALL, 5)
        box.Add(self.exportText, 1, wx.ALIGN_CENTRE | wx.ALL, 5)
        sizer.Add(box, 0, wx.GROW | wx.ALL, 5)
        line = wx.StaticLine(self, wx.ID_ANY, size=(20, -1),
                             style=wx.LI_HORIZONTAL)
        sizer.Add(line, 0,
                  wx.GROW | wx.RIGHT | wx.TOP, 5)
        btnsizer = wx.StdDialogButtonSizer()
        btn = wx.Button(self, wx.ID_OK)
        btn.SetHelpText("The OK button completes the dialog")
        btn.SetDefault()
        btnsizer.AddButton(btn)
        btn = wx.Button(self, wx.ID_CANCEL)
        btn.SetHelpText("The Cancel button cancels the dialog. (Crazy, huh?)")
        btnsizer.AddButton(btn)
        sizer.Add(btnsizer, 0, wx.ALL, 5)
        self.SetSizerAndFit(sizer)
class FlowPanel(wx.ScrolledWindow):
    def __init__(self, frame, id=-1):
        """A panel that shows how the routines will fit together

        :param frame: the builder frame owning this panel
        :param id: wx window id
        """
        self.frame = frame
        self.app = frame.app
        self.dpi = self.app.dpi
        wx.ScrolledWindow.__init__(self, frame, id, (0, 0),
                                   size=wx.Size(8 * self.dpi, 3 * self.dpi),
                                   style=wx.HSCROLL | wx.VSCROLL | wx.BORDER_NONE)
        self.needUpdate = True
        self.maxWidth = 50 * self.dpi
        self.maxHeight = 2 * self.dpi
        self.mousePos = None
        # if we're adding a loop or routine then add spots to timeline
        # self.drawNearestRoutinePoint = True
        # self.drawNearestLoopPoint = False
        # lists the x-vals of points to draw, eg loop locations:
        self.pointsToDraw = []
        # for flowSize, showLoopInfoInFlow:
        self.appData = self.app.prefs.appData
        # self.SetAutoLayout(True)
        self.SetScrollRate(self.dpi / 4, self.dpi / 4)
        # create a PseudoDC to record our drawing
        self.pdc = PseudoDC()
        if parse_version(wx.__version__) < parse_version('4.0.0a1'):
            # wx < 4 used a different name for the rounded-rectangle call
            self.pdc.DrawRoundedRectangle = self.pdc.DrawRoundedRectangleRect
        self.pen_cache = {}
        self.brush_cache = {}
        # vars for handling mouse clicks
        self.hitradius = 5
        self.dragid = -1
        self.entryPointPosList = []
        self.entryPointIDlist = []
        self.gapsExcluded = []
        # mode can also be 'loopPoint1','loopPoint2','routinePoint'
        self.mode = 'normal'
        self.insertingRoutine = ""
        # for the context menu use the ID of the drawn icon to retrieve
        # the component (loop or routine)
        self.componentFromID = {}
        self.contextMenuLabels = {
            'remove': _translate('remove'),
            'rename': _translate('rename')}
        self.contextMenuItems = ['remove', 'rename']
        self.contextItemFromID = {}
        self.contextIDFromItem = {}
        for item in self.contextMenuItems:
            id = wx.NewIdRef()
            self.contextItemFromID[id] = item
            self.contextIDFromItem[item] = id
        # self.btnInsertRoutine = wx.Button(self,-1,
        #                                  'Insert Routine', pos=(10,10))
        # self.btnInsertLoop = wx.Button(self,-1,'Insert Loop', pos=(10,30))
        labelRoutine = _translate('Insert Routine ')
        labelLoop = _translate('Insert Loop ')
        btnHeight = 50
        # Create add routine button
        self.btnInsertRoutine = PsychopyPlateBtn(
            self, -1, labelRoutine, pos=(10, 10), size=(120, btnHeight),
            style=platebtn.PB_STYLE_SQUARE
        )
        # Create add loop button
        self.btnInsertLoop = PsychopyPlateBtn(
            self, -1, labelLoop, pos=(10, btnHeight+20),
            size=(120, btnHeight),
            style=platebtn.PB_STYLE_SQUARE
        )  # spaces give size for CANCEL
        # use self.appData['flowSize'] to index a tuple to get a specific
        # value, eg: (4,6,8)[self.appData['flowSize']]
        self.flowMaxSize = 2  # upper limit on increaseSize
        # bind events
        self.Bind(wx.EVT_MOUSE_EVENTS, self.OnMouse)
        self.Bind(wx.EVT_BUTTON, self.onInsertRoutine, self.btnInsertRoutine)
        self.Bind(wx.EVT_BUTTON, self.setLoopPoint1, self.btnInsertLoop)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        # Esc cancels any in-progress insert (see clearMode)
        idClear = wx.NewIdRef()
        self.Bind(wx.EVT_MENU, self.clearMode, id=idClear)
        aTable = wx.AcceleratorTable([
            (wx.ACCEL_NORMAL, wx.WXK_ESCAPE, idClear)
        ])
        self.SetAcceleratorTable(aTable)
        # double buffered better rendering except if retina
        self.SetDoubleBuffered(self.frame.IsDoubleBuffered())
        self._applyAppTheme()
def _applyAppTheme(self, target=None):
"""Apply any changes which have been made to the theme since panel was last loaded"""
cs = ThemeMixin.appColors
# Style loop/routine buttons
self.btnInsertLoop.SetBackgroundColour(cs['frame_bg'])
self.btnInsertLoop.SetForegroundColour(cs['text'])
self.btnInsertLoop.Update()
self.btnInsertRoutine.SetBackgroundColour(cs['frame_bg'])
self.btnInsertRoutine.SetForegroundColour(cs['text'])
self.btnInsertRoutine.Update()
# Set background
self.SetBackgroundColour(cs['panel_bg'])
self.draw()
def clearMode(self, event=None):
"""If we were in middle of doing something (like inserting routine)
then end it, allowing user to cancel
"""
self.mode = 'normal'
self.insertingRoutine = None
for id in self.entryPointIDlist:
self.pdc.RemoveId(id)
self.entryPointPosList = []
self.entryPointIDlist = []
self.gapsExcluded = []
self.draw()
self.frame.SetStatusText("")
self.btnInsertRoutine.SetLabel(_translate('Insert Routine'))
self.btnInsertRoutine.Update()
self.btnInsertLoop.SetLabel(_translate('Insert Loop'))
self.btnInsertRoutine.Update()
def ConvertEventCoords(self, event):
xView, yView = self.GetViewStart()
xDelta, yDelta = self.GetScrollPixelsPerUnit()
return (event.GetX() + (xView * xDelta),
event.GetY() + (yView * yDelta))
def OffsetRect(self, r):
"""Offset the rectangle, r, to appear in the given position
in the window
"""
xView, yView = self.GetViewStart()
xDelta, yDelta = self.GetScrollPixelsPerUnit()
r.Offset((-(xView * xDelta), -(yView * yDelta)))
    def onInsertRoutine(self, evt):
        """For when the insert Routine button is pressed - bring up
        dialog and present insertion point on flow line.
        see self.insertRoutine() for further info
        """
        if self.mode.startswith('loopPoint'):
            # cancel a half-finished loop insert first, then continue
            self.clearMode()
        elif self.mode == 'routine':
            # clicked again with label now being "Cancel..."
            self.clearMode()
            return
        self.frame.SetStatusText(_translate(
            "Select a Routine to insert (Esc to exit)"))
        menu = wx.Menu()
        self.routinesFromID = {}
        # '(new)' creates a routine on the fly, then inserts it
        id = wx.NewIdRef()
        menu.Append(id, '(new)')
        self.routinesFromID[id] = '(new)'
        menu.Bind(wx.EVT_MENU, self.insertNewRoutine, id=id)
        for routine in self.frame.exp.routines:
            id = wx.NewIdRef()
            menu.Append(id, routine)
            self.routinesFromID[id] = routine
            menu.Bind(wx.EVT_MENU, self.onInsertRoutineSelect, id=id)
        # pop the menu up just below the button
        btnPos = self.btnInsertRoutine.GetRect()
        menuPos = (btnPos[0], btnPos[1] + btnPos[3])
        self.PopupMenu(menu, menuPos)
        menu.Bind(wx.EVT_MENU_CLOSE, self.clearMode)
        menu.Destroy()  # destroy to avoid mem leak
def insertNewRoutine(self, event):
"""selecting (new) is a short-cut for:
make new routine, insert it into the flow
"""
newRoutine = self.frame.routinePanel.createNewRoutine(returnName=True)
if newRoutine:
self.routinesFromID[event.GetId()] = newRoutine
self.onInsertRoutineSelect(event)
else:
self.clearMode()
def onInsertRoutineSelect(self, event):
"""User has selected a routine to be entered so bring up the
entrypoint marker and await mouse button press.
see self.insertRoutine() for further info
"""
self.mode = 'routine'
self.btnInsertRoutine.SetLabel(_translate('CANCEL Insert'))
self.frame.SetStatusText(_translate(
'Click where you want to insert the Routine, or CANCEL insert.'))
self.insertingRoutine = self.routinesFromID[event.GetId()]
x = self.getNearestGapPoint(0)
self.drawEntryPoints([x])
def insertRoutine(self, ii):
"""Insert a routine into the Flow knowing its name and location
onInsertRoutine() the button has been pressed so present menu
onInsertRoutineSelect() user selected the name so present entry points
OnMouse() user has selected a point on the timeline to insert entry
"""
rtn = self.frame.exp.routines[self.insertingRoutine]
self.frame.exp.flow.addRoutine(rtn, ii)
self.frame.addToUndoStack("ADD Routine `%s`" % rtn.name)
# reset flow drawing (remove entry point)
self.clearMode()
def setLoopPoint1(self, evt=None):
"""Someone pushed the insert loop button.
Fetch the dialog
"""
if self.mode == 'routine':
self.clearMode()
# clicked again, label is "Cancel..."
elif self.mode.startswith('loopPoint'):
self.clearMode()
return
self.btnInsertLoop.SetLabel(_translate('CANCEL insert'))
self.mode = 'loopPoint1'
self.frame.SetStatusText(_translate(
'Click where you want the loop to start/end, or CANCEL insert.'))
x = self.getNearestGapPoint(0)
self.drawEntryPoints([x])
    def setLoopPoint2(self, evt=None):
        """We have the location of the first point, waiting to get the second

        Gaps that would make the loop cross another loop's boundary are
        excluded; if only one valid gap remains the loop is inserted
        immediately.
        """
        self.mode = 'loopPoint2'
        self.frame.SetStatusText(_translate(
            'Click the other end for the loop'))
        thisPos = self.entryPointPosList[0]
        self.gapsExcluded = [thisPos]
        self.gapsExcluded.extend(self.getGapPointsCrossingStreams(thisPos))
        # is there more than one available point
        # mouse x relative to this panel's screen position
        diff = wx.GetMousePosition()[0] - self.GetScreenPosition()[0]
        x = self.getNearestGapPoint(diff, exclude=self.gapsExcluded)
        self.drawEntryPoints([self.entryPointPosList[0], x])
        nAvailableGaps = len(self.gapMidPoints) - len(self.gapsExcluded)
        if nAvailableGaps == 1:
            self.insertLoop()  # there's only one place - use it
    def insertLoop(self, evt=None):
        """Show the loop-properties dialog and, if accepted, add the loop
        spanning the two chosen entry points to the flow.
        """
        # bring up listbox to choose the routine to add, and / or a new one
        loopDlg = DlgLoopProperties(frame=self.frame,
                                    helpUrl=self.app.urls['builder.loops'])
        # convert the two entry-point x positions back to flow indices
        startII = self.gapMidPoints.index(min(self.entryPointPosList))
        endII = self.gapMidPoints.index(max(self.entryPointPosList))
        if loopDlg.OK:
            handler = loopDlg.currentHandler
            self.frame.exp.flow.addLoop(handler,
                                        startPos=startII, endPos=endII)
            action = "ADD Loop `%s` to Flow" % handler.params['name'].val
            self.frame.addToUndoStack(action)
        # whether accepted or cancelled, leave insertion mode and redraw
        self.clearMode()
        self.draw()
def increaseSize(self, event=None):
if self.appData['flowSize'] == self.flowMaxSize:
self.appData['showLoopInfoInFlow'] = True
self.appData['flowSize'] = min(
self.flowMaxSize, self.appData['flowSize'] + 1)
self.clearMode() # redraws
def decreaseSize(self, event=None):
if self.appData['flowSize'] == 0:
self.appData['showLoopInfoInFlow'] = False
self.appData['flowSize'] = max(0, self.appData['flowSize'] - 1)
self.clearMode() # redraws
    def editLoopProperties(self, event=None, loop=None):
        """Open the loop-properties dialog for an existing loop and apply
        any changes, possibly replacing the loop with one of a new type.

        :param loop: the loop handler being edited (required in practice)
        """
        # add routine points to the timeline
        self.setDrawPoints('loops')
        self.draw()
        if 'conditions' in loop.params:
            # remember originals so a cancel can restore them below
            condOrig = loop.params['conditions'].val
            condFileOrig = loop.params['conditionsFile'].val
        title = loop.params['name'].val + ' Properties'
        loopDlg = DlgLoopProperties(frame=self.frame,
                                    helpUrl=self.app.urls['builder.loops'],
                                    title=title, loop=loop)
        if loopDlg.OK:
            prevLoop = loop
            if loopDlg.params['loopType'].val == 'staircase':
                loop = loopDlg.stairHandler
            elif loopDlg.params['loopType'].val == 'interleaved staircases':
                loop = loopDlg.multiStairHandler
            else:
                # ['random','sequential', 'fullRandom', ]
                loop = loopDlg.trialHandler
            # if the loop is a whole new class then we can't just update the
            # params
            if loop.getType() != prevLoop.getType():
                # get indices for start and stop points of prev loop
                flow = self.frame.exp.flow
                # find the index of the initiator
                startII = flow.index(prevLoop.initiator)
                # minus one because initiator will have been deleted
                endII = flow.index(prevLoop.terminator) - 1
                # remove old loop completely
                flow.removeComponent(prevLoop)
                # finally insert the new loop
                flow.addLoop(loop, startII, endII)
            self.frame.addToUndoStack("EDIT Loop `%s`" %
                                      (loop.params['name'].val))
        elif 'conditions' in loop.params:
            # dialog cancelled: restore the original conditions
            loop.params['conditions'].val = condOrig
            loop.params['conditionsFile'].val = condFileOrig
        # remove the points from the timeline
        self.setDrawPoints(None)
        self.draw()
    def OnMouse(self, event):
        """Dispatch mouse events according to the current interaction mode:
        'normal' handles click/context-menu on flow icons; the
        routine/loopPoint modes track and commit insertion entry points.
        """
        x, y = self.ConvertEventCoords(event)
        handlerTypes = ('StairHandler', 'TrialHandler', 'MultiStairHandler')
        if self.mode == 'normal':
            if event.LeftDown():
                icons = self.pdc.FindObjectsByBBox(x, y)
                for thisIcon in icons:
                    # might intersect several and only one has a callback
                    if thisIcon in self.componentFromID:
                        comp = self.componentFromID[thisIcon]
                        if comp.getType() in handlerTypes:
                            self.editLoopProperties(loop=comp)
                        if comp.getType() == 'Routine':
                            self.frame.routinePanel.setCurrentRoutine(
                                routine=comp)
            elif event.RightDown():
                icons = self.pdc.FindObjectsByBBox(x, y)
                # todo: clean-up remove `comp`, its unused
                comp = None
                for thisIcon in icons:
                    # might intersect several and only one has a callback
                    if thisIcon in self.componentFromID:
                        # loop through comps looking for Routine, or a Loop if
                        # no routine
                        thisComp = self.componentFromID[thisIcon]
                        if thisComp.getType() in handlerTypes:
                            comp = thisComp  # unused
                            icon = thisIcon
                        if thisComp.getType() == 'Routine':
                            comp = thisComp
                            icon = thisIcon
                            break  # we've found a Routine so stop looking
                self.frame.routinePanel.setCurrentRoutine(comp)
                try:
                    # `icon` is only bound if an icon was actually hit
                    self._menuComponentID = icon
                    xy = wx.Point(x + self.GetPosition()[0],
                                  y + self.GetPosition()[1])
                    self.showContextMenu(self._menuComponentID, xy=xy)
                except UnboundLocalError:
                    # right click but not on an icon
                    # might as well do something
                    self.Refresh()
        elif self.mode == 'routine':
            if event.LeftDown():
                # commit the routine insert at the marked gap
                pt = self.entryPointPosList[0]
                self.insertRoutine(ii=self.gapMidPoints.index(pt))
            else:  # move spot if needed
                point = self.getNearestGapPoint(mouseX=x)
                self.drawEntryPoints([point])
        elif self.mode == 'loopPoint1':
            if event.LeftDown():
                self.setLoopPoint2()
            else:  # move spot if needed
                point = self.getNearestGapPoint(mouseX=x)
                self.drawEntryPoints([point])
        elif self.mode == 'loopPoint2':
            if event.LeftDown():
                self.insertLoop()
            else:  # move spot if needed
                point = self.getNearestGapPoint(mouseX=x,
                                                exclude=self.gapsExcluded)
                self.drawEntryPoints([self.entryPointPosList[0], point])
def getNearestGapPoint(self, mouseX, exclude=()):
"""Get gap that is nearest to a particular mouse location
"""
d = 1000000000
nearest = None
for point in self.gapMidPoints:
if point in exclude:
continue
if (point - mouseX) ** 2 < d:
d = (point - mouseX) ** 2
nearest = point
return nearest
    def getGapPointsCrossingStreams(self, gapPoint):
        """For a given gap point, identify the gap points that are
        excluded by crossing a loop line

        A loop may not straddle another loop's boundary, so any gap whose
        nesting level differs from `gapPoint`'s is invalid as the second
        entry point.
        """
        gapArray = numpy.array(self.gapMidPoints)
        nestLevels = numpy.array(self.gapNestLevels)
        # boolean mask selects the nesting level(s) recorded at gapPoint
        thisLevel = nestLevels[gapArray == gapPoint]
        # gaps at any other nesting level are excluded
        invalidGaps = (gapArray[nestLevels != thisLevel]).tolist()
        return invalidGaps
def showContextMenu(self, component, xy):
menu = wx.Menu()
# get ID
# the ID is also the index to the element in the flow list
compID = self._menuComponentID
flow = self.frame.exp.flow
component = flow[compID]
compType = component.getType()
if compType == 'Routine':
for item in self.contextMenuItems:
id = self.contextIDFromItem[item]
menu.Append(id, self.contextMenuLabels[item])
menu.Bind(wx.EVT_MENU, self.onContextSelect, id=id)
self.frame.PopupMenu(menu, xy)
# destroy to avoid mem leak:
menu.Destroy()
else:
for item in self.contextMenuItems:
if item == 'rename':
continue
id = self.contextIDFromItem[item]
menu.Append(id, self.contextMenuLabels[item])
menu.Bind(wx.EVT_MENU, self.onContextSelect, id=id)
self.frame.PopupMenu(menu, xy)
# destroy to avoid mem leak:
menu.Destroy()
def onContextSelect(self, event):
"""Perform a given action on the component chosen
"""
# get ID
op = self.contextItemFromID[event.GetId()]
# the ID is also the index to the element in the flow list
compID = self._menuComponentID
flow = self.frame.exp.flow
component = flow[compID]
# if we have a Loop Initiator, remove the whole loop
if component.getType() == 'LoopInitiator':
component = component.loop
if op == 'remove':
self.removeComponent(component, compID)
self.frame.addToUndoStack(
"REMOVE `%s` from Flow" % component.params['name'])
if op == 'rename':
self.frame.renameRoutine(component)
    def removeComponent(self, component, compID):
        """Remove either a Routine or a Loop from the Flow

        :param component: the Routine or loop handler to remove
        :param compID: its index within the flow list

        Removing a Routine that is the sole content of a loop collapses
        that loop too (after user confirmation, via recursive calls).
        """
        flow = self.frame.exp.flow
        if component.getType() == 'Routine':
            # check whether this will cause a collapsed loop
            # prev and next elements on flow are a loop init/end
            prevIsLoop = nextIsLoop = False
            if compID > 0:  # there is at least one preceding
                prevIsLoop = (flow[compID - 1]).getType() == 'LoopInitiator'
            if len(flow) > (compID + 1):  # there is at least one more compon
                nextIsLoop = (flow[compID + 1]).getType() == 'LoopTerminator'
            if prevIsLoop and nextIsLoop:
                # because flow[compID+1] is a terminator
                loop = flow[compID + 1].loop
                msg = _translate('The "%s" Loop is about to be deleted as '
                                 'well (by collapsing). OK to proceed?')
                title = _translate('Impending Loop collapse')
                warnDlg = dialogs.MessageDialog(
                    parent=self.frame, message=msg % loop.params['name'],
                    type='Warning', title=title)
                resp = warnDlg.ShowModal()
                if resp in [wx.ID_CANCEL, wx.ID_NO]:
                    return  # abort
                elif resp == wx.ID_YES:
                    # make recursive calls to this same method until success
                    # remove the loop first
                    self.removeComponent(loop, compID)
                    # because the loop has been removed ID is now one less
                    self.removeComponent(component, compID - 1)
                    return  # have done the removal in final successful call
        # remove name from namespace only if it's a loop;
        # loops exist only in the flow
        elif 'conditionsFile' in component.params:
            conditionsFile = component.params['conditionsFile'].val
            if conditionsFile and conditionsFile not in ['None', '']:
                try:
                    # field names from the conditions file were added to the
                    # namespace when the loop was created; remove them too
                    trialList, fieldNames = data.importConditions(
                        conditionsFile, returnFieldNames=True)
                    for fname in fieldNames:
                        self.frame.exp.namespace.remove(fname)
                except Exception:
                    msg = ("Conditions file %s couldn't be found so names not"
                           " removed from namespace")
                    logging.debug(msg % conditionsFile)
            self.frame.exp.namespace.remove(component.params['name'].val)
        # perform the actual removal
        flow.removeComponent(component, id=compID)
        self.draw()
    def OnPaint(self, event):
        """Blit the recorded PseudoDC onto the window, clipped to the
        region that actually needs repainting.
        """
        # Create a buffered paint DC. It will create the real
        # wx.PaintDC and then blit the bitmap to it when dc is
        # deleted.
        dc = wx.GCDC(wx.BufferedPaintDC(self))
        # use PrepareDC to set position correctly
        self.PrepareDC(dc)
        # clear to the background colour before redrawing
        bg = wx.Brush(self.GetBackgroundColour())
        dc.SetBackground(bg)
        dc.Clear()
        # create a clipping rect from our position and size
        # and the Update Region
        xv, yv = self.GetViewStart()
        dx, dy = self.GetScrollPixelsPerUnit()
        x, y = (xv * dx, yv * dy)
        rgn = self.GetUpdateRegion()
        rgn.Offset(x, y)
        r = rgn.GetBox()
        # draw to the dc using the calculated clipping rect
        self.pdc.DrawToDCClipped(dc, r)
    def draw(self, evt=None):
        """This is the main function for drawing the Flow panel.
        It should be called whenever something changes in the exp.

        This then makes calls to other drawing functions,
        like drawEntryPoints...
        """
        if not hasattr(self.frame, 'exp'):
            # we haven't yet added an exp
            return
        # retrieve the current flow from the experiment
        expFlow = self.frame.exp.flow
        pdc = self.pdc
        # use the ID of the drawn icon to retrieve component (loop or routine)
        self.componentFromID = {}
        pdc.Clear()  # clear the screen
        pdc.RemoveAll()  # clear all objects (icon buttons)
        font = self.GetFont()
        # draw the main time line
        self.linePos = (2.5 * self.dpi, 0.5 * self.dpi)  # x,y of start
        # spacing constants scale with the flowSize preference (0-2)
        gap = self.dpi / (6, 4, 2)[self.appData['flowSize']]
        dLoopToBaseLine = (15, 25, 43)[self.appData['flowSize']]
        dBetweenLoops = (20, 24, 30)[self.appData['flowSize']]
        # guess virtual size; nRoutines wide by nLoops high
        # make bigger than needed and shrink later
        nRoutines = len(expFlow)
        nLoops = 0
        for entry in expFlow:
            if entry.getType() == 'LoopInitiator':
                nLoops += 1
        sizeX = nRoutines * self.dpi * 2
        sizeY = nLoops * dBetweenLoops + dLoopToBaseLine * 3
        self.SetVirtualSize(size=(sizeX, sizeY))
        # step through components in flow, get spacing from text size, etc
        currX = self.linePos[0]
        lineId = wx.NewIdRef()
        # NOTE(review): `cs` is not defined locally in this method -
        # presumably a module- or class-level colour palette; confirm.
        pdc.SetPen(wx.Pen(colour=cs['fl_flowline_bg']))
        pdc.DrawLine(x1=self.linePos[0] - gap, y1=self.linePos[1],
                     x2=self.linePos[0], y2=self.linePos[1])
        # NB the loop is itself the key, value is further info about it
        self.loops = {}
        nestLevel = 0
        maxNestLevel = 0
        self.gapMidPoints = [currX - gap / 2]
        self.gapNestLevels = [0]
        for ii, entry in enumerate(expFlow):
            if entry.getType() == 'LoopInitiator':
                # NB the loop is itself the dict key!?
                self.loops[entry.loop] = {
                    'init': currX, 'nest': nestLevel, 'id': ii}
                nestLevel += 1  # start of loop so increment level of nesting
                maxNestLevel = max(nestLevel, maxNestLevel)
            elif entry.getType() == 'LoopTerminator':
                # NB the loop is itself the dict key!
                self.loops[entry.loop]['term'] = currX
                nestLevel -= 1  # end of loop so decrement level of nesting
            elif entry.getType() == 'Routine':
                # just get currX based on text size, don't draw anything yet:
                currX = self.drawFlowRoutine(pdc, entry, id=ii,
                                             pos=[currX, self.linePos[1] - 10],
                                             draw=False)
            self.gapMidPoints.append(currX + gap / 2)
            self.gapNestLevels.append(nestLevel)
            pdc.SetId(lineId)
            pdc.SetPen(wx.Pen(colour=cs['fl_flowline_bg']))
            pdc.DrawLine(x1=currX, y1=self.linePos[1],
                         x2=currX + gap, y2=self.linePos[1])
            currX += gap
        lineRect = wx.Rect(self.linePos[0] - 2, self.linePos[1] - 2,
                           currX - self.linePos[0] + 2, 4)
        pdc.SetIdBounds(lineId, lineRect)
        # draw the loops first:
        maxHeight = 0
        for thisLoop in self.loops:
            thisInit = self.loops[thisLoop]['init']
            thisTerm = self.loops[thisLoop]['term']
            # deeper-nested loops are drawn closer to the baseline
            thisNest = maxNestLevel - self.loops[thisLoop]['nest'] - 1
            thisId = self.loops[thisLoop]['id']
            height = (self.linePos[1] + dLoopToBaseLine +
                      thisNest * dBetweenLoops)
            self.drawLoop(pdc, thisLoop, id=thisId,
                          startX=thisInit, endX=thisTerm,
                          base=self.linePos[1], height=height)
            self.drawLoopStart(pdc, pos=[thisInit, self.linePos[1]])
            self.drawLoopEnd(pdc, pos=[thisTerm, self.linePos[1]])
            if height > maxHeight:
                maxHeight = height
        # draw routines second (over loop lines):
        currX = self.linePos[0]
        for ii, entry in enumerate(expFlow):
            if entry.getType() == 'Routine':
                currX = self.drawFlowRoutine(pdc, entry, id=ii,
                                             pos=[currX, self.linePos[1] - 10])
            # NOTE(review): wx.Pen(wx.Pen(...)) makes a redundant copy;
            # a single wx.Pen(colour=...) would suffice (harmless)
            pdc.SetPen(wx.Pen(wx.Pen(colour=cs['fl_flowline_bg'])))
            pdc.DrawLine(x1=currX, y1=self.linePos[1],
                         x2=currX + gap, y2=self.linePos[1])
            currX += gap
        # shrink the virtual canvas to fit what was actually drawn
        self.SetVirtualSize(size=(currX + 100, maxHeight + 50))
        self.drawLineStart(pdc, (self.linePos[0] - gap, self.linePos[1]))
        self.drawLineEnd(pdc, (currX, self.linePos[1]))
        # refresh the visible window after drawing (using OnPaint)
        self.Refresh()
    def drawEntryPoints(self, posList):
        """Draw (or move) the circular entry-point markers shown while
        inserting a routine or loop; posList gives their x positions.
        """
        ptSize = (3, 4, 5)[self.appData['flowSize']]
        for n, pos in enumerate(posList):
            if n >= len(self.entryPointPosList):
                # draw for first time
                id = wx.NewIdRef()
                self.entryPointIDlist.append(id)
                self.pdc.SetId(id)
                self.pdc.SetBrush(wx.Brush(cs['fl_flowline_bg']))
                self.pdc.DrawCircle(pos, self.linePos[1], ptSize)
                r = self.pdc.GetIdBounds(id)
                self.OffsetRect(r)
                self.RefreshRect(r, False)
            elif pos == self.entryPointPosList[n]:
                pass  # nothing to see here, move along please :-)
            else:
                # move to new position
                dx = pos - self.entryPointPosList[n]
                dy = 0
                r = self.pdc.GetIdBounds(self.entryPointIDlist[n])
                self.pdc.TranslateId(self.entryPointIDlist[n], dx, dy)
                r2 = self.pdc.GetIdBounds(self.entryPointIDlist[n])
                # combine old and new locations to get redraw area
                rectToRedraw = r.Union(r2)
                rectToRedraw.Inflate(4, 4)
                self.OffsetRect(rectToRedraw)
                self.RefreshRect(rectToRedraw, False)
        self.entryPointPosList = posList
        # refresh the visible window after drawing (using OnPaint)
        self.Refresh()
def setDrawPoints(self, ptType, startPoint=None):
    """Set the clickable timeline points for 'routines', 'loops', or None.

    Both 'routines' and 'loops' currently use the gap mid-points;
    any other ptType clears the points.
    """
    if ptType in ('routines', 'loops'):
        self.pointsToDraw = self.gapMidPoints
    else:
        self.pointsToDraw = []
def drawLineStart(self, dc, pos):
    """Draw the thin vertical bar marking the start of the timeline.

    A circle looked bad here, so a narrow rectangle (as a polygon),
    centred vertically on pos, is drawn instead.
    """
    halfHeight = (9, 9, 12)[self.appData['flowSize']]
    width = (1, 1, 2)[self.appData['flowSize']]
    dc.SetBrush(wx.Brush(cs['fl_flowline_bg']))
    dc.SetPen(wx.Pen(cs['fl_flowline_bg']))
    corners = [[0, -halfHeight],
               [width, -halfHeight],
               [width, halfHeight],
               [0, halfHeight]]
    dc.DrawPolygon(corners, pos[0], pos[1])
def drawLineEnd(self, dc, pos):
    """Draw the small arrowhead that terminates the timeline at pos."""
    dc.SetBrush(wx.Brush(cs['fl_flowline_bg']))
    dc.SetPen(wx.Pen(cs['fl_flowline_bg']))
    # right-pointing triangle, 5 px wide and 6 px tall
    arrow = [[0, -3], [5, 0], [0, 3]]
    dc.DrawPolygon(arrow, pos[0], pos[1])
def drawLoopEnd(self, dc, pos, downwards=True):
    """Register the right-hand end of a loop without drawing anything.

    Only an ID with clickable bounds around pos is recorded — the idea
    being that the loop endpoint might later be grabbed and relocated.
    The arrowhead drawing is intentionally disabled (the ``downwards``
    flag would choose its direction if it were ever re-enabled).
    """
    endId = wx.NewIdRef()
    dc.SetId(endId)
    halfSize = (3, 4, 5)[self.appData['flowSize']]
    bounds = wx.Rect(pos[0] - halfSize, pos[1] - halfSize,
                     2 * halfSize, 2 * halfSize)
    dc.SetIdBounds(endId, bounds)
    return
def drawLoopStart(self, dc, pos, downwards=True):
    """Draw the direction arrow on the left side of a loop and register
    its clickable bounds.
    """
    startId = wx.NewIdRef()
    dc.SetId(startId)
    dc.SetBrush(wx.Brush(cs['fl_flowline_bg']))
    dc.SetPen(wx.Pen(cs['fl_flowline_bg']))
    size = (3, 4, 5)[self.appData['flowSize']]
    offset = (3, 2, 0)[self.appData['flowSize']]
    if downwards:
        # loop sits below the flow line: arrow points up
        triangle = [[size, size], [0, 0], [-size, size]]
        dc.DrawPolygon(triangle, pos[0], pos[1] + 3 * size - offset)
    else:
        # loop sits above the flow line: arrow points down
        triangle = [[size, 0], [0, size], [-size, 0]]
        dc.DrawPolygon(triangle, pos[0], pos[1] - 4 * size)
    bounds = wx.Rect(pos[0] - size, pos[1] - size, 2 * size, 2 * size)
    dc.SetIdBounds(startId, bounds)
def drawFlowRoutine(self, dc, routine, id, pos=None, draw=True):
    """Draw a box to show a routine on the timeline.

    :param dc: the (pseudo) device context to draw into
    :param routine: the Routine object to represent (its ``name`` and
        ``getMaxTime()`` are used)
    :param id: the PseudoDC id under which the box is registered
    :param pos: [x, y] of the top-left of the box; defaults to [0, 0]
    :param draw: False is a dry-run, esp to compute and return size
        without drawing or setting a pdc ID
    :return: x coordinate of the right-hand edge of the box (endX)
    """
    # BUG FIX: the old signature used the mutable default pos=[0, 0];
    # the body mutated pos[1], so the shared default list drifted on
    # every default-argument call (and callers' lists were mutated).
    # A None sentinel plus local unpacking avoids both problems.
    if pos is None:
        pos = [0, 0]
    x, y = pos[0], pos[1]
    name = routine.name
    # truncate long names at the smallest flow size
    if self.appData['flowSize'] == 0 and len(name) > 5:
        name = ' ' + name[:4] + '..'
    else:
        name = ' ' + name + ' '
    if draw:
        dc.SetId(id)
    font = self.GetFont()
    # font size is scaled by dpi with a per-platform, per-flowSize tweak
    if sys.platform == 'darwin':
        fontSizeDelta = (9, 6, 0)[self.appData['flowSize']]
        font.SetPointSize(1400 / self.dpi - fontSizeDelta)
    elif sys.platform.startswith('linux'):
        fontSizeDelta = (6, 4, 0)[self.appData['flowSize']]
        font.SetPointSize(1400 / self.dpi - fontSizeDelta)
    else:
        fontSizeDelta = (8, 4, 0)[self.appData['flowSize']]
        font.SetPointSize(1000 / self.dpi - fontSizeDelta)
    maxTime, nonSlip = routine.getMaxTime()
    # non-slip (fixed-duration) routines get their own colours
    if nonSlip:
        rtFill = cs['fl_routine_bg_nonslip']
        rtEdge = cs['fl_routine_bg_nonslip']
        rtText = cs['fl_routine_fg']
    else:
        rtFill = cs['fl_routine_bg_slip']
        rtEdge = cs['fl_routine_bg_slip']
        rtText = cs['fl_routine_fg']
    # get size based on text
    self.SetFont(font)
    if draw:
        dc.SetFont(font)
    w, h = self.GetFullTextExtent(name)[0:2]
    pad = (5, 10, 20)[self.appData['flowSize']]
    # draw box (nudged down slightly for the smaller flow sizes)
    y += 2 - self.appData['flowSize']
    rect = wx.Rect(x, y, w + pad, h + pad)
    endX = x + w + pad
    # the edge should match the text
    if draw:
        dc.SetPen(wx.Pen(wx.Colour(rtEdge[0], rtEdge[1],
                                   rtEdge[2], wx.ALPHA_OPAQUE)))
        dc.SetBrush(wx.Brush(rtFill))
        dc.DrawRoundedRectangle(
            rect, (4, 6, 8)[self.appData['flowSize']])
        # draw text
        dc.SetTextForeground(rtText)
        dc.DrawLabel(name, rect, alignment=wx.ALIGN_CENTRE)
        # annotate fixed-duration routines with their max time
        if nonSlip and self.appData['flowSize'] != 0:
            font.SetPointSize(font.GetPointSize() * 0.6)
            dc.SetFont(font)
            _align = wx.ALIGN_CENTRE | wx.ALIGN_BOTTOM
            dc.DrawLabel("(%.2fs)" % maxTime, rect, alignment=_align)
        self.componentFromID[id] = routine
        # set the clickable area for this component
        dc.SetIdBounds(id, rect)
    return endX
def drawLoop(self, dc, loop, id, startX, endX,
             base, height, downwards=True):
    """Draw one loop on the flow timeline: a rounded transparent
    outline from startX to endX (at the given base/height), plus a
    filled label box with the loop's name (and, optionally, its reps,
    condition count and loop type) centred on the loop line.

    The loop outline gets its own throwaway pdc id; the label box is
    registered under ``id`` so it is the clickable handle for the loop.
    """
    if downwards:
        up = -1
    else:
        up = +1
    # draw loop itself, as transparent rect with curved corners
    tmpId = wx.NewIdRef()
    dc.SetId(tmpId)
    # extra distance, in both h and w for curve
    curve = (6, 11, 15)[self.appData['flowSize']]
    yy = [base, height + curve * up, height +
          curve * up / 2, height]  # for area
    dc.SetPen(wx.Pen(cs['fl_flowline_bg']))
    vertOffset = 0  # 1 is interesting too
    area = wx.Rect(startX, base + vertOffset,
                   endX - startX, max(yy) - min(yy))
    # transparent brush: only the rounded outline should be visible
    dc.SetBrush(wx.Brush(wx.Colour(0, 0, 0, 0), style=wx.TRANSPARENT))
    # draws outline:
    dc.DrawRoundedRectangle(area, curve)
    dc.SetIdBounds(tmpId, area)
    flowsize = self.appData['flowSize']  # 0, 1, or 2
    # add a name label, loop info, except at smallest size
    name = loop.params['name'].val
    _show = self.appData['showLoopInfoInFlow']
    if _show and flowsize:
        # append "(nReps x nConditions loopType)"; this branch only
        # runs when flowsize > 0, so abbrev[flowsize] below is a dict
        _cond = 'conditions' in list(loop.params)
        if _cond and loop.params['conditions'].val:
            xnumTrials = 'x' + str(len(loop.params['conditions'].val))
        else:
            xnumTrials = ''
        name += ' (' + str(loop.params['nReps'].val) + xnumTrials
        # loop-type abbreviations, indexed by flowsize
        abbrev = ['',  # for flowsize == 0
                  {'random': 'rand.',
                   'sequential': 'sequ.',
                   'fullRandom': 'f-ran.',
                   'staircase': 'stair.',
                   'interleaved staircases': "int-str."},
                  {'random': 'random',
                   'sequential': 'sequential',
                   'fullRandom': 'fullRandom',
                   'staircase': 'staircase',
                   'interleaved staircases': "interl'vd stairs"}]
        name += ' ' + abbrev[flowsize][loop.params['loopType'].val] + ')'
    # pad / truncate the label text according to flow size
    if flowsize == 0:
        if len(name) > 9:
            name = ' ' + name[:8] + '..'
        else:
            name = ' ' + name[:9]
    else:
        name = ' ' + name + ' '
    dc.SetId(id)
    font = self.GetFont()
    # base point size varies per platform and flow size, scaled by dpi
    if sys.platform == 'darwin':
        basePtSize = (650, 750, 900)[flowsize]
    elif sys.platform.startswith('linux'):
        basePtSize = (750, 850, 1000)[flowsize]
    else:
        basePtSize = (700, 750, 800)[flowsize]
    font.SetPointSize(basePtSize / self.dpi)
    self.SetFont(font)
    dc.SetFont(font)
    # get size based on text
    pad = (5, 8, 10)[self.appData['flowSize']]
    w, h = self.GetFullTextExtent(name)[0:2]
    # centre the label box horizontally on the loop span
    x = startX + (endX - startX) / 2 - w / 2 - pad / 2
    y = (height - h / 2)
    # draw box
    rect = wx.Rect(x, y, w + pad, h + pad)
    # the edge should match the text
    dc.SetPen(wx.Pen(cs['fl_flowline_bg']))
    # try to make the loop fill brighter than the background canvas:
    dc.SetBrush(wx.Brush(cs['fl_flowline_bg']))
    dc.DrawRoundedRectangle(rect, (4, 6, 8)[flowsize])
    # draw text
    dc.SetTextForeground(cs['fl_flowline_fg'])
    dc.DrawText(name, x + pad / 2, y + pad / 2)
    self.componentFromID[id] = loop
    # set the area for this component
    dc.SetIdBounds(id, rect)
def extractText(stream):
    """Take a byte stream (or any file object of type b?) and return
    text ready for appending to a wx text view.

    :param stream: stream from wx.Process or any byte stream from a file
    :return: text converted to unicode on Python 3 (decoded as UTF-8);
        returned unchanged on Python 2
    """
    raw = stream.read()
    # Python 2 text views take bytes directly; Python 3 needs unicode
    return raw.decode('utf-8') if constants.PY3 else raw
|
import os
import platform
import shutil
import unittest
from abjad.tools import abjadbooktools
from abjad.tools import systemtools
@unittest.skipIf(
    platform.python_implementation() != 'CPython',
    'Only for CPython.',
    )
class TestLaTeXDocumentHandler(unittest.TestCase):
    """Tests for abjadbooktools.LaTeXDocumentHandler with an explicit
    latex_root_directory, checking both the rebuilt source and the
    assets written to disk.
    """

    # NOTE: all paths are computed — and source.tex / expected.tex are
    # read — at class-definition (import) time, relative to this test
    # module's own directory.
    test_directory = os.path.dirname(__file__)
    assets_directory = os.path.join(test_directory, 'assets')
    source_path = os.path.join(
        test_directory,
        'chapters',
        'chapter-1',
        'source.tex',
        )
    with open(source_path, 'r') as file_pointer:
        source_contents = file_pointer.read()
    target_path = os.path.join(
        test_directory,
        'chapters',
        'chapter-1',
        'target.tex',
        )
    expected_path = os.path.join(
        test_directory,
        'chapters',
        'chapter-1',
        'expected.tex',
        )
    with open(expected_path, 'r') as file_pointer:
        expected_contents = file_pointer.read()
    # asset file names are content-hashed, so they are stable across runs
    expected_asset_names = (
        'lilypond-65d03e56d1fdd997411f2f04c401fe16.ly',
        'lilypond-65d03e56d1fdd997411f2f04c401fe16.pdf',
        )

    def setUp(self):
        # start from a clean slate: no leftover assets or target file
        if os.path.exists(self.assets_directory):
            shutil.rmtree(self.assets_directory)
        if os.path.exists(self.target_path):
            os.remove(self.target_path)

    def test_latex_root_directory_1(self):
        # rebuild in-memory source: the <abjad> block should be expanded
        # into a lstlisting plus an \includegraphics whose path is
        # relative to the latex root directory
        input_file_contents = [
            '\\begin{comment}',
            '<abjad>',
            'note = Note(0, (1, 4))',
            'show(note)',
            '</abjad>',
            '\\end{comment}',
            ]
        assets_directory = 'ExamplePaper/assets'
        input_file_path = 'ExamplePaper/chapters/chapter-1/section-2.tex'
        latex_root_directory = 'ExamplePaper'
        document_handler = abjadbooktools.LaTeXDocumentHandler(
            assets_directory=assets_directory,
            input_file_contents=input_file_contents,
            input_file_path=input_file_path,
            latex_root_directory=latex_root_directory,
            )
        rebuilt_source = document_handler(return_source=True)
        assert rebuilt_source == systemtools.TestManager.clean_string(
            '''
            \\begin{comment}
            <abjad>
            note = Note(0, (1, 4))
            show(note)
            </abjad>
            \\end{comment}
            %%% ABJADBOOK START %%%
            \\begin{lstlisting}
            >>> note = Note(0, (1, 4))
            >>> show(note)
            \\end{lstlisting}
            \\noindent\\includegraphics{assets/lilypond-65d03e56d1fdd997411f2f04c401fe16.pdf}
            %%% ABJADBOOK END %%%
            ''',
            )

    def test_latex_root_directory_2(self):
        # full round-trip on disk: render source.tex into target.tex and
        # check both the rendered contents and the emitted asset files
        assert not os.path.exists(self.target_path)
        assert not os.path.exists(self.assets_directory)
        document_handler = abjadbooktools.LaTeXDocumentHandler.from_path(
            input_file_path=self.source_path,
            assets_directory=self.assets_directory,
            latex_root_directory=self.test_directory,
            )
        document_handler(output_file_path=self.target_path)
        assert os.path.exists(self.target_path)
        assert os.path.exists(self.assets_directory)
        with open(self.target_path, 'r') as file_pointer:
            target_contents = file_pointer.read()
        assert target_contents == self.expected_contents
        assert tuple(sorted(os.listdir(self.assets_directory))) == \
            self.expected_asset_names

    def tearDown(self):
        # remove generated files and restore source.tex to its original
        # contents (the handler may rewrite it in place)
        if os.path.exists(self.assets_directory):
            shutil.rmtree(self.assets_directory)
        if os.path.exists(self.target_path):
            os.remove(self.target_path)
        with open(self.source_path, 'w') as file_pointer:
            file_pointer.write(self.source_contents)
|
"""
Copyright 2016 Walter José and Alex de Sá
This file is part of the RECIPE Algorithm.
The RECIPE is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
RECIPE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. See http://www.gnu.org/licenses/.
"""
from sklearn.tree import DecisionTreeClassifier, ExtraTreeClassifier
def treeEstimator(args):
    """Build a scikit-learn DecisionTreeClassifier or ExtraTreeClassifier
    from a list of string-valued hyper-parameters.

    Parameters (all strings, matched by substring)
    ----------
    args[0] : classifier choice; contains "DecisionTreeClassifier" for a
        decision tree, anything else yields an ExtraTreeClassifier
    args[1] : criterion ("gini"/"entropy"; "None" keeps default "gini")
    args[2] : splitter ("best"/"random"; "None" keeps default "best")
    args[3] : class_weight ("balanced" or "None"/anything else -> None)
    args[4] : presort ("True", "auto", anything else -> False);
        only passed to DecisionTreeClassifier
    args[5] : max_features ("sqrt", "log2", "None", or a float literal)
    args[6] : max_depth ("None" or an int literal)
    args[7] : min_weight_fraction_leaf ("None" -> 0.0, or a float literal)
    args[8] : max_leaf_nodes ("None" or an int literal)

    Returns
    -------
    An unfitted tree classifier configured with the parsed values
    (random_state fixed at 42 for reproducibility).
    """
    crit = "gini"
    if "None" not in args[1]:
        crit = args[1]
    split = "best"
    if "None" not in args[2]:
        split = args[2]
    # BUG FIX: cw was previously assigned only when args[3] mentioned
    # "balanced" or "None", so any other value raised NameError below.
    # Default to None (scikit-learn's own default) up front.
    cw = None
    if "balanced" in args[3]:
        cw = "balanced"
    psort = False
    if "True" in args[4]:
        psort = True
    elif "auto" in args[4]:
        psort = "auto"
    if "sqrt" in args[5]:
        mf = "sqrt"
    elif "log2" in args[5]:
        mf = "log2"
    elif "None" in args[5]:
        mf = None
    else:
        mf = float(args[5])
    md = None if "None" in args[6] else int(args[6])
    mwfl = 0.0
    if "None" not in args[7]:
        mwfl = float(args[7])
    mln = None if "None" in args[8] else int(args[8])
    if "DecisionTreeClassifier" in args[0]:
        # NOTE(review): presort was deprecated/removed in newer
        # scikit-learn releases — confirm the pinned sklearn version.
        return DecisionTreeClassifier(
            criterion=crit, splitter=split, max_depth=md,
            min_samples_split=2, min_samples_leaf=1,
            min_weight_fraction_leaf=mwfl, max_features=mf,
            random_state=42, max_leaf_nodes=mln, class_weight=cw,
            presort=psort)
    return ExtraTreeClassifier(
        criterion=crit, splitter=split, max_depth=md,
        min_samples_split=2, min_samples_leaf=1,
        min_weight_fraction_leaf=mwfl, max_features=mf,
        random_state=42, max_leaf_nodes=mln, class_weight=cw)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.