repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
spesmilo/electrum | electrum/plugins/trezor/qt.py | 1 | 30784 | from functools import partial
import threading
from PyQt5.QtCore import Qt, QEventLoop, pyqtSignal
from PyQt5.QtWidgets import (QVBoxLayout, QLabel, QGridLayout, QPushButton,
QHBoxLayout, QButtonGroup, QGroupBox, QDialog,
QLineEdit, QRadioButton, QCheckBox, QWidget,
QMessageBox, QFileDialog, QSlider, QTabWidget)
from electrum.gui.qt.util import (WindowModalDialog, WWLabel, Buttons, CancelButton,
OkButton, CloseButton, PasswordLineEdit, getOpenFileName)
from electrum.i18n import _
from electrum.plugin import hook
from electrum.util import bh2u
from ..hw_wallet.qt import QtHandlerBase, QtPluginBase
from ..hw_wallet.plugin import only_hook_if_libraries_available
from .trezor import (TrezorPlugin, TIM_NEW, TIM_RECOVER, TrezorInitSettings,
PASSPHRASE_ON_DEVICE, Capability, BackupType, RecoveryDeviceType)
# User-facing help/warning texts (all wrapped in i18n's _()).

# Short passphrase explainer; shown in the init wizard's expert settings.
PASSPHRASE_HELP_SHORT = _(
    "Passphrases allow you to access new wallets, each "
    "hidden behind a particular case-sensitive passphrase.")

# Extended explainer; shown in the settings dialog's Advanced tab.
PASSPHRASE_HELP = PASSPHRASE_HELP_SHORT + " " + _(
    "You need to create a separate Electrum wallet for each passphrase "
    "you use as they each generate different addresses. Changing "
    "your passphrase does not lose other wallets, each is still "
    "accessible behind its own passphrase.")

# Shown next to the "Enable PIN protection" checkbox in the init wizard.
RECOMMEND_PIN = _(
    "You should enable PIN protection. Your PIN is the only protection "
    "for your bitcoins if your device is lost or stolen.")

# Red warning displayed wherever passphrase protection can be toggled.
PASSPHRASE_NOT_PIN = _(
    "If you forget a passphrase you will be unable to access any "
    "bitcoins in the wallet behind it. A passphrase is not a PIN. "
    "Only change this if you are sure you understand it.")

# Instructions shown at the top of MatrixDialog during matrix recovery.
MATRIX_RECOVERY = _(
    "Enter the recovery words by pressing the buttons according to what "
    "the device shows on its display. You can also use your NUMPAD.\n"
    "Press BACKSPACE to go back a choice or word.\n")

# Tooltip for the "Enable seedless mode" expert checkbox.
SEEDLESS_MODE_WARNING = _(
    "In seedless mode, the mnemonic seed words are never shown to the user.\n"
    "There is no backup, and the user has a proof of this.\n"
    "This is an advanced feature, only suggested to be used in redundant multisig setups.")
class MatrixDialog(WindowModalDialog):
    """3x3 keypad dialog for Trezor "matrix" style seed recovery.

    The device shows the actual (scrambled) choices on its own display;
    the user clicks the matching position here, or uses NUMPAD keys 1-9,
    BACKSPACE and ESCAPE.  Each keypress is stored in ``self.data`` and a
    private ``QEventLoop`` blocks ``get_matrix()`` until input arrives.
    """

    def __init__(self, parent):
        super(MatrixDialog, self).__init__(parent)
        self.setWindowTitle(_("Trezor Matrix Recovery"))
        # Number of active choices: 9 (full 3x3) or 6 (middle column
        # disabled) -- switched via get_matrix()/refresh().
        self.num = 9
        self.loop = QEventLoop()

        vbox = QVBoxLayout(self)
        vbox.addWidget(WWLabel(MATRIX_RECOVERY))

        grid = QGridLayout()
        grid.setSpacing(0)
        self.char_buttons = []
        for y in range(3):
            for x in range(3):
                button = QPushButton('?')
                # Each button reports the key code of its digit '1'..'9'.
                button.clicked.connect(partial(self.process_key, ord('1') + y * 3 + x))
                # Row index 3 - y: rows are laid out bottom-up.
                grid.addWidget(button, 3 - y, x)
                self.char_buttons.append(button)
        vbox.addLayout(grid)

        self.backspace_button = QPushButton("<=")
        self.backspace_button.clicked.connect(partial(self.process_key, Qt.Key_Backspace))
        self.cancel_button = QPushButton(_("Cancel"))
        self.cancel_button.clicked.connect(partial(self.process_key, Qt.Key_Escape))
        buttons = Buttons(self.backspace_button, self.cancel_button)
        vbox.addSpacing(40)
        vbox.addLayout(buttons)
        self.refresh()
        self.show()

    def refresh(self):
        # In 6-choice mode only two columns are valid: grey out the middle
        # column (button indices 1, 4 and 7).
        for y in range(3):
            self.char_buttons[3 * y + 1].setEnabled(self.num == 9)

    def is_valid(self, key):
        # True for the key codes of the characters '1' through '9'.
        return key >= ord('1') and key <= ord('9')

    def process_key(self, key):
        """Translate a key code into ``self.data`` and wake get_matrix()."""
        self.data = None
        if key == Qt.Key_Backspace:
            self.data = '\010'  # ASCII backspace character
        elif key == Qt.Key_Escape:
            self.data = 'x'  # sentinel meaning: user cancelled
        elif self.is_valid(key):
            self.char_buttons[key - ord('1')].setFocus()
            self.data = '%c' % key
        if self.data:
            # Unblock the nested event loop running in get_matrix().
            self.loop.exit(0)

    def keyPressEvent(self, event):
        # Try to handle the key ourselves; otherwise defer to Qt's default.
        self.process_key(event.key())
        if not self.data:
            QDialog.keyPressEvent(self, event)

    def get_matrix(self, num):
        """Block until the user makes one choice on a *num*-entry matrix.

        The result is left in ``self.data`` for the caller to read.
        """
        self.num = num
        self.refresh()
        self.loop.exec_()
class QtHandler(QtHandlerBase):
    """GUI-side handler for Trezor user interaction.

    ``get_pin()``/``get_matrix()`` are called from the keystore's worker
    thread: they emit a Qt signal so the dialog code runs on the GUI
    thread, then block on ``self.done`` until the dialog has stored its
    result on the handler.
    """

    pin_signal = pyqtSignal(object, object)
    matrix_signal = pyqtSignal(object)
    close_matrix_dialog_signal = pyqtSignal()

    def __init__(self, win, pin_matrix_widget_class, device):
        super(QtHandler, self).__init__(win, device)
        self.pin_signal.connect(self.pin_dialog)
        self.matrix_signal.connect(self.matrix_recovery_dialog)
        self.close_matrix_dialog_signal.connect(self._close_matrix_dialog)
        self.pin_matrix_widget_class = pin_matrix_widget_class
        self.matrix_dialog = None
        # Set externally when the device can take the passphrase itself;
        # controls whether passphrase_dialog offers the on-device button.
        self.passphrase_on_device = False

    def get_pin(self, msg, *, show_strength=True):
        """Prompt for a PIN.  Called from the worker thread; blocks."""
        self.done.clear()
        self.pin_signal.emit(msg, show_strength)
        self.done.wait()
        return self.response

    def get_matrix(self, msg):
        """Prompt for one matrix-recovery keypress.  Worker thread; blocks."""
        self.done.clear()
        self.matrix_signal.emit(msg)
        self.done.wait()
        data = self.matrix_dialog.data
        if data == 'x':
            # 'x' is MatrixDialog's cancel sentinel: tear the dialog down.
            self.close_matrix_dialog()
        return data

    def _close_matrix_dialog(self):
        # Runs on the GUI thread (connected to close_matrix_dialog_signal).
        if self.matrix_dialog:
            self.matrix_dialog.accept()
            self.matrix_dialog = None

    def close_matrix_dialog(self):
        # Thread-safe wrapper around _close_matrix_dialog().
        self.close_matrix_dialog_signal.emit()

    def pin_dialog(self, msg, show_strength):
        """Show the PIN-matrix dialog (GUI thread) and store the response."""
        # Needed e.g. when resetting a device
        self.clear_dialog()
        dialog = WindowModalDialog(self.top_level_window(), _("Enter PIN"))
        matrix = self.pin_matrix_widget_class(show_strength)
        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(msg))
        vbox.addWidget(matrix)
        vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
        dialog.setLayout(vbox)
        dialog.exec_()
        self.response = str(matrix.get_value())
        self.done.set()

    def matrix_recovery_dialog(self, msg):
        """Show (or reuse) the MatrixDialog and collect one keypress."""
        if not self.matrix_dialog:
            self.matrix_dialog = MatrixDialog(self.top_level_window())
        self.matrix_dialog.get_matrix(msg)
        self.done.set()

    def passphrase_dialog(self, msg, confirm):
        """Prompt for a passphrase (GUI thread); result in self.passphrase.

        self.passphrase ends up as the entered text, PASSPHRASE_ON_DEVICE,
        or None if the dialog was dismissed.
        """
        # If confirm is true, require the user to enter the passphrase twice
        parent = self.top_level_window()
        d = WindowModalDialog(parent, _('Enter Passphrase'))

        OK_button = OkButton(d, _('Enter Passphrase'))
        OnDevice_button = QPushButton(_('Enter Passphrase on Device'))

        new_pw = PasswordLineEdit()
        conf_pw = PasswordLineEdit()

        vbox = QVBoxLayout()
        label = QLabel(msg + "\n")
        label.setWordWrap(True)

        grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnMinimumWidth(0, 150)
        grid.setColumnMinimumWidth(1, 100)
        grid.setColumnStretch(1, 1)

        vbox.addWidget(label)
        grid.addWidget(QLabel(_('Passphrase:')), 0, 0)
        grid.addWidget(new_pw, 0, 1)

        if confirm:
            grid.addWidget(QLabel(_('Confirm Passphrase:')), 1, 0)
            grid.addWidget(conf_pw, 1, 1)

        vbox.addLayout(grid)

        def enable_OK():
            # OK is enabled only when both entries agree (or when no
            # confirmation was requested).
            if not confirm:
                ok = True
            else:
                ok = new_pw.text() == conf_pw.text()
            OK_button.setEnabled(ok)

        new_pw.textChanged.connect(enable_OK)
        conf_pw.textChanged.connect(enable_OK)

        vbox.addWidget(OK_button)
        if self.passphrase_on_device:
            vbox.addWidget(OnDevice_button)
        d.setLayout(vbox)

        self.passphrase = None

        def ok_clicked():
            self.passphrase = new_pw.text()

        def on_device_clicked():
            # Sentinel telling the plugin to prompt on the device instead.
            self.passphrase = PASSPHRASE_ON_DEVICE

        OK_button.clicked.connect(ok_clicked)
        OnDevice_button.clicked.connect(on_device_clicked)
        OnDevice_button.clicked.connect(d.accept)

        d.exec_()
        self.done.set()
class QtPlugin(QtPluginBase):
    # Derived classes must provide the following class-static variables:
    #   icon_file
    #   pin_matrix_widget_class

    @only_hook_if_libraries_available
    @hook
    def receive_menu(self, menu, addrs, wallet):
        """Add a "Show on <device>" entry to the receive-address menu.

        Only offered when exactly one address is selected; one entry is
        added per keystore of this plugin's type.
        """
        if len(addrs) != 1:
            return
        for keystore in wallet.get_keystores():
            if type(keystore) == self.keystore_class:
                # Default argument binds the current keystore into the
                # closure (avoids the late-binding loop-variable pitfall).
                def show_address(keystore=keystore):
                    keystore.thread.add(partial(self.show_address, wallet, addrs[0], keystore))
                device_name = "{} ({})".format(self.device, keystore.label)
                menu.addAction(_("Show on {}").format(device_name), show_address)

    def show_settings_dialog(self, window, keystore):
        """Select the device on the keystore thread, then open the dialog."""
        def connect():
            device_id = self.choose_device(window, keystore)
            return device_id
        def show_dialog(device_id):
            if device_id:
                SettingsDialog(window, self, keystore, device_id).exec_()
        keystore.thread.add(connect, on_success=show_dialog)

    def request_trezor_init_settings(self, wizard, method, device_id):
        """Show the device-initialisation form and collect the user's choices.

        method is TIM_NEW (create a new seed) or TIM_RECOVER (restore one);
        the form adapts to the device model and firmware capabilities.
        Returns a TrezorInitSettings.  Raises if the device disconnected.
        """
        vbox = QVBoxLayout()
        next_enabled = True

        devmgr = self.device_manager()
        client = devmgr.client_by_id(device_id)
        if not client:
            raise Exception(_("The device was disconnected."))
        # model is compared against "1"/"T" below -- presumably Trezor One
        # vs Model T.
        model = client.get_trezor_model()
        fw_version = client.client.version
        capabilities = client.client.features.capabilities
        have_shamir = Capability.Shamir in capabilities

        # label
        label = QLabel(_("Enter a label to name your device:"))
        name = QLineEdit()
        hl = QHBoxLayout()
        hl.addWidget(label)
        hl.addWidget(name)
        hl.addStretch(1)
        vbox.addLayout(hl)

        # Backup type (BIP39 single seed vs SLIP39 Shamir variants)
        gb_backuptype = QGroupBox()
        hbox_backuptype = QHBoxLayout()
        gb_backuptype.setLayout(hbox_backuptype)
        vbox.addWidget(gb_backuptype)
        gb_backuptype.setTitle(_('Select backup type:'))
        bg_backuptype = QButtonGroup()

        rb_single = QRadioButton(gb_backuptype)
        rb_single.setText(_('Single seed (BIP39)'))
        bg_backuptype.addButton(rb_single)
        bg_backuptype.setId(rb_single, BackupType.Bip39)
        hbox_backuptype.addWidget(rb_single)
        rb_single.setChecked(True)

        rb_shamir = QRadioButton(gb_backuptype)
        rb_shamir.setText(_('Shamir'))
        bg_backuptype.addButton(rb_shamir)
        bg_backuptype.setId(rb_shamir, BackupType.Slip39_Basic)
        hbox_backuptype.addWidget(rb_shamir)
        rb_shamir.setEnabled(Capability.Shamir in capabilities)
        rb_shamir.setVisible(False)  # visible with "expert settings"

        rb_shamir_groups = QRadioButton(gb_backuptype)
        rb_shamir_groups.setText(_('Super Shamir'))
        bg_backuptype.addButton(rb_shamir_groups)
        bg_backuptype.setId(rb_shamir_groups, BackupType.Slip39_Advanced)
        hbox_backuptype.addWidget(rb_shamir_groups)
        rb_shamir_groups.setEnabled(Capability.ShamirGroups in capabilities)
        rb_shamir_groups.setVisible(False)  # visible with "expert settings"

        # word count
        word_count_buttons = {}

        gb_numwords = QGroupBox()
        hbox1 = QHBoxLayout()
        gb_numwords.setLayout(hbox1)
        vbox.addWidget(gb_numwords)
        gb_numwords.setTitle(_("Select seed/share length:"))
        bg_numwords = QButtonGroup()
        for count in (12, 18, 20, 24, 33):
            rb = QRadioButton(gb_numwords)
            word_count_buttons[count] = rb
            rb.setText(_("{:d} words").format(count))
            bg_numwords.addButton(rb)
            bg_numwords.setId(rb, count)
            hbox1.addWidget(rb)
            # The last iteration wins here; configure_word_counts() below
            # picks the real default.
            rb.setChecked(True)

        def configure_word_counts():
            # Which word counts are valid depends on model, method and the
            # selected backup type; also picks a sensible default.
            if model == "1":
                checked_wordcount = 24
            else:
                checked_wordcount = 12

            if method == TIM_RECOVER:
                if have_shamir:
                    valid_word_counts = (12, 18, 20, 24, 33)
                else:
                    valid_word_counts = (12, 18, 24)
            elif rb_single.isChecked():
                valid_word_counts = (12, 18, 24)
                gb_numwords.setTitle(_('Select seed length:'))
            else:
                # Shamir shares: 20 or 33 words.
                valid_word_counts = (20, 33)
                checked_wordcount = 20
                gb_numwords.setTitle(_('Select share length:'))

            word_count_buttons[checked_wordcount].setChecked(True)
            for c, btn in word_count_buttons.items():
                btn.setVisible(c in valid_word_counts)

        bg_backuptype.buttonClicked.connect(configure_word_counts)
        configure_word_counts()

        # set up conditional visibility:
        # 1. backup_type is only visible when creating new seed
        gb_backuptype.setVisible(method == TIM_NEW)
        # 2. word_count is not visible when recovering on TT
        if method == TIM_RECOVER and model != "1":
            gb_numwords.setVisible(False)

        # PIN
        cb_pin = QCheckBox(_('Enable PIN protection'))
        cb_pin.setChecked(True)
        vbox.addWidget(WWLabel(RECOMMEND_PIN))
        vbox.addWidget(cb_pin)

        # "expert settings" button -- reveals the hidden options below.
        expert_vbox = QVBoxLayout()
        expert_widget = QWidget()
        expert_widget.setLayout(expert_vbox)
        expert_widget.setVisible(False)
        expert_button = QPushButton(_("Show expert settings"))
        def show_expert_settings():
            expert_button.setVisible(False)
            expert_widget.setVisible(True)
            rb_shamir.setVisible(True)
            rb_shamir_groups.setVisible(True)
        expert_button.clicked.connect(show_expert_settings)
        vbox.addWidget(expert_button)

        # passphrase
        passphrase_msg = WWLabel(PASSPHRASE_HELP_SHORT)
        passphrase_warning = WWLabel(PASSPHRASE_NOT_PIN)
        passphrase_warning.setStyleSheet("color: red")
        cb_phrase = QCheckBox(_('Enable passphrases'))
        cb_phrase.setChecked(False)
        expert_vbox.addWidget(passphrase_msg)
        expert_vbox.addWidget(passphrase_warning)
        expert_vbox.addWidget(cb_phrase)

        # ask for recovery type (random word order OR matrix)
        bg_rectype = None
        if method == TIM_RECOVER and model == '1':
            gb_rectype = QGroupBox()
            hbox_rectype = QHBoxLayout()
            gb_rectype.setLayout(hbox_rectype)
            expert_vbox.addWidget(gb_rectype)
            gb_rectype.setTitle(_("Select recovery type:"))
            bg_rectype = QButtonGroup()

            rb1 = QRadioButton(gb_rectype)
            rb1.setText(_('Scrambled words'))
            bg_rectype.addButton(rb1)
            bg_rectype.setId(rb1, RecoveryDeviceType.ScrambledWords)
            hbox_rectype.addWidget(rb1)
            rb1.setChecked(True)

            rb2 = QRadioButton(gb_rectype)
            rb2.setText(_('Matrix'))
            bg_rectype.addButton(rb2)
            bg_rectype.setId(rb2, RecoveryDeviceType.Matrix)
            hbox_rectype.addWidget(rb2)

        # no backup
        cb_no_backup = None
        if method == TIM_NEW:
            cb_no_backup = QCheckBox(f'''{_('Enable seedless mode')}''')
            cb_no_backup.setChecked(False)
            # Seedless mode requires firmware >= 1.7.1 (model "1") or
            # >= 2.0.9 (model "T").
            if (model == '1' and fw_version >= (1, 7, 1)
                    or model == 'T' and fw_version >= (2, 0, 9)):
                cb_no_backup.setToolTip(SEEDLESS_MODE_WARNING)
            else:
                cb_no_backup.setEnabled(False)
                cb_no_backup.setToolTip(_('Firmware version too old.'))
            expert_vbox.addWidget(cb_no_backup)

        vbox.addWidget(expert_widget)
        wizard.exec_layout(vbox, next_enabled=next_enabled)

        return TrezorInitSettings(
            word_count=bg_numwords.checkedId(),
            label=name.text(),
            pin_enabled=cb_pin.isChecked(),
            passphrase_enabled=cb_phrase.isChecked(),
            recovery_type=bg_rectype.checkedId() if bg_rectype else None,
            backup_type=bg_backuptype.checkedId(),
            no_backup=cb_no_backup.isChecked() if cb_no_backup else False,
        )
class Plugin(TrezorPlugin, QtPlugin):
    """Concrete Trezor plugin wiring the Qt GUI pieces to TrezorPlugin."""

    icon_unpaired = "trezor_unpaired.png"
    icon_paired = "trezor.png"

    def create_handler(self, window):
        """Return a QtHandler for user interaction bound to *window*."""
        return QtHandler(window, self.pin_matrix_widget_class(), self.device)

    @classmethod
    def pin_matrix_widget_class(cls):
        """Return trezorlib's PIN matrix widget class.

        Imported lazily so that merely loading this module does not require
        trezorlib's optional Qt extras.
        """
        # FIX: classmethod's first parameter was named "self"; it receives
        # the class and must be named "cls" (PEP 8).
        from trezorlib.qt.pinmatrix import PinMatrixWidget
        return PinMatrixWidget
class SettingsDialog(WindowModalDialog):
    '''This dialog doesn't require a device be paired with a wallet.
    We want users to be able to wipe a device even if they've forgotten
    their PIN.'''

    def __init__(self, window, plugin, keystore, device_id):
        title = _("{} Settings").format(plugin.device)
        super(SettingsDialog, self).__init__(window, title)
        self.setMaximumWidth(540)

        devmgr = plugin.device_manager()
        config = devmgr.config
        handler = keystore.handler
        thread = keystore.thread
        # Homescreen bitmap dimensions used by the non-TOIF code path.
        hs_cols, hs_rows = (128, 64)

        def invoke_client(method, *args, **kw_args):
            """Run *method* on the device client in the keystore thread.

            On success the refreshed features are fed to update() below.
            Pass unpair_after=True to unpair the device once done.
            method=None fetches features only (used to populate the dialog).
            """
            unpair_after = kw_args.pop('unpair_after', False)

            def task():
                client = devmgr.client_by_id(device_id)
                if not client:
                    raise RuntimeError("Device not connected")
                if method:
                    getattr(client, method)(*args, **kw_args)
                if unpair_after:
                    devmgr.unpair_id(device_id)
                return client.features

            thread.add(task, on_success=update)

        def update(features):
            # Refresh every label/button from freshly fetched device features.
            self.features = features
            set_label_enabled()
            if features.bootloader_hash:
                bl_hash = bh2u(features.bootloader_hash)
                # Split the hex digest over two lines for display.
                bl_hash = "\n".join([bl_hash[:32], bl_hash[32:]])
            else:
                bl_hash = "N/A"
            noyes = [_("No"), _("Yes")]
            endis = [_("Enable Passphrases"), _("Disable Passphrases")]
            disen = [_("Disabled"), _("Enabled")]
            setchange = [_("Set a PIN"), _("Change PIN")]

            version = "%d.%d.%d" % (features.major_version,
                                    features.minor_version,
                                    features.patch_version)

            device_label.setText(features.label)
            pin_set_label.setText(noyes[features.pin_protection])
            passphrases_label.setText(disen[features.passphrase_protection])
            bl_hash_label.setText(bl_hash)
            label_edit.setText(features.label)
            device_id_label.setText(features.device_id)
            initialized_label.setText(noyes[features.initialized])
            version_label.setText(version)
            clear_pin_button.setVisible(features.pin_protection)
            clear_pin_warning.setVisible(features.pin_protection)
            pin_button.setText(setchange[features.pin_protection])
            pin_msg.setVisible(not features.pin_protection)
            passphrase_button.setText(endis[features.passphrase_protection])
            language_label.setText(features.language)

        def set_label_enabled():
            # "Apply" only when the edited label differs from the device's.
            label_apply.setEnabled(label_edit.text() != self.features.label)

        def rename():
            invoke_client('change_label', label_edit.text())

        def toggle_passphrase():
            title = _("Confirm Toggle Passphrase Protection")
            currently_enabled = self.features.passphrase_protection
            if currently_enabled:
                msg = _("After disabling passphrases, you can only pair this "
                        "Electrum wallet if it had an empty passphrase. "
                        "If its passphrase was not empty, you will need to "
                        "create a new wallet with the install wizard. You "
                        "can use this wallet again at any time by re-enabling "
                        "passphrases and entering its passphrase.")
            else:
                msg = _("Your current Electrum wallet can only be used with "
                        "an empty passphrase. You must create a separate "
                        "wallet with the install wizard for other passphrases "
                        "as each one generates a new set of addresses.")
            msg += "\n\n" + _("Are you sure you want to proceed?")
            if not self.question(msg, title=title):
                return
            # Disabling passphrases unpairs the device from this wallet.
            invoke_client('toggle_passphrase', unpair_after=currently_enabled)

        def change_homescreen():
            filename = getOpenFileName(
                parent=self,
                title=_("Choose Homescreen"),
                config=config,
            )
            if not filename:
                return  # user cancelled
            if filename.endswith('.toif'):
                # TOIF images are passed through verbatim after a magic/size
                # header check (the header encodes 144x144).
                img = open(filename, 'rb').read()
                if img[:8] != b'TOIf\x90\x00\x90\x00':
                    handler.show_error('File is not a TOIF file with size of 144x144')
                    return
            else:
                from PIL import Image  # FIXME
                im = Image.open(filename)
                if im.size != (128, 64):
                    handler.show_error('Image must be 128 x 64 pixels')
                    return
                # Pack the image into a 1-bit, row-major, MSB-first bitmap
                # of 128*64/8 = 1024 bytes.
                im = im.convert('1')
                pix = im.load()
                img = bytearray(1024)
                for j in range(64):
                    for i in range(128):
                        if pix[i, j]:
                            o = (i + j * 128)
                            img[o // 8] |= (1 << (7 - o % 8))
                img = bytes(img)
            invoke_client('change_homescreen', img)

        def clear_homescreen():
            invoke_client('change_homescreen', b'\x00')

        def set_pin():
            invoke_client('set_pin', remove=False)

        def clear_pin():
            invoke_client('set_pin', remove=True)

        def wipe_device():
            # Extra confirmation when the open wallet still holds funds.
            wallet = window.wallet
            if wallet and sum(wallet.get_balance()):
                title = _("Confirm Device Wipe")
                msg = _("Are you SURE you want to wipe the device?\n"
                        "Your wallet still has bitcoins in it!")
                if not self.question(msg, title=title,
                                     icon=QMessageBox.Critical):
                    return
            invoke_client('wipe_device', unpair_after=True)

        def slider_moved():
            mins = timeout_slider.sliderPosition()
            timeout_minutes.setText(_("{:2d} minutes").format(mins))

        def slider_released():
            # Config stores the timeout in seconds; the slider is in minutes.
            config.set_session_timeout(timeout_slider.sliderPosition() * 60)

        # Information tab
        info_tab = QWidget()
        info_layout = QVBoxLayout(info_tab)
        info_glayout = QGridLayout()
        info_glayout.setColumnStretch(2, 1)
        device_label = QLabel()
        pin_set_label = QLabel()
        passphrases_label = QLabel()
        version_label = QLabel()
        device_id_label = QLabel()
        bl_hash_label = QLabel()
        bl_hash_label.setWordWrap(True)
        language_label = QLabel()
        initialized_label = QLabel()
        rows = [
            (_("Device Label"), device_label),
            (_("PIN set"), pin_set_label),
            (_("Passphrases"), passphrases_label),
            (_("Firmware Version"), version_label),
            (_("Device ID"), device_id_label),
            (_("Bootloader Hash"), bl_hash_label),
            (_("Language"), language_label),
            (_("Initialized"), initialized_label),
        ]
        for row_num, (label, widget) in enumerate(rows):
            info_glayout.addWidget(QLabel(label), row_num, 0)
            info_glayout.addWidget(widget, row_num, 1)
        info_layout.addLayout(info_glayout)

        # Settings tab
        settings_tab = QWidget()
        settings_layout = QVBoxLayout(settings_tab)
        settings_glayout = QGridLayout()

        # Settings tab - Label
        label_msg = QLabel(_("Name this {}. If you have multiple devices "
                             "their labels help distinguish them.")
                           .format(plugin.device))
        label_msg.setWordWrap(True)
        label_label = QLabel(_("Device Label"))
        label_edit = QLineEdit()
        label_edit.setMinimumWidth(150)
        label_edit.setMaxLength(plugin.MAX_LABEL_LEN)
        label_apply = QPushButton(_("Apply"))
        label_apply.clicked.connect(rename)
        label_edit.textChanged.connect(set_label_enabled)
        settings_glayout.addWidget(label_label, 0, 0)
        settings_glayout.addWidget(label_edit, 0, 1, 1, 2)
        settings_glayout.addWidget(label_apply, 0, 3)
        settings_glayout.addWidget(label_msg, 1, 1, 1, -1)

        # Settings tab - PIN
        pin_label = QLabel(_("PIN Protection"))
        pin_button = QPushButton()
        pin_button.clicked.connect(set_pin)
        settings_glayout.addWidget(pin_label, 2, 0)
        settings_glayout.addWidget(pin_button, 2, 1)
        pin_msg = QLabel(_("PIN protection is strongly recommended. "
                           "A PIN is your only protection against someone "
                           "stealing your bitcoins if they obtain physical "
                           "access to your {}.").format(plugin.device))
        pin_msg.setWordWrap(True)
        pin_msg.setStyleSheet("color: red")
        settings_glayout.addWidget(pin_msg, 3, 1, 1, -1)

        # Settings tab - Homescreen
        homescreen_label = QLabel(_("Homescreen"))
        homescreen_change_button = QPushButton(_("Change..."))
        homescreen_clear_button = QPushButton(_("Reset"))
        homescreen_change_button.clicked.connect(change_homescreen)
        try:
            import PIL
        except ImportError:
            # Without PIL only the clear/reset action remains usable.
            homescreen_change_button.setDisabled(True)
            homescreen_change_button.setToolTip(
                _("Required package 'PIL' is not available - Please install it or use the Trezor website instead.")
            )
        homescreen_clear_button.clicked.connect(clear_homescreen)
        homescreen_msg = QLabel(_("You can set the homescreen on your "
                                  "device to personalize it. You must "
                                  "choose a {} x {} monochrome black and "
                                  "white image.").format(hs_cols, hs_rows))
        homescreen_msg.setWordWrap(True)
        settings_glayout.addWidget(homescreen_label, 4, 0)
        settings_glayout.addWidget(homescreen_change_button, 4, 1)
        settings_glayout.addWidget(homescreen_clear_button, 4, 2)
        settings_glayout.addWidget(homescreen_msg, 5, 1, 1, -1)

        # Settings tab - Session Timeout
        timeout_label = QLabel(_("Session Timeout"))
        timeout_minutes = QLabel()
        timeout_slider = QSlider(Qt.Horizontal)
        timeout_slider.setRange(1, 60)
        timeout_slider.setSingleStep(1)
        timeout_slider.setTickInterval(5)
        timeout_slider.setTickPosition(QSlider.TicksBelow)
        timeout_slider.setTracking(True)
        timeout_msg = QLabel(
            _("Clear the session after the specified period "
              "of inactivity. Once a session has timed out, "
              "your PIN and passphrase (if enabled) must be "
              "re-entered to use the device."))
        timeout_msg.setWordWrap(True)
        timeout_slider.setSliderPosition(config.get_session_timeout() // 60)
        slider_moved()
        timeout_slider.valueChanged.connect(slider_moved)
        timeout_slider.sliderReleased.connect(slider_released)
        settings_glayout.addWidget(timeout_label, 6, 0)
        settings_glayout.addWidget(timeout_slider, 6, 1, 1, 3)
        settings_glayout.addWidget(timeout_minutes, 6, 4)
        settings_glayout.addWidget(timeout_msg, 7, 1, 1, -1)
        settings_layout.addLayout(settings_glayout)
        settings_layout.addStretch(1)

        # Advanced tab
        advanced_tab = QWidget()
        advanced_layout = QVBoxLayout(advanced_tab)
        advanced_glayout = QGridLayout()

        # Advanced tab - clear PIN
        clear_pin_button = QPushButton(_("Disable PIN"))
        clear_pin_button.clicked.connect(clear_pin)
        clear_pin_warning = QLabel(
            _("If you disable your PIN, anyone with physical access to your "
              "{} device can spend your bitcoins.").format(plugin.device))
        clear_pin_warning.setWordWrap(True)
        clear_pin_warning.setStyleSheet("color: red")
        advanced_glayout.addWidget(clear_pin_button, 0, 2)
        advanced_glayout.addWidget(clear_pin_warning, 1, 0, 1, 5)

        # Advanced tab - toggle passphrase protection
        passphrase_button = QPushButton()
        passphrase_button.clicked.connect(toggle_passphrase)
        passphrase_msg = WWLabel(PASSPHRASE_HELP)
        passphrase_warning = WWLabel(PASSPHRASE_NOT_PIN)
        passphrase_warning.setStyleSheet("color: red")
        advanced_glayout.addWidget(passphrase_button, 3, 2)
        advanced_glayout.addWidget(passphrase_msg, 4, 0, 1, 5)
        advanced_glayout.addWidget(passphrase_warning, 5, 0, 1, 5)

        # Advanced tab - wipe device
        wipe_device_button = QPushButton(_("Wipe Device"))
        wipe_device_button.clicked.connect(wipe_device)
        wipe_device_msg = QLabel(
            _("Wipe the device, removing all data from it. The firmware "
              "is left unchanged."))
        wipe_device_msg.setWordWrap(True)
        wipe_device_warning = QLabel(
            _("Only wipe a device if you have the recovery seed written down "
              "and the device wallet(s) are empty, otherwise the bitcoins "
              "will be lost forever."))
        wipe_device_warning.setWordWrap(True)
        wipe_device_warning.setStyleSheet("color: red")
        advanced_glayout.addWidget(wipe_device_button, 6, 2)
        advanced_glayout.addWidget(wipe_device_msg, 7, 0, 1, 5)
        advanced_glayout.addWidget(wipe_device_warning, 8, 0, 1, 5)
        advanced_layout.addLayout(advanced_glayout)
        advanced_layout.addStretch(1)

        tabs = QTabWidget(self)
        tabs.addTab(info_tab, _("Information"))
        tabs.addTab(settings_tab, _("Settings"))
        tabs.addTab(advanced_tab, _("Advanced"))

        dialog_vbox = QVBoxLayout(self)
        dialog_vbox.addWidget(tabs)
        dialog_vbox.addLayout(Buttons(CloseButton(self)))

        # Update information
        invoke_client(None)
| mit |
thisisALPINE/subterfuge | sslstrip/SSLServerConnection.py | 68 | 4390 | # Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging, re, string
from ServerConnection import ServerConnection
class SSLServerConnection(ServerConnection):
    '''
    For SSL connections to a server, we need to do some additional stripping. First we need
    to make note of any relative links, as the server will be expecting those to be requested
    via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies.
    '''

    # Pre-compiled patterns for cookie stripping, link discovery and
    # favicon injection.
    cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE)
    cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE)
    iconExpression = re.compile(r"<link rel=\"shortcut icon\" .*href=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
    linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE)
    headExpression = re.compile(r"<head>", re.IGNORECASE)

    def __init__(self, command, uri, postData, headers, client):
        ServerConnection.__init__(self, command, uri, postData, headers, client)

    def getLogLevel(self):
        return logging.INFO

    def getPostPrefix(self):
        return "SECURE POST"

    def handleHeader(self, key, value):
        """Strip the 'Secure' attribute from Set-Cookie headers so the
        browser will also send the cookie over plain HTTP."""
        if (key.lower() == 'set-cookie'):
            # FIX: the replacement template must be a raw string -- "\g<1>"
            # used the invalid escape sequence \g (DeprecationWarning on
            # Python 3.6+, scheduled to become a SyntaxError).
            value = SSLServerConnection.cookieExpression.sub(r"\g<1>", value)
        ServerConnection.handleHeader(self, key, value)

    def stripFileFromPath(self, path):
        """Return *path* without its trailing file component."""
        (strippedPath, lastSlash, file) = path.rpartition('/')
        return strippedPath

    def buildAbsoluteLink(self, link):
        """Record relative links found in a secure page with the URL monitor.

        Path-relative and host-relative links are rewritten to absolute
        http:// URLs.  NOTE(review): self.urlMonitor is presumably set up
        by the ServerConnection base class -- it is not defined here.
        """
        absoluteLink = ""
        if ((not link.startswith('http')) and (not link.startswith('/'))):
            absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link

            logging.debug("Found path-relative link in secure transmission: " + link)
            logging.debug("New Absolute path-relative link: " + absoluteLink)
        elif not link.startswith('http'):
            absoluteLink = "http://"+self.headers['host']+link

            logging.debug("Found relative link in secure transmission: " + link)
            logging.debug("New Absolute link: " + absoluteLink)

        if not absoluteLink == "":
            # FIX: undo HTML entity escaping of ampersands before recording
            # the URL; replace('&', '&') was a no-op.
            absoluteLink = absoluteLink.replace('&amp;', '&')
            self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink)

    def replaceCssLinks(self, data):
        """Record url(...) references found in CSS; returns data unchanged."""
        iterator = re.finditer(SSLServerConnection.cssExpression, data)
        for match in iterator:
            self.buildAbsoluteLink(match.group(1))
        return data

    def replaceFavicon(self, data):
        """Rewrite (or insert) the page's favicon link to our spoofed icon."""
        match = re.search(SSLServerConnection.iconExpression, data)
        if (match != None):
            data = re.sub(SSLServerConnection.iconExpression,
                          "<link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
        else:
            data = re.sub(SSLServerConnection.headExpression,
                          "<head><link rel=\"SHORTCUT ICON\" href=\"/favicon-x-favicon-x.ico\">", data)
        return data

    def replaceSecureLinks(self, data):
        """Extend base stripping with CSS links, favicon and tag links."""
        data = ServerConnection.replaceSecureLinks(self, data)
        data = self.replaceCssLinks(data)

        if (self.urlMonitor.isFaviconSpoofing()):
            data = self.replaceFavicon(data)

        iterator = re.finditer(SSLServerConnection.linkExpression, data)
        for match in iterator:
            # Group 10 is the href/src attribute value in linkExpression.
            self.buildAbsoluteLink(match.group(10))

        return data
| gpl-3.0 |
EricMuller/mywebmarks-backend | requirements/twisted/Twisted-17.1.0/build/lib.linux-x86_64-3.5/twisted/words/im/ircsupport.py | 13 | 9150 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
IRC support for Instance Messenger.
"""
from twisted.words.protocols import irc
from twisted.words.im.locals import ONLINE
from twisted.internet import defer, reactor, protocol
from twisted.internet.defer import succeed
from twisted.words.im import basesupport, interfaces, locals
from zope.interface import implementer
class IRCPerson(basesupport.AbstractPerson):
    """A single IRC user as seen by Instance Messenger."""

    def imperson_whois(self):
        """Ask the server for WHOIS information about this person."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        client.sendLine("WHOIS %s" % self.name)

    # IPerson implementation

    def isOnline(self):
        return ONLINE

    def getStatus(self):
        return ONLINE

    def setStatus(self, status):
        self.status = status
        self.chat.getContactsList().setContactStatus(self)

    def sendMessage(self, text, meta=None):
        """Send *text* line by line; 'emote'-styled messages go as CTCP ACTIONs."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        asEmote = bool(meta) and meta.get("style", None) == "emote"
        for line in text.split('\n'):
            if asEmote:
                client.ctcpMakeQuery(self.name, [('ACTION', line)])
            else:
                client.msg(self.name, line)
        return succeed(text)
@implementer(interfaces.IGroup)
class IRCGroup(basesupport.AbstractGroup):
    """An IRC channel as seen by Instance Messenger."""

    def imgroup_testAction(self):
        pass

    def imtarget_kick(self, target):
        """Kick *target* from this channel."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        reason = "for great justice!"
        client.sendLine("KICK #%s %s :%s" % (self.name, target.name, reason))

    # IGroup implementation

    def setTopic(self, topic):
        """Change the channel topic."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        client.topic(self.name, topic)

    def sendGroupMessage(self, text, meta={}):
        """Send *text* to the channel; 'emote'-styled text goes as one ACTION."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        if meta and meta.get("style", None) == "emote":
            client.ctcpMakeQuery(self.name, [('ACTION', text)])
            return succeed(text)
        # Clients don't support plain escaped newlines, so send each line
        # as its own say().
        for line in text.split('\n'):
            client.say(self.name, line)
        return succeed(text)

    def leave(self):
        """Part the channel and hide its conversation window."""
        client = self.account.client
        if client is None:
            raise locals.OfflineError
        client.leave(self.name)
        client.getGroupConversation(self.name, 1)
class IRCProto(basesupport.AbstractClientMixin, irc.IRCClient):
    """IRC protocol handler bridging twisted.words' IRCClient to the IM UI.

    Per-connection bookkeeping:
      _namreplies: channel name -> member nicks accumulated from RPL_NAMREPLY
                   until the matching RPL_ENDOFNAMES arrives.
      _ingroups:   nickname -> list of channels that nick is known to be in.
      _topics:     channel name -> topic text waiting for the matching
                   333 (topic set-by) reply.
    """

    def __init__(self, account, chatui, logonDeferred=None):
        basesupport.AbstractClientMixin.__init__(self, account, chatui,
                                                 logonDeferred)
        self._namreplies = {}
        self._ingroups = {}
        self._groups = {}
        self._topics = {}

    def getGroupConversation(self, name, hide=0):
        """Return the conversation widget for channel `name` (lowercased)."""
        name = name.lower()
        return self.chat.getGroupConversation(self.chat.getGroup(name, self),
                                              stayHidden=hide)

    def getPerson(self, name):
        return self.chat.getPerson(name, self)

    def connectionMade(self):
        # XXX: Why do I duplicate code in IRCClient.register?
        try:
            self.performLogin = True
            self.nickname = self.account.username
            self.password = self.account.password
            self.realname = "Twisted-IM user"
            irc.IRCClient.connectionMade(self)
            for channel in self.account.channels:
                self.joinGroup(channel)
            self.account._isOnline = 1
            if self._logonDeferred is not None:
                self._logonDeferred.callback(self)
            self.chat.getContactsList()
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.  Keep the best-effort logging behavior.
            import traceback
            traceback.print_exc()

    def setNick(self, nick):
        self.name = nick
        self.accountName = "%s (IRC)" % nick
        irc.IRCClient.setNick(self, nick)

    def kickedFrom(self, channel, kicker, message):
        """
        Called when I am kicked from a channel.
        """
        return self.chat.getGroupConversation(
            self.chat.getGroup(channel[1:], self), 1)

    def userKicked(self, kickee, channel, kicker, message):
        pass

    def noticed(self, username, channel, message):
        # Tag NOTICEs so the UI does not generate automatic replies.
        self.privmsg(username, channel, message, {"dontAutoRespond": 1})

    def privmsg(self, username, channel, message, metadata=None):
        if metadata is None:
            metadata = {}
        username = username.split('!', 1)[0]
        if username == self.name:
            return
        if channel[0] == '#':
            group = channel[1:]
            self.getGroupConversation(group).showGroupMessage(
                username, message, metadata)
            return
        self.chat.getConversation(
            self.getPerson(username)).showMessage(message, metadata)

    def action(self, username, channel, emote):
        username = username.split('!', 1)[0]
        if username == self.name:
            return
        meta = {'style': 'emote'}
        if channel[0] == '#':
            group = channel[1:]
            self.getGroupConversation(group).showGroupMessage(
                username, emote, meta)
            return
        self.chat.getConversation(
            self.getPerson(username)).showMessage(emote, meta)

    def irc_RPL_NAMREPLY(self, prefix, params):
        """
        RPL_NAMREPLY
        >> NAMES #bnl
        << :Arlington.VA.US.Undernet.Org 353 z3p = #bnl :pSwede Dan-- SkOyg AG
        """
        group = params[2][1:].lower()
        # lstrip drops any leading channel-mode markers; unlike the old
        # per-character while loop it cannot IndexError on an empty token.
        users = [nick.lstrip("@+") for nick in params[3].split()]
        self._namreplies.setdefault(group, []).extend(users)
        for nickname in users:
            # setdefault replaces the old bare try/except append pattern.
            self._ingroups.setdefault(nickname, []).append(group)

    def irc_RPL_ENDOFNAMES(self, prefix, params):
        group = params[1][1:]
        self.getGroupConversation(group).setGroupMembers(
            self._namreplies[group.lower()])
        del self._namreplies[group.lower()]

    def irc_RPL_TOPIC(self, prefix, params):
        # Stash the topic until irc_333 supplies who set it.
        self._topics[params[1][1:]] = params[2]

    def irc_333(self, prefix, params):
        group = params[1][1:]
        self.getGroupConversation(group).setTopic(self._topics[group],
                                                  params[2])
        del self._topics[group]

    def irc_TOPIC(self, prefix, params):
        nickname = prefix.split("!")[0]
        group = params[0][1:]
        topic = params[1]
        self.getGroupConversation(group).setTopic(topic, nickname)

    def irc_JOIN(self, prefix, params):
        nickname = prefix.split("!")[0]
        group = params[0][1:].lower()
        if nickname != self.nickname:
            self._ingroups.setdefault(nickname, []).append(group)
            self.getGroupConversation(group).memberJoined(nickname)

    def irc_PART(self, prefix, params):
        nickname = prefix.split("!")[0]
        group = params[0][1:].lower()
        if nickname != self.nickname:
            # Tolerate a PART from a nick we never saw join (previously
            # raised KeyError on the _ingroups lookup).
            groups = self._ingroups.get(nickname)
            if groups and group in groups:
                groups.remove(group)
            self.getGroupConversation(group).memberLeft(nickname)

    def irc_QUIT(self, prefix, params):
        nickname = prefix.split("!")[0]
        if nickname in self._ingroups:
            for group in self._ingroups[nickname]:
                self.getGroupConversation(group).memberLeft(nickname)
            self._ingroups[nickname] = []

    def irc_NICK(self, prefix, params):
        fromNick = prefix.split("!")[0]
        toNick = params[0]
        if fromNick not in self._ingroups:
            return
        for group in self._ingroups[fromNick]:
            self.getGroupConversation(group).memberChangedNick(fromNick,
                                                               toNick)
        self._ingroups[toNick] = self._ingroups[fromNick]
        del self._ingroups[fromNick]

    def irc_unknown(self, prefix, command, params):
        pass

    # GTKIM calls
    def joinGroup(self, name):
        self.join(name)
        self.getGroupConversation(name)
@implementer(interfaces.IAccount)
class IRCAccount(basesupport.AbstractAccount):
    """Account description for an IRC connection.

    `channels` is a comma-separated list of channel names to auto-join
    once the connection is established.
    """
    gatewayType = "IRC"

    _groupFactory = IRCGroup
    _personFactory = IRCPerson

    def __init__(self, accountName, autoLogin, username, password, host, port,
                 channels=''):
        basesupport.AbstractAccount.__init__(self, accountName, autoLogin,
                                             username, password, host, port)
        # Filter out empty entries so stray commas ("a,,b", trailing comma)
        # do not produce bogus '' channels; the original only special-cased
        # the fully-empty string.
        self.channels = [chan.strip()
                         for chan in channels.split(',')
                         if chan.strip()]

    def _startLogOn(self, chatui):
        """Connect to the IRC server; fires the returned Deferred on logon."""
        logonDeferred = defer.Deferred()
        cc = protocol.ClientCreator(reactor, IRCProto, self, chatui,
                                    logonDeferred)
        d = cc.connectTCP(self.host, self.port)
        # Propagate connection failures to the logon Deferred.
        d.addErrback(logonDeferred.errback)
        return logonDeferred
| mit |
TidalPaladin/Superliminal-resin | app/plugin.dbmc/resources/lib/accountsettings.py | 2 | 3881 | #/*
# * Copyright (C) 2013 Joost Kop
# *
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# *
# */
import xbmcvfs
import shutil
import os
import pickle
from resources.lib.utils import *
class AccountSettings(object):
    '''
    Class which loads and saves all the account settings,
    for easy access to the account settings.

    Settings are pickled to '<profile>/accounts/<name>/settings'.
    Python 2 code: byte strings are decoded to unicode on the way in,
    and paths are re-encoded to UTF-8 bytes for the xbmcvfs calls.
    '''
    def __init__(self, account_name):
        # Normalize the account name to unicode so later path joins behave
        # consistently (Python 2 str vs unicode).
        if isinstance (account_name,str):
            self.account_name = account_name.decode("utf-8")
        else:
            self.account_name = account_name
        # Defaults; overwritten by load() when a saved settings file exists.
        self.access_token = u''
        self.passcode = ''
        self.passcodetimeout = 30
        self.session_id = ''
        self.synchronisation = False
        self.syncfreq = 5
        self.syncpath = u''
        self.remotepath = u''
        dataPath = xbmc.translatePath( ADDON.getAddonInfo('profile') ).decode("utf-8")
        self.account_dir = os.path.normpath(dataPath + '/accounts/' + self.account_name) + os.sep #add os seperator because it is a dir
        #read from location if present
        if xbmcvfs.exists( self.account_dir.encode("utf-8") ):
            self.load()
            #Don't use the stored account_dir (the profile path may have
            #moved since the settings were pickled)
            self.account_dir = os.path.normpath(dataPath + '/accounts/' + self.account_name) + os.sep #add os seperator because it is a dir
        else:
            log_debug('Account (%s) doesn\'t exist yet' % (self.account_name) )

    def load(self):
        """Populate this object from the pickled 'settings' file on disk."""
        log_debug('Loading account settings: %s' % (self.account_name) )
        settings_file = os.path.normpath(self.account_dir + 'settings')
        try:
            with open(settings_file, 'rb') as file_obj:
                # NOTE(review): pickle of a local file written by save();
                # must never be pointed at untrusted data.
                tmp_dict = pickle.load(file_obj)
        except Exception as exc:
            # Best effort: keep the defaults from __init__ on failure.
            log_error('Failed to load the settings: %s' % (str(exc)) )
        else:
            # Adopt every stored attribute wholesale.
            self.__dict__.update(tmp_dict)
            #correct the items; make sure that they are unicode...)
            if isinstance (self.account_name,str):
                self.account_name = self.account_name.decode("utf-8")
            if isinstance (self.syncpath,str):
                self.syncpath = self.syncpath.decode("utf-8")
            if isinstance (self.remotepath,str):
                self.remotepath = self.remotepath.decode("utf-8")

    def save(self):
        """Pickle all attributes to '<account_dir>/settings', creating the
        account directory first if needed."""
        log_debug('Save account settings: %s' % (self.account_name) )
        #check if the account directory is present, create otherwise
        if not xbmcvfs.exists( self.account_dir.encode("utf-8") ):
            xbmcvfs.mkdirs( self.account_dir.encode("utf-8") )
        #Save...
        settings_file = os.path.normpath(self.account_dir + u'settings')
        try:
            with open(settings_file, 'wb') as file_obj:
                pickle.dump(self.__dict__, file_obj)
        except Exception as exc:
            log_error('Failed saving the settings: %s' % (str(exc)) )

    def remove(self):
        """Delete the account folder and its cache folder from disk."""
        log_debug('Remove account folder: %s' % (self.account_dir) )
        shutil.rmtree( self.account_dir )
        #remove cache folder
        shutil.rmtree( get_cache_path(self.account_name) )
        #remove synced data is done in the DropboxSynchronizer!
| apache-2.0 |
vladryk/horizon | openstack_dashboard/dashboards/project/vpn/views.py | 31 | 17672 | # Copyright 2013, Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import tabs
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.vpn \
import forms as vpn_forms
from openstack_dashboard.dashboards.project.vpn import tables as vpn_tables
from openstack_dashboard.dashboards.project.vpn import tabs as vpn_tabs
from openstack_dashboard.dashboards.project.vpn \
import workflows as vpn_workflows
import re
class IndexView(tabs.TabView):
    """Landing page listing VPN services, IKE/IPSec policies and connections."""
    tab_group_class = vpn_tabs.VPNTabs
    template_name = 'project/vpn/index.html'
    page_title = _("Virtual Private Network")

    # Resource keyword (parsed from the table action name) -> delete API
    # call plus success/error message templates.  Replaces four
    # copy-pasted loops that differed only in these three values.
    _DELETE_ACTIONS = {
        'vpnservice': (api.vpn.vpnservice_delete,
                       _('Deleted VPN Service %s'),
                       _('Unable to delete VPN Service: %s')),
        'ikepolicy': (api.vpn.ikepolicy_delete,
                      _('Deleted IKE Policy %s'),
                      _('Unable to delete IKE Policy: %s')),
        'ipsecpolicy': (api.vpn.ipsecpolicy_delete,
                        _('Deleted IPSec Policy %s'),
                        _('Unable to delete IPSec Policy: %s')),
        'ipsecsiteconnection': (api.vpn.ipsecsiteconnection_delete,
                                _('Deleted IPSec Site Connection %s'),
                                _('Unable to delete IPSec Site Connection: %s')),
    }

    def post(self, request, *args, **kwargs):
        """Dispatch a table "delete" action to the matching API call.

        The action name embeds the resource keyword after ``delete``; for
        row-level actions the object id is the trailing token of the
        action name instead of being posted in ``object_ids``.
        """
        obj_ids = request.POST.getlist('object_ids')
        action = request.POST['action']
        kind = re.search('.delete([a-z]+)', action).group(1)
        if not obj_ids:
            # Row action: recover the single object id from the action name.
            obj_ids.append(re.search('([0-9a-z-]+)$', action).group(1))
        if kind in self._DELETE_ACTIONS:
            delete, success_msg, error_msg = self._DELETE_ACTIONS[kind]
            for obj_id in obj_ids:
                try:
                    delete(request, obj_id)
                    messages.success(request, success_msg % obj_id)
                except Exception as e:
                    exceptions.handle(request, error_msg % e)
        return self.get(request, *args, **kwargs)
class AddVPNServiceView(workflows.WorkflowView):
    """Workflow view for creating a VPN service."""
    workflow_class = vpn_workflows.AddVPNService

    def get_initial(self):
        # No extra initial data beyond the base workflow defaults.
        return super(AddVPNServiceView, self).get_initial()
class AddIPSecSiteConnectionView(workflows.WorkflowView):
    """Workflow view for creating an IPSec site connection."""
    workflow_class = vpn_workflows.AddIPSecSiteConnection

    def get_initial(self):
        # No extra initial data beyond the base workflow defaults.
        return super(AddIPSecSiteConnectionView, self).get_initial()
class AddIKEPolicyView(workflows.WorkflowView):
    """Workflow view for creating an IKE policy."""
    workflow_class = vpn_workflows.AddIKEPolicy

    def get_initial(self):
        # No extra initial data beyond the base workflow defaults.
        return super(AddIKEPolicyView, self).get_initial()
class AddIPSecPolicyView(workflows.WorkflowView):
    """Workflow view for creating an IPSec policy."""
    workflow_class = vpn_workflows.AddIPSecPolicy

    def get_initial(self):
        # No extra initial data beyond the base workflow defaults.
        return super(AddIPSecPolicyView, self).get_initial()
class IKEPolicyDetailsView(tabs.TabView):
    """Tabbed detail page for a single IKE policy."""
    tab_group_class = vpn_tabs.IKEPolicyDetailsTabs
    template_name = 'project/vpn/details_tabs.html'
    page_title = _("IKE Policy Details")

    @memoized.memoized_method
    def get_data(self):
        """Fetch the IKE policy, redirecting to the index on failure."""
        try:
            return api.vpn.ikepolicy_get(self.request,
                                         self.kwargs['ikepolicy_id'])
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve IKE Policy details.'),
                              redirect=self.get_redirect_url())

    def get_context_data(self, **kwargs):
        context = super(IKEPolicyDetailsView, self).get_context_data(**kwargs)
        policy = self.get_data()
        row_actions = vpn_tables.IKEPoliciesTable(
            self.request).render_row_actions(policy)
        context.update(ikepolicy=policy,
                       url=self.get_redirect_url(),
                       actions=row_actions)
        return context

    def get_tabs(self, request, *args, **kwargs):
        return self.tab_group_class(request, ikepolicy=self.get_data(),
                                    **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse_lazy('horizon:project:vpn:index')
class IPSecPolicyDetailsView(tabs.TabView):
    """Tabbed detail page for a single IPSec policy."""
    tab_group_class = vpn_tabs.IPSecPolicyDetailsTabs
    template_name = 'project/vpn/details_tabs.html'
    page_title = _("IPSec Policy Details")

    @memoized.memoized_method
    def get_data(self):
        """Fetch the IPSec policy, redirecting to the index on failure."""
        try:
            return api.vpn.ipsecpolicy_get(self.request,
                                           self.kwargs['ipsecpolicy_id'])
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve IPSec Policy details.'),
                              redirect=self.get_redirect_url())

    def get_context_data(self, **kwargs):
        context = super(IPSecPolicyDetailsView, self).get_context_data(
            **kwargs)
        policy = self.get_data()
        row_actions = vpn_tables.IPSecPoliciesTable(
            self.request).render_row_actions(policy)
        context.update(ipsecpolicy=policy,
                       url=self.get_redirect_url(),
                       actions=row_actions)
        return context

    def get_tabs(self, request, *args, **kwargs):
        return self.tab_group_class(request, ipsecpolicy=self.get_data(),
                                    **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse_lazy('horizon:project:vpn:index')
class VPNServiceDetailsView(tabs.TabView):
    """Tabbed detail page for one VPN service plus its IPSec connections."""
    tab_group_class = vpn_tabs.VPNServiceDetailsTabs
    template_name = 'project/vpn/details_tabs.html'
    page_title = _("VPN Service Details")

    @memoized.memoized_method
    def get_data(self):
        """Fetch the VPN service and attach its IPSec site connections."""
        sid = self.kwargs['vpnservice_id']
        try:
            vpnservice = api.vpn.vpnservice_get(self.request, sid)
        except Exception:
            # NOTE(review): `[]` looks like a placeholder; it only matters
            # if exceptions.handle() returns instead of redirecting, in
            # which case the `.vpnconnections` assignment below would fail
            # on a list -- confirm handle() always raises here.
            vpnservice = []
            msg = _('Unable to retrieve VPN Service details.')
            exceptions.handle(self.request, msg,
                              redirect=self.get_redirect_url())
        try:
            connections = api.vpn.ipsecsiteconnection_list(
                self.request, vpnservice_id=sid)
            vpnservice.vpnconnections = connections
        except Exception:
            # Best effort: still render the service if listing its
            # connections fails.
            vpnservice.vpnconnections = []
        return vpnservice

    def get_context_data(self, **kwargs):
        """Expose the service, redirect URL and row actions to the template."""
        context = super(VPNServiceDetailsView, self).get_context_data(**kwargs)
        vpnservice = self.get_data()
        table = vpn_tables.VPNServicesTable(self.request)
        context["vpnservice"] = vpnservice
        context["url"] = self.get_redirect_url()
        context["actions"] = table.render_row_actions(vpnservice)
        return context

    def get_tabs(self, request, *args, **kwargs):
        vpnservice = self.get_data()
        return self.tab_group_class(request, vpnservice=vpnservice, **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse_lazy('horizon:project:vpn:index')
class IPSecSiteConnectionDetailsView(tabs.TabView):
    """Tabbed detail page for a single IPSec site connection."""
    tab_group_class = vpn_tabs.IPSecSiteConnectionDetailsTabs
    template_name = 'project/vpn/details_tabs.html'
    page_title = _("IPSec Site Connection Details")

    @memoized.memoized_method
    def get_data(self):
        """Fetch the connection, redirecting to the index on failure."""
        try:
            return api.vpn.ipsecsiteconnection_get(
                self.request, self.kwargs['ipsecsiteconnection_id'])
        except Exception:
            exceptions.handle(
                self.request,
                _('Unable to retrieve IPSec Site Connection details.'),
                redirect=self.get_redirect_url())

    def get_context_data(self, **kwargs):
        context = super(IPSecSiteConnectionDetailsView, self).get_context_data(
            **kwargs)
        connection = self.get_data()
        row_actions = vpn_tables.IPSecSiteConnectionsTable(
            self.request).render_row_actions(connection)
        context.update(ipsecsiteconnection=connection,
                       url=self.get_redirect_url(),
                       actions=row_actions)
        return context

    def get_tabs(self, request, *args, **kwargs):
        return self.tab_group_class(request,
                                    ipsecsiteconnection=self.get_data(),
                                    **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse_lazy('horizon:project:vpn:index')
class UpdateVPNServiceView(forms.ModalFormView):
    """Modal form for editing an existing VPN service."""
    form_class = vpn_forms.UpdateVPNService
    form_id = "update_vpnservice_form"
    modal_header = _("Edit VPN Service")
    template_name = "project/vpn/update_vpnservice.html"
    context_object_name = 'vpnservice'
    submit_label = _("Save Changes")
    submit_url = "horizon:project:vpn:update_vpnservice"
    success_url = reverse_lazy("horizon:project:vpn:index")
    page_title = _("Edit VPN Service")

    def get_context_data(self, **kwargs):
        context = super(UpdateVPNServiceView, self).get_context_data(**kwargs)
        service_id = self.kwargs['vpnservice_id']
        context["vpnservice_id"] = service_id
        context['submit_url'] = reverse(self.submit_url, args=(service_id,))
        return context

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the service being edited, redirecting on failure."""
        try:
            return api.vpn.vpnservice_get(self.request,
                                          self.kwargs['vpnservice_id'])
        except Exception as e:
            msg = _('Unable to retrieve VPN Service details. %s') % e
            exceptions.handle(self.request, msg, redirect=self.success_url)

    def get_initial(self):
        vpnservice = self._get_object()
        return {'name': vpnservice['name'],
                'vpnservice_id': vpnservice['id'],
                'description': vpnservice['description'],
                'admin_state_up': vpnservice['admin_state_up']}
class UpdateIKEPolicyView(forms.ModalFormView):
    """Modal form for editing an existing IKE policy."""
    form_class = vpn_forms.UpdateIKEPolicy
    form_id = "update_ikepolicy_form"
    modal_header = _("Edit IKE Policy")
    template_name = "project/vpn/update_ikepolicy.html"
    context_object_name = 'ikepolicy'
    submit_label = _("Save Changes")
    submit_url = "horizon:project:vpn:update_ikepolicy"
    success_url = reverse_lazy("horizon:project:vpn:index")
    page_title = _("Edit IKE Policy")

    def get_context_data(self, **kwargs):
        context = super(UpdateIKEPolicyView, self).get_context_data(**kwargs)
        policy_id = self.kwargs['ikepolicy_id']
        context["ikepolicy_id"] = policy_id
        context['submit_url'] = reverse(self.submit_url, args=(policy_id,))
        return context

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the IKE policy being edited, redirecting on failure."""
        try:
            return api.vpn.ikepolicy_get(self.request,
                                         self.kwargs['ikepolicy_id'])
        except Exception as e:
            msg = _('Unable to retrieve IKE Policy details. %s') % e
            exceptions.handle(self.request, msg, redirect=self.success_url)

    def get_initial(self):
        policy = self._get_object()
        lifetime = policy['lifetime']
        return {'name': policy['name'],
                'ikepolicy_id': policy['id'],
                'description': policy['description'],
                'auth_algorithm': policy['auth_algorithm'],
                'encryption_algorithm': policy['encryption_algorithm'],
                'ike_version': policy['ike_version'],
                'lifetime_units': lifetime['units'],
                'lifetime_value': lifetime['value'],
                'pfs': policy['pfs'],
                'phase1_negotiation_mode': policy['phase1_negotiation_mode']}
class UpdateIPSecPolicyView(forms.ModalFormView):
    """Modal form for editing an existing IPSec policy."""
    form_class = vpn_forms.UpdateIPSecPolicy
    form_id = "update_ipsecpolicy_form"
    modal_header = _("Edit IPSec Policy")
    template_name = "project/vpn/update_ipsecpolicy.html"
    context_object_name = 'ipsecpolicy'
    submit_label = _("Save Changes")
    submit_url = "horizon:project:vpn:update_ipsecpolicy"
    success_url = reverse_lazy("horizon:project:vpn:index")
    page_title = _("Edit IPSec Policy")

    def get_context_data(self, **kwargs):
        context = super(UpdateIPSecPolicyView, self).get_context_data(**kwargs)
        policy_id = self.kwargs['ipsecpolicy_id']
        context["ipsecpolicy_id"] = policy_id
        context['submit_url'] = reverse(self.submit_url, args=(policy_id,))
        return context

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the IPSec policy being edited, redirecting on failure."""
        try:
            return api.vpn.ipsecpolicy_get(self.request,
                                           self.kwargs['ipsecpolicy_id'])
        except Exception as e:
            msg = _('Unable to retrieve IPSec Policy details. %s') % e
            exceptions.handle(self.request, msg, redirect=self.success_url)

    def get_initial(self):
        policy = self._get_object()
        lifetime = policy['lifetime']
        return {'name': policy['name'],
                'ipsecpolicy_id': policy['id'],
                'description': policy['description'],
                'auth_algorithm': policy['auth_algorithm'],
                'encapsulation_mode': policy['encapsulation_mode'],
                'encryption_algorithm': policy['encryption_algorithm'],
                'lifetime_units': lifetime['units'],
                'lifetime_value': lifetime['value'],
                'pfs': policy['pfs'],
                'transform_protocol': policy['transform_protocol']}
class UpdateIPSecSiteConnectionView(forms.ModalFormView):
    """Modal form for editing an existing IPSec site connection."""
    form_class = vpn_forms.UpdateIPSecSiteConnection
    form_id = "update_ipsecsiteconnection_form"
    modal_header = _("Edit IPSec Site Connection")
    template_name = "project/vpn/update_ipsecsiteconnection.html"
    context_object_name = 'ipsecsiteconnection'
    submit_label = _("Save Changes")
    submit_url = "horizon:project:vpn:update_ipsecsiteconnection"
    success_url = reverse_lazy("horizon:project:vpn:index")
    page_title = _("Edit IPSec Site Connection")

    def get_context_data(self, **kwargs):
        context = super(
            UpdateIPSecSiteConnectionView, self).get_context_data(**kwargs)
        connection_id = self.kwargs['ipsecsiteconnection_id']
        context["ipsecsiteconnection_id"] = connection_id
        context['submit_url'] = reverse(self.submit_url,
                                        args=(connection_id,))
        return context

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the connection being edited, redirecting on failure."""
        try:
            return api.vpn.ipsecsiteconnection_get(
                self.request, self.kwargs['ipsecsiteconnection_id'])
        except Exception as e:
            msg = _('Unable to retrieve IPSec Site Connection details. %s') % e
            exceptions.handle(self.request, msg, redirect=self.success_url)

    def get_initial(self):
        conn = self._get_object()
        dpd = conn['dpd']
        return {'name': conn['name'],
                'ipsecsiteconnection_id': conn['id'],
                'description': conn['description'],
                'peer_address': conn['peer_address'],
                'peer_id': conn['peer_id'],
                'peer_cidrs': ", ".join(conn['peer_cidrs']),
                'psk': conn['psk'],
                'mtu': conn['mtu'],
                'dpd_action': dpd['action'],
                'dpd_interval': dpd['interval'],
                'dpd_timeout': dpd['timeout'],
                'initiator': conn['initiator'],
                'admin_state_up': conn['admin_state_up']}
| apache-2.0 |
Poles/Poles | platforms/linux/JsonCpp/scons-local-2.3.0/SCons/Options/PackageOption.py | 11 | 2030 | #
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PackageOption.py 2013/03/03 09:48:35 garyo"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def PackageOption(*args, **kw):
    """Deprecated wrapper around SCons.Variables.PackageVariable.

    Emits a one-time DeprecatedOptionsWarning, then forwards all
    positional and keyword arguments unchanged.
    """
    global warned
    if not warned:
        SCons.Warnings.warn(
            SCons.Warnings.DeprecatedOptionsWarning,
            "The PackageOption() function is deprecated; use the PackageVariable() function instead.")
        warned = True
    return SCons.Variables.PackageVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-3.0 |
dongjoon-hyun/tensorflow | tensorflow/contrib/learn/python/learn/estimators/model_fn.py | 34 | 12336 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and methods related to model_fn (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.contrib.framework import get_graph_from_inputs
from tensorflow.contrib.learn.python.learn.estimators import constants
from tensorflow.contrib.learn.python.learn.estimators import metric_key
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.python.estimator import model_fn as core_model_fn_lib
from tensorflow.python.estimator.export import export_output as core_export_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import session_run_hook
from tensorflow.python.util.deprecation import deprecated
class ModeKeys(object):
  """Standard names for model modes (deprecated).

  THIS CLASS IS DEPRECATED.

  The following standard keys are defined:

  * `TRAIN`: training mode.
  * `EVAL`: evaluation mode.
  * `INFER`: inference mode.
  """

  TRAIN = 'train'
  EVAL = 'eval'
  INFER = 'infer'

  @classmethod
  def validate(cls, key):
    """Raise `ValueError` unless `key` is one of the defined mode keys."""
    if key in {cls.TRAIN, cls.EVAL, cls.INFER}:
      return
    raise ValueError('Invalid mode %s.' % key)
class ModelFnOps(
collections.namedtuple('ModelFnOps', [
'predictions', 'loss', 'train_op', 'eval_metric_ops',
'output_alternatives', 'training_chief_hooks', 'training_hooks',
'scaffold', 'mode'
])):
"""Ops returned from a model_fn.
THIS CLASS IS DEPRECATED. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for general migration instructions.
"""
  @deprecated(None, 'When switching to tf.estimator.Estimator, use '
              'tf.estimator.EstimatorSpec. You can use the `estimator_spec`'
              ' method to create an equivalent one.')
  def __new__(cls,
              mode,
              predictions=None,
              loss=None,
              train_op=None,
              eval_metric_ops=None,
              output_alternatives=None,
              training_chief_hooks=None,
              training_hooks=None,
              scaffold=None):
    """Creates a validated `ModelFnOps` instance.

    For a multi-headed model, the predictions dict here will contain the
    outputs of all of the heads.  However: at serving time, requests will be
    made specifically for one or more heads, and the RPCs used for these
    requests may differ by problem type (i.e., regression, classification,
    other).  The purpose of the output_alternatives dict is to aid in
    exporting a SavedModel from which such head-specific queries can be
    served.  These output_alternatives will be combined with
    input_alternatives (see `saved_model_export_utils`) to produce a set of
    `SignatureDef`s specifying the valid requests that can be served from
    this model.

    For a single-headed model, it is still adviseable to provide
    output_alternatives with a single entry, because this is how the problem
    type is communicated for export and serving.  If output_alternatives is
    not given, the resulting SavedModel will support only one head of
    unspecified type.

    Args:
      mode: One of `ModeKeys`. Specifies if this training, evaluation or
        prediction.
      predictions: Predictions `Tensor` or dict of `Tensor`.
      loss: Training loss `Tensor`.
      train_op: Op for the training step.
      eval_metric_ops: Dict of metric results keyed by name. The values of
        the dict are the results of calling a metric function, such as
        `Tensor`.
      output_alternatives: a dict of
        `{submodel_name: (problem_type, {tensor_name: Tensor})}`, where
        `submodel_name` is a submodel identifier that should be consistent
        across the pipeline (here likely taken from the name of each `Head`,
        for models that use them), `problem_type` is a `ProblemType`,
        `tensor_name` is a symbolic name for an output Tensor possibly but
        not necessarily taken from `PredictionKey`, and `Tensor` is the
        corresponding output Tensor itself.
      training_chief_hooks: A list of `SessionRunHook` objects that will be
        run on the chief worker during training.
      training_hooks: A list of `SessionRunHook` objects that will be run on
        all workers during training.
      scaffold: A `tf.train.Scaffold` object that can be used to set
        initialization, saver, and more to be used in training.

    Returns:
      A validated `ModelFnOps` object.

    Raises:
      ValueError: If validation fails.
    """
    # Fail fast on an unknown mode string before any conversion work.
    ModeKeys.validate(mode)

    # Assert all ops are from the same graph.
    get_graph_from_inputs((predictions, loss, train_op))

    # Validate train_op: required in TRAIN mode; non-Operation values are
    # coerced to the op that produces them.
    if train_op is None:
      if mode == ModeKeys.TRAIN:
        raise ValueError('Missing train_op.')
    elif not isinstance(train_op, ops.Operation):
      # TODO(ptucker): Should this be allowed? Consider raising error.
      train_op = ops.convert_to_tensor(train_op).op

    # Validate loss: required in TRAIN and EVAL modes; must be (reshapable
    # to) a scalar tensor.
    if loss is None:
      if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
        raise ValueError('Missing loss.')
    else:
      loss = ops.convert_to_tensor(loss)
      loss_shape = loss.get_shape()
      if loss_shape.num_elements() not in (None, 1):
        raise ValueError('Loss must be scalar: %s.' % loss)
      if not loss_shape.is_compatible_with(tensor_shape.scalar()):
        # Single-element but non-scalar (e.g. shape [1]) losses are
        # normalized to rank 0.
        loss = array_ops.reshape(loss, [])

    # Validate predictions: required in INFER and EVAL modes; values are
    # converted to (Sparse)Tensor, preserving a dict structure if given.
    if predictions is None:
      if mode == ModeKeys.INFER or mode == ModeKeys.EVAL:
        raise ValueError('Missing predictions.')
    else:
      if isinstance(predictions, dict):
        predictions = {
            k: sparse_tensor.convert_to_tensor_or_sparse_tensor(v)
            for k, v in six.iteritems(predictions)
        }
      else:
        predictions = sparse_tensor.convert_to_tensor_or_sparse_tensor(
            predictions)

    # Validate eval_metric_ops: default to an empty dict.
    if eval_metric_ops is None:
      eval_metric_ops = {}
    else:
      if not isinstance(eval_metric_ops, dict):
        raise ValueError('eval_metric_ops must be a dict.')

    # Validate hooks: default both lists and require SessionRunHook
    # instances throughout.
    if training_chief_hooks is None:
      training_chief_hooks = []
    if training_hooks is None:
      training_hooks = []
    for hook in training_hooks + training_chief_hooks:
      if not isinstance(hook, session_run_hook.SessionRunHook):
        raise TypeError('All hooks returned from model_fn must be '
                        'SessionRunHook instances, got instance of %s: %s' %
                        (type(hook), hook))

    return super(ModelFnOps, cls).__new__(
        cls,
        predictions=predictions,
        loss=loss,
        train_op=train_op,
        eval_metric_ops=eval_metric_ops,
        output_alternatives=output_alternatives,
        training_chief_hooks=training_chief_hooks,
        training_hooks=training_hooks,
        scaffold=scaffold,
        mode=mode)
def estimator_spec(self, default_serving_output_alternative_key=None):
  """Creates an equivalent `EstimatorSpec`.

  Args:
    default_serving_output_alternative_key: Required for multiple heads. If
      you have multiple entries in `output_alternatives` dict (comparable to
      multiple heads), `EstimatorSpec` requires a default head that will be
      used if a Servo request does not explicitly mention which head to infer
      on. Pass the key of the output alternative here that you want to
      designate as default. A separate ExportOutput for this default head
      will be added to the export_outputs dict with the special key
      signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, unless there is
      already an entry in output_alternatives with this special key.

  Returns:
    Instance of `EstimatorSpec` that is equivalent to this `ModelFnOps`

  Raises:
    ValueError: If problem type is unknown.
  """
  def _scores(output_tensors):
    # Prefer explicit SCORES; fall back to PROBABILITIES (may be None).
    scores = output_tensors.get(prediction_key.PredictionKey.SCORES)
    if scores is None:
      scores = output_tensors.get(prediction_key.PredictionKey.PROBABILITIES)
    return scores

  def _classes(output_tensors):  # pylint: disable=missing-docstring
    classes = output_tensors.get(prediction_key.PredictionKey.CLASSES)
    if classes is None:
      logging.warning(
          'classes is None, Servo inference will not have class ids.')
      return None
    elif classes.dtype != dtypes.string:
      # Servo classification can only serve string classes
      logging.warning(
          'classes is not string, Servo inference will not have class ids.')
      return None
    return classes

  def _export_output(problem_type, predictions):  # pylint: disable=missing-docstring
    # Maps a contrib ProblemType to the corresponding core ExportOutput.
    if problem_type == constants.ProblemType.LINEAR_REGRESSION:
      return core_export_lib.RegressionOutput(_scores(predictions))

    if (problem_type == constants.ProblemType.CLASSIFICATION or
        problem_type == constants.ProblemType.LOGISTIC_REGRESSION):
      return core_export_lib.ClassificationOutput(
          scores=_scores(predictions), classes=_classes(predictions))

    if problem_type == constants.ProblemType.UNSPECIFIED:
      return core_export_lib.PredictOutput(predictions)

    raise ValueError('Unknown problem_type=%s' % problem_type)

  # Converts output_alternatives
  export_outputs_dict = None
  if self.output_alternatives:
    output_alternatives = self.output_alternatives
    # Adds default output_alternative if needed.
    if (len(output_alternatives) > 1 and
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY not in
        output_alternatives):
      # Copy before mutating so the caller's dict is left untouched.
      output_alternatives = output_alternatives.copy()
      output_alternatives[
          signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = (
              output_alternatives[default_serving_output_alternative_key])
    export_outputs_dict = {key: _export_output(*val) for key, val in
                           output_alternatives.items()}

  def _get_eval_metric_ops():
    """Returns self.eval_metric_ops without loss metric."""
    result = {}
    for key, value in six.iteritems(self.eval_metric_ops):
      if key != metric_key.MetricKey.LOSS:
        result[key] = value
    return result

  # Convert the contrib mode enum to the core mode enum.
  # Note: mode already validated in __new__().
  if self.mode == ModeKeys.TRAIN:
    core_mode = core_model_fn_lib.ModeKeys.TRAIN
  elif self.mode == ModeKeys.EVAL:
    core_mode = core_model_fn_lib.ModeKeys.EVAL
  elif self.mode == ModeKeys.INFER:
    core_mode = core_model_fn_lib.ModeKeys.PREDICT
  return core_model_fn_lib.EstimatorSpec(
      mode=core_mode,
      predictions=self.predictions,
      loss=self.loss,
      train_op=self.train_op,
      eval_metric_ops=_get_eval_metric_ops(),
      export_outputs=export_outputs_dict,
      training_chief_hooks=self.training_chief_hooks,
      training_hooks=self.training_hooks,
      scaffold=self.scaffold)
| apache-2.0 |
ibmsoe/tensorflow | tensorflow/contrib/distributions/python/ops/special_math.py | 23 | 9370 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Special Math Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
__all__ = [
"ndtr",
"log_ndtr",
"log_cdf_laplace",
]
# log_ndtr uses different functions over the ranges
# (-infty, lower](lower, upper](upper, infty)
# Lower bound values were chosen by examining where the support of ndtr
# appears to be zero, relative to scipy's (which is always 64bit). They were
# then made more conservative just to be safe. (Conservative means use the
# expansion more than we probably need to.) See `NdtrTest` in
# special_math_test.py.
LOGNDTR_FLOAT64_LOWER = -20
LOGNDTR_FLOAT32_LOWER = -10

# Upper bound values were chosen by examining for which values of 'x'
# Log[cdf(x)] is 0, after which point we need to use the approximation
# Log[cdf(x)] = Log[1 - cdf(-x)] approx -cdf(-x). We chose a value slightly
# conservative, meaning we use the approximation earlier than needed.
LOGNDTR_FLOAT64_UPPER = 8
LOGNDTR_FLOAT32_UPPER = 5
def ndtr(x, name="ndtr"):
  """Normal distribution function.

  Computes the area under the standard normal density integrated from
  minus infinity to `x`:

  ```
  ndtr(x) = 0.5 * (1 + erf(x / sqrt(2)))
          = 0.5 * erfc(-x / sqrt(2))
  ```

  Args:
    x: `Tensor` of type `float32`, `float64`.
    name: Python string. A name for the operation (default="ndtr").

  Returns:
    ndtr: `Tensor` with `dtype=x.dtype`.

  Raises:
    TypeError: if `x` is not floating-type.
  """
  with ops.name_scope(name, values=[x]):
    x = ops.convert_to_tensor(x, name="x")
    # Guard clause: only float32/float64 are supported by the
    # implementation's erf/erfc branches.
    if x.dtype.as_numpy_dtype in (np.float32, np.float64):
      return _ndtr(x)
    raise TypeError(
        "x.dtype=%s is not handled, see docstring for supported types."
        % x.dtype)
def _ndtr(x):
  """Core `ndtr` computation (no input validation)."""
  # 1/sqrt(2), materialized in the input's dtype.
  half_sqrt_2 = constant_op.constant(
      0.5 * math.sqrt(2.), dtype=x.dtype, name="half_sqrt_2")
  scaled = x * half_sqrt_2
  abs_scaled = math_ops.abs(scaled)
  # Branch used away from zero: erfc-based forms, sign-dependent.
  tail = array_ops.where(math_ops.greater(scaled, 0.),
                         2. - math_ops.erfc(abs_scaled),
                         math_ops.erfc(abs_scaled))
  # Branch used near zero: erf-based form.
  near_zero = 1. + math_ops.erf(scaled)
  selected = array_ops.where(math_ops.less(abs_scaled, half_sqrt_2),
                             near_zero,
                             tail)
  return 0.5 * selected
def log_ndtr(x, series_order=3, name="log_ndtr"):
  """Log Normal distribution function.

  For details of the Normal distribution function see `ndtr`.

  This function calculates `(log o ndtr)(x)` by either calling `log(ndtr(x))` or
  using an asymptotic series. Specifically:
  - For `x > upper_segment`, use the approximation `-ndtr(-x)` based on
    `log(1-x) ~= -x, x << 1`.
  - For `lower_segment < x <= upper_segment`, use the existing `ndtr` technique
    and take a log.
  - For `x <= lower_segment`, we use the series approximation of erf to compute
    the log CDF directly.

  The `lower_segment` is set based on the precision of the input:

  ```
  lower_segment = { -20,  x.dtype=float64
                  { -10,  x.dtype=float32
  upper_segment = {   8,  x.dtype=float64
                  {   5,  x.dtype=float32
  ```

  When `x < lower_segment`, the `ndtr` asymptotic series approximation is:

  ```
  ndtr(x) = scale * (1 + sum) + R_N
  scale   = exp(-0.5 x**2) / (-x sqrt(2 pi))
  sum     = Sum{(-1)^n (2n-1)!! / (x**2)^n, n=1:N}
  R_N     = O(exp(-0.5 x**2) (2N+1)!! / |x|^{2N+3})
  ```

  where `(2n-1)!! = (2n-1) (2n-3) (2n-5) ... (3) (1)` is a
  [double-factorial](https://en.wikipedia.org/wiki/Double_factorial).

  Args:
    x: `Tensor` of type `float32`, `float64`.
    series_order: Positive Python `integer`. Maximum depth to
      evaluate the asymptotic expansion. This is the `N` above.
    name: Python string. A name for the operation (default="log_ndtr").

  Returns:
    log_ndtr: `Tensor` with `dtype=x.dtype`.

  Raises:
    TypeError: if `x.dtype` is not handled.
    TypeError: if `series_order` is not a Python `integer`.
    ValueError: if `series_order` is not in `[0, 30]`.
  """
  if not isinstance(series_order, int):
    raise TypeError("series_order must be a Python integer.")
  if series_order < 0:
    raise ValueError("series_order must be non-negative.")
  if series_order > 30:
    raise ValueError("series_order must be <= 30.")

  with ops.name_scope(name, values=[x]):
    x = ops.convert_to_tensor(x, name="x")

    # Select the cross-over points matching the input precision.
    if x.dtype.as_numpy_dtype == np.float64:
      lower_segment = LOGNDTR_FLOAT64_LOWER
      upper_segment = LOGNDTR_FLOAT64_UPPER
    elif x.dtype.as_numpy_dtype == np.float32:
      lower_segment = LOGNDTR_FLOAT32_LOWER
      upper_segment = LOGNDTR_FLOAT32_UPPER
    else:
      raise TypeError("x.dtype=%s is not supported." % x.dtype)

    # The basic idea here was ported from py/scipy/special/cephes/ndtr.c.
    # We copy the main idea, with a few changes
    # * For x >> 1, and X ~ Normal(0, 1),
    #     Log[P[X < x]] = Log[1 - P[X < -x]] approx -P[X < -x],
    #     which extends the range of validity of this function.
    # * We use one fixed series_order for all of 'x', rather than adaptive.
    # * Our docstring properly reflects that this is an asymptotic series, not a
    #   Taylor series. We also provided a correct bound on the remainder.
    # * We need to use the max/min in the _log_ndtr_lower arg to avoid nan when
    #   x=0. This happens even though the branch is unchosen because when x=0
    #   the gradient of a select involves the calculation 1*dy+0*(-inf)=nan
    #   regardless of whether dy is finite. Note that the minimum is a NOP if
    #   the branch is chosen.
    return array_ops.where(
        math_ops.greater(x, upper_segment),
        -_ndtr(-x),  # log(1-x) ~= -x, x << 1
        array_ops.where(math_ops.greater(x, lower_segment),
                        math_ops.log(_ndtr(math_ops.maximum(x, lower_segment))),
                        _log_ndtr_lower(math_ops.minimum(x, lower_segment),
                                        series_order)))
def _log_ndtr_lower(x, series_order):
  """Asymptotic-expansion approximation of `Log[cdf(x)]`, for `x << -1`."""
  x_squared = math_ops.square(x)
  # Log of the Gaussian prefactor exp(-x**2 / 2) / (-x * sqrt(2 * pi)).
  log_prefactor = (-0.5 * x_squared
                   - math_ops.log(-x)
                   - 0.5 * math.log(2. * math.pi))
  series = _log_ndtr_asymptotic_series(x, series_order)
  return log_prefactor + math_ops.log(series)
def _log_ndtr_asymptotic_series(x, series_order):
  """Evaluates the truncated asymptotic series `1 + sum` used in `log_ndtr`."""
  if series_order <= 0:
    return 1.
  x_sq = math_ops.square(x)
  # Terms with odd n carry a minus sign, even n a plus sign; they are
  # accumulated separately and combined once at the end.
  pos_terms = 0.
  neg_terms = 0.
  x_pow = x_sq  # x**(2n), starting at n = 1.
  for n in range(1, series_order + 1):
    term = _double_factorial(2 * n - 1) / x_pow
    if n % 2:
      neg_terms += term
    else:
      pos_terms += term
    x_pow *= x_sq
  return 1. + pos_terms - neg_terms
def _double_factorial(n):
"""The double factorial function for small Python integer `n`."""
return np.prod(np.arange(n, 1, -2))
def log_cdf_laplace(x, name="log_cdf_laplace"):
  """Log Laplace distribution function.

  This function calculates `Log[L(x)]`, where `L(x)` is the cumulative
  distribution function of the standard Laplace distribution, i.e.

  ```L(x) := 0.5 * int_{-infty}^x e^{-|t|} dt```

  For numerical accuracy, `L(x)` is computed piecewise,

  ```
  x <= 0:  Log[L(x)] = Log[0.5] + x              (exact)
  0 <  x:  Log[L(x)] = Log[1 - 0.5 * e^{-x}]     (exact)
  ```

  Args:
    x: `Tensor` of type `float32`, `float64`.
    name: Python string. A name for the operation
      (default="log_cdf_laplace").

  Returns:
    `Tensor` with `dtype=x.dtype`.

  Raises:
    TypeError: if `x.dtype` is not handled.
  """
  with ops.name_scope(name, values=[x]):
    x = ops.convert_to_tensor(x, name="x")
    # Exact branch for x < 0: L(x) = 0.5 * exp(x).
    negative_branch = x - np.log(2.)
    # exp(-|x|) is bounded above by 1, which keeps log1p away from -1 at
    # x = log(1/2) and prevents exp overflow for x << -1 (that branch is
    # discarded by the where() anyway).
    bounded_exp = math_ops.exp(-math_ops.abs(x))
    # log1p(z) approximates log(1 + z) accurately for |z| << 1.
    positive_branch = math_ops.log1p(-0.5 * bounded_exp)
    return array_ops.where(x < 0., negative_branch, positive_branch)
| apache-2.0 |
AlexLisovoy/teco | tests.py | 1 | 1685 | import unittest
from teco.terms_counter import TermsCounter
class TecoTestCase(unittest.TestCase):
    """Tests for TermsCounter.compute_occurrences_count."""

    def setUp(self):
        # Shared fixture; NOTE(review): the tests below all build their own
        # counters, so self.terms/self.counter look unused -- confirm before
        # removing.
        self.terms = ["is", "example term", "a term",
                      "this this", "computer science"]
        self.counter = TermsCounter(self.terms)

    def test_term_counting(self):
        # One count per term, in the order the terms were supplied.
        counter = TermsCounter(["is", "example term"])
        answer = counter.compute_occurrences_count(
            "This is an example term")
        self.assertEqual((1, 1,), tuple(answer))

    def test_text_with_no_terms(self):
        counter = TermsCounter(["xterm"])
        answer = counter.compute_occurrences_count(
            "This is a term.")
        self.assertEqual((0,), tuple(answer))

    def test_terms_overlaps(self):
        # Expecting 2 (not 3) shows overlapping repeats are not double
        # counted.
        counter = TermsCounter(["this this"])
        answer = counter.compute_occurrences_count(
            "This this this this")
        self.assertEqual((2,), tuple(answer))

    def test_case_sensitive_counting(self):
        # Despite the name, all three casings are expected to match.
        text = "ALICE.Alice.alice"
        counter = TermsCounter(["alice"])
        self.assertEqual((3,), tuple(counter.compute_occurrences_count(text)))

    def test_terms_counting_with_new_line(self):
        text = "computer\nscience,computer\n\nscience"
        counter = TermsCounter(["computer science"])
        self.assertEqual((1,), tuple(counter.compute_occurrences_count(text)))

    def test_multiple_spaces(self):
        counter = TermsCounter(["computer science"])
        # NOTE(review): the indentation inside this literal is part of the
        # test data (it produces the multiple spaces under test) -- confirm
        # against upstream before reformatting.
        text = """computer science is the scientific and practical
        approach to computation and its applications"""
        self.assertEqual((1,), tuple(counter.compute_occurrences_count(text)))
| mit |
deroneriksson/incubator-systemml | src/main/python/tests/test_mllearn_df.py | 12 | 5320 | #!/usr/bin/python
#-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
# To run:
# - Python 2: `PYSPARK_PYTHON=python2 spark-submit --master local[*] --driver-class-path SystemML.jar test_mllearn_df.py`
# - Python 3: `PYSPARK_PYTHON=python3 spark-submit --master local[*] --driver-class-path SystemML.jar test_mllearn_df.py`
# Make the `systemml` package importable
import os
import sys
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../")
sys.path.insert(0, path)
import unittest
import numpy as np
from pyspark.ml import Pipeline
from pyspark.ml.feature import HashingTF, Tokenizer
from pyspark.sql import SparkSession
from sklearn import datasets, metrics, neighbors
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import linear_model
from sklearn.metrics import accuracy_score, r2_score
from systemml.mllearn import LinearRegression, LogisticRegression, NaiveBayes, SVM
sparkSession = SparkSession.builder.getOrCreate()
# Currently not integrated with JUnit test
# ~/spark-1.6.1-scala-2.11/bin/spark-submit --master local[*] --driver-class-path SystemML.jar test.py
class TestMLLearn(unittest.TestCase):
    """Compares systemml.mllearn estimators with their scikit-learn
    counterparts on small sklearn datasets, using the DataFrame transfer
    path (transferUsingDF=True).

    Each test asserts model-vs-model agreement (> 0.95 accuracy/R^2)
    rather than comparing against the held-out labels.
    """

    def test_logistic_sk2(self):
        """SystemML LogisticRegression tracks sklearn on digits."""
        digits = datasets.load_digits()
        X_digits = digits.data
        y_digits = digits.target
        n_samples = len(X_digits)
        X_train = X_digits[:int(.9 * n_samples)]
        y_train = y_digits[:int(.9 * n_samples)]
        X_test = X_digits[int(.9 * n_samples):]
        y_test = y_digits[int(.9 * n_samples):]
        # Convert to DataFrame for i/o: current way to transfer data
        logistic = LogisticRegression(sparkSession, transferUsingDF=True)
        logistic.fit(X_train, y_train)
        mllearn_predicted = logistic.predict(X_test)
        sklearn_logistic = linear_model.LogisticRegression()
        sklearn_logistic.fit(X_train, y_train)
        # failUnless is a deprecated alias removed in Python 3.12; use
        # assertTrue instead.
        self.assertTrue(accuracy_score(sklearn_logistic.predict(X_test), mllearn_predicted) > 0.95)  # We are comparable to a similar algorithm in scikit learn

    def test_linear_regression(self):
        """Direct-solve LinearRegression tracks sklearn on diabetes."""
        diabetes = datasets.load_diabetes()
        diabetes_X = diabetes.data[:, np.newaxis, 2]
        diabetes_X_train = diabetes_X[:-20]
        diabetes_X_test = diabetes_X[-20:]
        diabetes_y_train = diabetes.target[:-20]
        diabetes_y_test = diabetes.target[-20:]
        regr = LinearRegression(sparkSession, solver='direct-solve', transferUsingDF=True)
        regr.fit(diabetes_X_train, diabetes_y_train)
        mllearn_predicted = regr.predict(diabetes_X_test)
        sklearn_regr = linear_model.LinearRegression()
        sklearn_regr.fit(diabetes_X_train, diabetes_y_train)
        self.assertTrue(r2_score(sklearn_regr.predict(diabetes_X_test), mllearn_predicted) > 0.95)  # We are comparable to a similar algorithm in scikit learn

    def test_linear_regression_cg(self):
        """Conjugate-gradient LinearRegression tracks sklearn on diabetes."""
        diabetes = datasets.load_diabetes()
        diabetes_X = diabetes.data[:, np.newaxis, 2]
        diabetes_X_train = diabetes_X[:-20]
        diabetes_X_test = diabetes_X[-20:]
        diabetes_y_train = diabetes.target[:-20]
        diabetes_y_test = diabetes.target[-20:]
        regr = LinearRegression(sparkSession, solver='newton-cg', transferUsingDF=True)
        regr.fit(diabetes_X_train, diabetes_y_train)
        mllearn_predicted = regr.predict(diabetes_X_test)
        sklearn_regr = linear_model.LinearRegression()
        sklearn_regr.fit(diabetes_X_train, diabetes_y_train)
        self.assertTrue(r2_score(sklearn_regr.predict(diabetes_X_test), mllearn_predicted) > 0.95)  # We are comparable to a similar algorithm in scikit learn

    def test_svm_sk2(self):
        """Multi-class SVM tracks sklearn's LinearSVC on digits."""
        digits = datasets.load_digits()
        X_digits = digits.data
        y_digits = digits.target
        n_samples = len(X_digits)
        X_train = X_digits[:int(.9 * n_samples)]
        y_train = y_digits[:int(.9 * n_samples)]
        X_test = X_digits[int(.9 * n_samples):]
        y_test = y_digits[int(.9 * n_samples):]
        svm = SVM(sparkSession, is_multi_class=True, transferUsingDF=True)
        mllearn_predicted = svm.fit(X_train, y_train).predict(X_test)
        # Local import deliberately shadows the systemml SVM name above.
        from sklearn import linear_model, svm
        clf = svm.LinearSVC()
        sklearn_predicted = clf.fit(X_train, y_train).predict(X_test)
        self.assertTrue(accuracy_score(sklearn_predicted, mllearn_predicted) > 0.95)
# Allow running this module directly (e.g. via spark-submit, see header).
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
goldsborough/.emacs | .emacs.d/.python-environments/default/lib/python3.5/encodings/iso8859_8.py | 272 | 11036 | """ Python Character Mapping Codec iso8859_8 generated from 'MAPPINGS/ISO8859/8859-8.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless iso8859-8 codec backed by the charmap tables below."""

    def encode(self,input,errors='strict'):
        # str -> bytes via the generated encoding_table.
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        # bytes -> str via the generated decoding_table.
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap codecs keep no state between calls."""

    def encode(self, input, final=False):
        # charmap_encode returns (bytes, length); only the bytes are needed.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap codecs keep no state between calls."""

    def decode(self, input, final=False):
        # charmap_decode returns (str, length); only the str is needed.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support comes entirely from the Codec/StreamWriter bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support comes entirely from the Codec/StreamReader bases.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the codecs registry uses for 'iso8859-8'."""
    return codecs.CodecInfo(
        name='iso8859-8',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
# Maps each byte value 0x00-0xFF (by tuple index) to its unicode character.
# '\ufffe' marks byte values that are undefined in ISO-8859-8.
decoding_table = (
    '\x00'     #  0x00 -> NULL
    '\x01'     #  0x01 -> START OF HEADING
    '\x02'     #  0x02 -> START OF TEXT
    '\x03'     #  0x03 -> END OF TEXT
    '\x04'     #  0x04 -> END OF TRANSMISSION
    '\x05'     #  0x05 -> ENQUIRY
    '\x06'     #  0x06 -> ACKNOWLEDGE
    '\x07'     #  0x07 -> BELL
    '\x08'     #  0x08 -> BACKSPACE
    '\t'       #  0x09 -> HORIZONTAL TABULATION
    '\n'       #  0x0A -> LINE FEED
    '\x0b'     #  0x0B -> VERTICAL TABULATION
    '\x0c'     #  0x0C -> FORM FEED
    '\r'       #  0x0D -> CARRIAGE RETURN
    '\x0e'     #  0x0E -> SHIFT OUT
    '\x0f'     #  0x0F -> SHIFT IN
    '\x10'     #  0x10 -> DATA LINK ESCAPE
    '\x11'     #  0x11 -> DEVICE CONTROL ONE
    '\x12'     #  0x12 -> DEVICE CONTROL TWO
    '\x13'     #  0x13 -> DEVICE CONTROL THREE
    '\x14'     #  0x14 -> DEVICE CONTROL FOUR
    '\x15'     #  0x15 -> NEGATIVE ACKNOWLEDGE
    '\x16'     #  0x16 -> SYNCHRONOUS IDLE
    '\x17'     #  0x17 -> END OF TRANSMISSION BLOCK
    '\x18'     #  0x18 -> CANCEL
    '\x19'     #  0x19 -> END OF MEDIUM
    '\x1a'     #  0x1A -> SUBSTITUTE
    '\x1b'     #  0x1B -> ESCAPE
    '\x1c'     #  0x1C -> FILE SEPARATOR
    '\x1d'     #  0x1D -> GROUP SEPARATOR
    '\x1e'     #  0x1E -> RECORD SEPARATOR
    '\x1f'     #  0x1F -> UNIT SEPARATOR
    ' '        #  0x20 -> SPACE
    '!'        #  0x21 -> EXCLAMATION MARK
    '"'        #  0x22 -> QUOTATION MARK
    '#'        #  0x23 -> NUMBER SIGN
    '$'        #  0x24 -> DOLLAR SIGN
    '%'        #  0x25 -> PERCENT SIGN
    '&'        #  0x26 -> AMPERSAND
    "'"        #  0x27 -> APOSTROPHE
    '('        #  0x28 -> LEFT PARENTHESIS
    ')'        #  0x29 -> RIGHT PARENTHESIS
    '*'        #  0x2A -> ASTERISK
    '+'        #  0x2B -> PLUS SIGN
    ','        #  0x2C -> COMMA
    '-'        #  0x2D -> HYPHEN-MINUS
    '.'        #  0x2E -> FULL STOP
    '/'        #  0x2F -> SOLIDUS
    '0'        #  0x30 -> DIGIT ZERO
    '1'        #  0x31 -> DIGIT ONE
    '2'        #  0x32 -> DIGIT TWO
    '3'        #  0x33 -> DIGIT THREE
    '4'        #  0x34 -> DIGIT FOUR
    '5'        #  0x35 -> DIGIT FIVE
    '6'        #  0x36 -> DIGIT SIX
    '7'        #  0x37 -> DIGIT SEVEN
    '8'        #  0x38 -> DIGIT EIGHT
    '9'        #  0x39 -> DIGIT NINE
    ':'        #  0x3A -> COLON
    ';'        #  0x3B -> SEMICOLON
    '<'        #  0x3C -> LESS-THAN SIGN
    '='        #  0x3D -> EQUALS SIGN
    '>'        #  0x3E -> GREATER-THAN SIGN
    '?'        #  0x3F -> QUESTION MARK
    '@'        #  0x40 -> COMMERCIAL AT
    'A'        #  0x41 -> LATIN CAPITAL LETTER A
    'B'        #  0x42 -> LATIN CAPITAL LETTER B
    'C'        #  0x43 -> LATIN CAPITAL LETTER C
    'D'        #  0x44 -> LATIN CAPITAL LETTER D
    'E'        #  0x45 -> LATIN CAPITAL LETTER E
    'F'        #  0x46 -> LATIN CAPITAL LETTER F
    'G'        #  0x47 -> LATIN CAPITAL LETTER G
    'H'        #  0x48 -> LATIN CAPITAL LETTER H
    'I'        #  0x49 -> LATIN CAPITAL LETTER I
    'J'        #  0x4A -> LATIN CAPITAL LETTER J
    'K'        #  0x4B -> LATIN CAPITAL LETTER K
    'L'        #  0x4C -> LATIN CAPITAL LETTER L
    'M'        #  0x4D -> LATIN CAPITAL LETTER M
    'N'        #  0x4E -> LATIN CAPITAL LETTER N
    'O'        #  0x4F -> LATIN CAPITAL LETTER O
    'P'        #  0x50 -> LATIN CAPITAL LETTER P
    'Q'        #  0x51 -> LATIN CAPITAL LETTER Q
    'R'        #  0x52 -> LATIN CAPITAL LETTER R
    'S'        #  0x53 -> LATIN CAPITAL LETTER S
    'T'        #  0x54 -> LATIN CAPITAL LETTER T
    'U'        #  0x55 -> LATIN CAPITAL LETTER U
    'V'        #  0x56 -> LATIN CAPITAL LETTER V
    'W'        #  0x57 -> LATIN CAPITAL LETTER W
    'X'        #  0x58 -> LATIN CAPITAL LETTER X
    'Y'        #  0x59 -> LATIN CAPITAL LETTER Y
    'Z'        #  0x5A -> LATIN CAPITAL LETTER Z
    '['        #  0x5B -> LEFT SQUARE BRACKET
    '\\'       #  0x5C -> REVERSE SOLIDUS
    ']'        #  0x5D -> RIGHT SQUARE BRACKET
    '^'        #  0x5E -> CIRCUMFLEX ACCENT
    '_'        #  0x5F -> LOW LINE
    '`'        #  0x60 -> GRAVE ACCENT
    'a'        #  0x61 -> LATIN SMALL LETTER A
    'b'        #  0x62 -> LATIN SMALL LETTER B
    'c'        #  0x63 -> LATIN SMALL LETTER C
    'd'        #  0x64 -> LATIN SMALL LETTER D
    'e'        #  0x65 -> LATIN SMALL LETTER E
    'f'        #  0x66 -> LATIN SMALL LETTER F
    'g'        #  0x67 -> LATIN SMALL LETTER G
    'h'        #  0x68 -> LATIN SMALL LETTER H
    'i'        #  0x69 -> LATIN SMALL LETTER I
    'j'        #  0x6A -> LATIN SMALL LETTER J
    'k'        #  0x6B -> LATIN SMALL LETTER K
    'l'        #  0x6C -> LATIN SMALL LETTER L
    'm'        #  0x6D -> LATIN SMALL LETTER M
    'n'        #  0x6E -> LATIN SMALL LETTER N
    'o'        #  0x6F -> LATIN SMALL LETTER O
    'p'        #  0x70 -> LATIN SMALL LETTER P
    'q'        #  0x71 -> LATIN SMALL LETTER Q
    'r'        #  0x72 -> LATIN SMALL LETTER R
    's'        #  0x73 -> LATIN SMALL LETTER S
    't'        #  0x74 -> LATIN SMALL LETTER T
    'u'        #  0x75 -> LATIN SMALL LETTER U
    'v'        #  0x76 -> LATIN SMALL LETTER V
    'w'        #  0x77 -> LATIN SMALL LETTER W
    'x'        #  0x78 -> LATIN SMALL LETTER X
    'y'        #  0x79 -> LATIN SMALL LETTER Y
    'z'        #  0x7A -> LATIN SMALL LETTER Z
    '{'        #  0x7B -> LEFT CURLY BRACKET
    '|'        #  0x7C -> VERTICAL LINE
    '}'        #  0x7D -> RIGHT CURLY BRACKET
    '~'        #  0x7E -> TILDE
    '\x7f'     #  0x7F -> DELETE
    '\x80'     #  0x80 -> <control>
    '\x81'     #  0x81 -> <control>
    '\x82'     #  0x82 -> <control>
    '\x83'     #  0x83 -> <control>
    '\x84'     #  0x84 -> <control>
    '\x85'     #  0x85 -> <control>
    '\x86'     #  0x86 -> <control>
    '\x87'     #  0x87 -> <control>
    '\x88'     #  0x88 -> <control>
    '\x89'     #  0x89 -> <control>
    '\x8a'     #  0x8A -> <control>
    '\x8b'     #  0x8B -> <control>
    '\x8c'     #  0x8C -> <control>
    '\x8d'     #  0x8D -> <control>
    '\x8e'     #  0x8E -> <control>
    '\x8f'     #  0x8F -> <control>
    '\x90'     #  0x90 -> <control>
    '\x91'     #  0x91 -> <control>
    '\x92'     #  0x92 -> <control>
    '\x93'     #  0x93 -> <control>
    '\x94'     #  0x94 -> <control>
    '\x95'     #  0x95 -> <control>
    '\x96'     #  0x96 -> <control>
    '\x97'     #  0x97 -> <control>
    '\x98'     #  0x98 -> <control>
    '\x99'     #  0x99 -> <control>
    '\x9a'     #  0x9A -> <control>
    '\x9b'     #  0x9B -> <control>
    '\x9c'     #  0x9C -> <control>
    '\x9d'     #  0x9D -> <control>
    '\x9e'     #  0x9E -> <control>
    '\x9f'     #  0x9F -> <control>
    '\xa0'     #  0xA0 -> NO-BREAK SPACE
    '\ufffe'   #  0xA1 -> UNDEFINED
    '\xa2'     #  0xA2 -> CENT SIGN
    '\xa3'     #  0xA3 -> POUND SIGN
    '\xa4'     #  0xA4 -> CURRENCY SIGN
    '\xa5'     #  0xA5 -> YEN SIGN
    '\xa6'     #  0xA6 -> BROKEN BAR
    '\xa7'     #  0xA7 -> SECTION SIGN
    '\xa8'     #  0xA8 -> DIAERESIS
    '\xa9'     #  0xA9 -> COPYRIGHT SIGN
    '\xd7'     #  0xAA -> MULTIPLICATION SIGN
    '\xab'     #  0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xac'     #  0xAC -> NOT SIGN
    '\xad'     #  0xAD -> SOFT HYPHEN
    '\xae'     #  0xAE -> REGISTERED SIGN
    '\xaf'     #  0xAF -> MACRON
    '\xb0'     #  0xB0 -> DEGREE SIGN
    '\xb1'     #  0xB1 -> PLUS-MINUS SIGN
    '\xb2'     #  0xB2 -> SUPERSCRIPT TWO
    '\xb3'     #  0xB3 -> SUPERSCRIPT THREE
    '\xb4'     #  0xB4 -> ACUTE ACCENT
    '\xb5'     #  0xB5 -> MICRO SIGN
    '\xb6'     #  0xB6 -> PILCROW SIGN
    '\xb7'     #  0xB7 -> MIDDLE DOT
    '\xb8'     #  0xB8 -> CEDILLA
    '\xb9'     #  0xB9 -> SUPERSCRIPT ONE
    '\xf7'     #  0xBA -> DIVISION SIGN
    '\xbb'     #  0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xbc'     #  0xBC -> VULGAR FRACTION ONE QUARTER
    '\xbd'     #  0xBD -> VULGAR FRACTION ONE HALF
    '\xbe'     #  0xBE -> VULGAR FRACTION THREE QUARTERS
    '\ufffe'   #  0xBF -> UNDEFINED
    '\ufffe'   #  0xC0 -> UNDEFINED
    '\ufffe'   #  0xC1 -> UNDEFINED
    '\ufffe'   #  0xC2 -> UNDEFINED
    '\ufffe'   #  0xC3 -> UNDEFINED
    '\ufffe'   #  0xC4 -> UNDEFINED
    '\ufffe'   #  0xC5 -> UNDEFINED
    '\ufffe'   #  0xC6 -> UNDEFINED
    '\ufffe'   #  0xC7 -> UNDEFINED
    '\ufffe'   #  0xC8 -> UNDEFINED
    '\ufffe'   #  0xC9 -> UNDEFINED
    '\ufffe'   #  0xCA -> UNDEFINED
    '\ufffe'   #  0xCB -> UNDEFINED
    '\ufffe'   #  0xCC -> UNDEFINED
    '\ufffe'   #  0xCD -> UNDEFINED
    '\ufffe'   #  0xCE -> UNDEFINED
    '\ufffe'   #  0xCF -> UNDEFINED
    '\ufffe'   #  0xD0 -> UNDEFINED
    '\ufffe'   #  0xD1 -> UNDEFINED
    '\ufffe'   #  0xD2 -> UNDEFINED
    '\ufffe'   #  0xD3 -> UNDEFINED
    '\ufffe'   #  0xD4 -> UNDEFINED
    '\ufffe'   #  0xD5 -> UNDEFINED
    '\ufffe'   #  0xD6 -> UNDEFINED
    '\ufffe'   #  0xD7 -> UNDEFINED
    '\ufffe'   #  0xD8 -> UNDEFINED
    '\ufffe'   #  0xD9 -> UNDEFINED
    '\ufffe'   #  0xDA -> UNDEFINED
    '\ufffe'   #  0xDB -> UNDEFINED
    '\ufffe'   #  0xDC -> UNDEFINED
    '\ufffe'   #  0xDD -> UNDEFINED
    '\ufffe'   #  0xDE -> UNDEFINED
    '\u2017'   #  0xDF -> DOUBLE LOW LINE
    '\u05d0'   #  0xE0 -> HEBREW LETTER ALEF
    '\u05d1'   #  0xE1 -> HEBREW LETTER BET
    '\u05d2'   #  0xE2 -> HEBREW LETTER GIMEL
    '\u05d3'   #  0xE3 -> HEBREW LETTER DALET
    '\u05d4'   #  0xE4 -> HEBREW LETTER HE
    '\u05d5'   #  0xE5 -> HEBREW LETTER VAV
    '\u05d6'   #  0xE6 -> HEBREW LETTER ZAYIN
    '\u05d7'   #  0xE7 -> HEBREW LETTER HET
    '\u05d8'   #  0xE8 -> HEBREW LETTER TET
    '\u05d9'   #  0xE9 -> HEBREW LETTER YOD
    '\u05da'   #  0xEA -> HEBREW LETTER FINAL KAF
    '\u05db'   #  0xEB -> HEBREW LETTER KAF
    '\u05dc'   #  0xEC -> HEBREW LETTER LAMED
    '\u05dd'   #  0xED -> HEBREW LETTER FINAL MEM
    '\u05de'   #  0xEE -> HEBREW LETTER MEM
    '\u05df'   #  0xEF -> HEBREW LETTER FINAL NUN
    '\u05e0'   #  0xF0 -> HEBREW LETTER NUN
    '\u05e1'   #  0xF1 -> HEBREW LETTER SAMEKH
    '\u05e2'   #  0xF2 -> HEBREW LETTER AYIN
    '\u05e3'   #  0xF3 -> HEBREW LETTER FINAL PE
    '\u05e4'   #  0xF4 -> HEBREW LETTER PE
    '\u05e5'   #  0xF5 -> HEBREW LETTER FINAL TSADI
    '\u05e6'   #  0xF6 -> HEBREW LETTER TSADI
    '\u05e7'   #  0xF7 -> HEBREW LETTER QOF
    '\u05e8'   #  0xF8 -> HEBREW LETTER RESH
    '\u05e9'   #  0xF9 -> HEBREW LETTER SHIN
    '\u05ea'   #  0xFA -> HEBREW LETTER TAV
    '\ufffe'   #  0xFB -> UNDEFINED
    '\ufffe'   #  0xFC -> UNDEFINED
    '\u200e'   #  0xFD -> LEFT-TO-RIGHT MARK
    '\u200f'   #  0xFE -> RIGHT-TO-LEFT MARK
    '\ufffe'   #  0xFF -> UNDEFINED
)
### Encoding table
# Inverse mapping (unicode -> byte) derived from decoding_table above.
encoding_table=codecs.charmap_build(decoding_table)
| mit |
46elks/elkme | elkme/config.py | 1 | 5290 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 46elks AB <hello@46elks.com>
# Developed in 2015 by Emil Tullstedt <emil@46elks.com>
# Licensed under the MIT License
from __future__ import print_function
try:
import ConfigParser
except ImportError:
import configparser as ConfigParser
import sys, os, stat
import platform
from subprocess import call
# Configuration-file skeleton written by save_config(); the {username},
# {password}, {from} and {to} placeholders are filled in by the mapping
# produced by generate_config() via template.format(**settings).
template = """\
#
# elkme configuration file
#
###
# AUTHENTICATION
#
# Your API keys from https://dashboard.46elks.com goes here. Please keep
# these somewhat secret
###
{username}
{password}
###
# SENDING SMS DETAILS
#
# Set a default recipient (with the `to` key) and a default sender
# (using the `from` key). Your `to` key must be a E.163 international format
# phone number and your `from` key must either be E.163 or a valid
# alphanumerical sender (starting with letter, maximum of 11 letters/digits)
#
# The from-number should be either your own number (as you registered it on
# your 46elks account) or a 46elks number on your account.
###
{from}
{to}
###
# ROUTING
#
# If you have a mock 46elks server available for debugging purposes or a
# proxy or something, you can use the `api_url` key to route your API calls
# to it. Most people will want to leave this at it's default
###
# api_url = https://api.46elks.com/a1
"""
def init_config(args):
    """Load, merge and optionally persist/edit the elkme configuration.

    Reads the configuration file located via locate_config() (a missing or
    unreadable file is not an error), overlays the command-line arguments
    on top of it, then honours the --saveconf and --editconf flags.

    Returns a (conf, status) tuple where status is an (ok, message) pair.
    Note: when args.editconf is set, open_text_editor() terminates the
    process before this function returns.
    """
    status = (True, None)
    conffile = locate_config(args)
    conf = {}
    try:
        conf = read_config(conffile)
    except IOError:
        # Best effort: start from an empty configuration when the file
        # cannot be read (e.g. first run).
        pass
    conf = update_config_with_args(conf, args)
    if args.saveconf:
        status = save_config(conf, conffile)
    if args.editconf:
        open_text_editor(conffile)
    return (conf, status)
def open_text_editor(destfile):
    """Open *destfile* in the first available text editor, then exit.

    $EDITOR is tried first, followed by a list of common editors.  The
    process always terminates with exit code 0 afterwards.
    """
    candidates = [
        os.environ.get('EDITOR'),
        'nano',
        'vim',
        'vi',
        'emacs',
        'gedit'
    ]
    for candidate in candidates:
        if not candidate:
            continue
        # `which` exits with 0 when the editor is found in PATH.
        if call(['which', candidate]) == 0:
            call([candidate, destfile])
            print('Done editing the configuration file')
            break
    else:
        print('Couldn\'t find a text editor on your system')
    sys.exit(0)
def locate_config(args):
    """Return the configuration file path to use.

    An explicit --config argument wins (with ~ expansion); otherwise the
    platform default from default_config_location() is used.
    """
    if not args.configfile:
        return default_config_location()
    return os.path.expanduser(args.configfile)
def default_config_location(Filename="elkme"):
    """Return the platform-conventional path for the configuration file.

    Darwin:  ~/Library/Application Support/<Filename>
    Linux:   ~/.config/<Filename>   (the ~/.config directory is created
             if missing)
    Windows: %LOCALAPPDATA%\\<Filename>.ini
    Other:   ~/.<Filename>
    """
    home = os.path.expanduser('~')
    # Fallback for platforms without a dedicated convention: ~/.<Filename>
    location = os.path.join(home, '.' + Filename)
    system = platform.system()
    if system == "Darwin":
        location = os.path.join(home, "Library", "Application Support",
                                Filename)
    elif system == "Linux":
        path = os.path.join(home, ".config")
        location = os.path.join(path, Filename)
        if not os.path.isdir(path):
            os.mkdir(path)
    elif system == "Windows":
        # Might break on Windows <= XP
        # That's ok, since XP is no longer supported by MSFT
        location = os.path.join(os.environ["LOCALAPPDATA"], Filename + ".ini")
    return location
def read_config(path, section="46elks"):
    """Parse a simple ``key = value`` configuration file into a dict.

    Blank lines, comments (starting with '#') and section headers
    (starting with '[') are ignored; malformed lines are reported on
    stdout and skipped.

    NOTE(review): the ``section`` parameter is currently unused -- every
    key/value pair is returned regardless of the section it appears in.
    """
    settings = {}
    with open(path, 'r') as conf_file:
        for row, raw_line in enumerate(conf_file, start=1):
            stripped = raw_line.strip()
            if not stripped or stripped[0] in '[#':
                continue
            parts = stripped.split('=', 1)
            if len(parts) != 2:
                print('[ERROR] Expected = delimited on line {}'.format(row))
                continue
            settings[parts[0].strip()] = parts[1].strip()
    return settings
def generate_config(conf):
    """Render the placeholder values for the configuration file template.

    For each supported key ('username', 'password', 'from', 'to') the
    resulting dict maps the key to a ``key = value`` line when *conf*
    holds a truthy value, or to a commented-out example line otherwise.
    (The original docstring claimed ConfigParser was used here; it is
    not -- the result is substituted into `template` via str.format.)

    Returns a (status, mapping) tuple; status is always True.
    """
    rendered = {}

    def to_key(key, default):
        # Emit a real assignment when the key is set, else the example.
        if conf.get(key):
            rendered[key] = '{} = {}'.format(key, conf.get(key))
        else:
            rendered[key] = default

    to_key('username', '# username = u1234...')
    to_key('password', '# password = secret...')
    to_key('from', '# from = elkme')
    to_key('to', '# to = +46700000000')
    return (True, rendered)
def update_config_with_args(conf, args):
    """Overlay command-line arguments onto the configuration dict.

    Always marks the config verbose; enables debug when -v was given
    without -q; copies any explicitly provided recipient, sender,
    username and password.  Mutates and returns *conf*.
    """
    conf['verbose'] = True
    if args.verbose >= 1 and args.quiet < 1:
        conf['debug'] = True
    overrides = (
        ('to', args.to),
        ('from', args.sender),
        ('username', args.username),
        ('password', args.password),
    )
    for key, value in overrides:
        if value:
            conf[key] = value
    return conf
def save_config(conf, conffile):
    """Write *conf* to *conffile* using the module-level template.

    Returns (True, status) on success, or (False, message) when the file
    cannot be written.
    """
    result = None
    try:
        with open(conffile, 'w') as handle:
            result, values = generate_config(conf)
            handle.write(template.format(**values))
        # Restrict the file to owner read/write (0600) so other local
        # users cannot read the API keys stored in it.
        os.chmod(conffile, stat.S_IRUSR | stat.S_IWUSR)
    except IOError as e:
        return (False, 'Failed updating configuration file:\n{}'.format(e))
    if result:
        print('Updated configuration file {}'.format(conffile))
    return (True, result)
| mit |
datasciencesg/knowledge-base | MOOC/Computer-Science-and-Python/ProblemSet4/ps4b.py | 1 | 10890 | from ps4a import *
import time
#
#
# Problem #6: Computer chooses a word
#
#
def loadWords():
"""
Returns a list of valid words. Words are strings of lowercase letters.
Depending on the size of the word list, this function may
take a while to finish.
"""
print "Loading word list from file..."
# inFile: file
inFile = open(WORDLIST_FILENAME, 'r', 0)
# wordList: list of strings
wordList = []
for line in inFile:
wordList.append(line.strip().lower())
print " ", len(wordList), "words loaded."
return wordList
def isValidWord(word, hand, wordList):
    """
    Returns True if word is in the wordList and is entirely
    composed of letters in the hand. Otherwise, returns False.

    Does not mutate hand or wordList.

    word: string
    hand: dictionary (string -> int)
    wordList: list of lowercase strings
    """
    # BUG FIX: the docstring promises a wordList membership check, but the
    # original implementation never performed it.
    if word not in wordList:
        return False
    # Work on a copy so the caller's hand is left untouched.
    remaining = hand.copy()
    for char in word:
        # Missing letters count as 0 available.
        if remaining.get(char, 0) <= 0:
            return False
        remaining[char] -= 1
    return True
def getWordScore(word, n):
    """
    Returns the score for a word. Assumes the word is a valid word.

    The score is the sum of the Scrabble points of the letters in the
    word, multiplied by the word length, plus a 50 point bonus when all
    n letters of the hand are used at once.

    word: string (lowercase letters)
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    returns: int >= 0
    """
    # Per-letter points summed in one pass, then scaled by word length.
    total = sum(SCRABBLE_LETTER_VALUES[letter] for letter in word) * len(word)
    # Bonus for emptying the whole hand in a single play.
    if len(word) == n:
        total += 50
    return total
def compChooseWord(hand, wordList, n):
    """
    Given a hand and a wordList, find the word that gives
    the maximum value score, and return it.

    Every word in wordList is considered; if none of them can be built
    from the hand, None is returned.

    hand: dictionary (string -> int)
    wordList: list (string)
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    returns: string or None
    """
    best_word = None
    best_score = 0
    for candidate in wordList:
        # Skip anything that cannot be assembled from the current hand.
        if not isValidWord(candidate, hand, wordList):
            continue
        candidate_score = getWordScore(candidate, n)
        # Strict '>' keeps the earliest word on ties, as before.
        if candidate_score > best_score:
            best_score = candidate_score
            best_word = candidate
    return best_word
#
# Problem #7: Computer plays a hand
#
def compPlayHand(hand, wordList, n):
    """
    Allows the computer to play the given hand, following the same procedure
    as playHand, except instead of the user choosing a word, the computer
    chooses it.

    1) The hand is displayed.
    2) The computer chooses a word.
    3) After every valid word: the word and the score for that word is
    displayed, the remaining letters in the hand are displayed, and the
    computer chooses another word.
    4)  The sum of the word scores is displayed when the hand finishes.
    5)  The hand finishes when the computer has exhausted its possible
    choices (i.e. compChooseWord returns None).

    hand: dictionary (string -> int)
    wordList: list (string)
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    """
    # Keep track of the total score (initialize score to 0)
    total_score = 0
    # As long as there are still letters left in the hand:
    while calculateHandlen(hand) > 0:
        # Display the hand
        print 'Current Hand:',
        displayHand(hand)
        print
        # check if the computer can still choose a word; if None, that means
        # no words available
        # NOTE(review): compChooseWord is called twice per iteration (here
        # and below); caching one result would halve the search work.
        if compChooseWord(hand, wordList, n) == None:
            break
        # if not None, means there are words available
        else:
            # computer chooses a word
            word = compChooseWord(hand, wordList, n)
            word_score = getWordScore(word, n)
            total_score += word_score
            print '"' + word + '"' + ' earned ' + str(word_score) +' points. Total: ' + str(total_score) + ' points.'
            print
            # Update the hand
            hand = updateHand(hand, word)
    # Final tally, printed once the hand is exhausted.
    print 'Total score: ' + str(total_score) + ' points.'
#
# Problem #8: Playing a game
#
#
def playGame(wordList):
"""
Allow the user to play an arbitrary number of hands.
1) Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'e', immediately exit the game.
* If the user inputs anything that's not 'n', 'r', or 'e', keep asking them again.
2) Asks the user to input a 'u' or a 'c'.
* If the user inputs anything that's not 'c' or 'u', keep asking them again.
3) Switch functionality based on the above choices:
* If the user inputted 'n', play a new (random) hand.
* Else, if the user inputted 'r', play the last hand again.
* If the user inputted 'u', let the user play the game
with the selected hand, using playHand.
* If the user inputted 'c', let the computer play the
game with the selected hand, using compPlayHand.
4) After the computer or user has played the hand, repeat from step 1
wordList: list (string)
"""
# initialize empty hand
hand = {}
while True:
# initialize variable for game_choice
game_choice = str(raw_input('Enter n to deal a new hand, r to replay the last hand, or e to end game: '))
# if player chooses to exit, break the loop
if game_choice == 'e':
break
# if player chooses to replay but hand is empty, print error message
elif game_choice == 'r':
if hand == {}:
print 'You have not played a hand yet. Please play a new hand first!'
else:
# if hand is not empty, prompt for player choice of user or computer
player_choice = str(raw_input('Enter u to have yourself play, c to have the computer play: '))
# if player chooses to play, initiate playHand
if player_choice == 'u':
playHand(hand, wordList, HAND_SIZE)
# if player chooses computer to play, initiate compPlayHand
elif player_choice == 'c':
compPlayHand(hand, wordList, HAND_SIZE)
# if player chooses another other input, print error message
else:
print 'Invalid command.'
# if player chooses to play new game or replay old hand, ask for player
# choice of either user or computer
elif game_choice == 'n':
player_choice = str(raw_input('Enter u to have yourself play, c to have the computer play: '))
hand = dealHand(HAND_SIZE)
# if player chooses to play, initiate playHand
if player_choice == 'u':
playHand(hand, wordList, HAND_SIZE)
# if players chooses computer to play, initiate compPlayHand
elif player_choice == 'c':
compPlayHand(hand, wordList, HAND_SIZE)
# if player chooses another other input, print error message
#else:
print 'Invalid command.'
# if any other input, print error message
else:
print 'Invalid command.'
## if player had chosen to start new game, deal new hand
#if game_choice == 'n':
# hand = dealHand(HAND_SIZE)
#
# # if player chooses to play, initiate playHand
# if player_choice == 'u':
# playHand(hand, wordList, HAND_SIZE)
#
# # if players chooses computer to play, initiate compPlayHand
# elif player_choice == 'c':
# compPlayHand(hand, wordList, HAND_SIZE)
#
# # if player chooses another other input, print error message
# else:
# print 'Invalid command.'
#
## if player had chosen to replay the hand,
#elif game_choice == 'r' and hand != {}:
#
# # if player chooses to play, initiate playHand
# if player_choice == 'u':
# playHand(hand, wordList, HAND_SIZE)
#
# # if player chooses computer to play, initiate compPlayHand
# elif player_choice == 'c':
# compPlayHand(hand, wordList, HAND_SIZE)
#
# # if player chooses another other input, print error message
# else:
# print 'Invalid command.'
#
# Build data structures used for entire session and play game
#
# Only start the interactive game when executed as a script, not on import.
if __name__ == '__main__':
    wordList = loadWords()
    playGame(wordList)
| mit |
DavidLP/home-assistant | tests/components/script/test_init.py | 10 | 8952 | """The tests for the Script component."""
# pylint: disable=protected-access
import unittest
from unittest.mock import patch, Mock
from homeassistant.components import script
from homeassistant.components.script import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_NAME, SERVICE_RELOAD, SERVICE_TOGGLE,
SERVICE_TURN_OFF, SERVICE_TURN_ON, EVENT_SCRIPT_STARTED)
from homeassistant.core import Context, callback, split_entity_id
from homeassistant.loader import bind_hass
from homeassistant.setup import setup_component, async_setup_component
from tests.common import get_test_home_assistant
ENTITY_ID = 'script.test'
@bind_hass
def turn_on(hass, entity_id, variables=None, context=None):
    """Turn script on.

    This is a legacy helper method. Do not use it for new tests.
    """
    _, object_id = split_entity_id(entity_id)
    # Each script is exposed as a service named after its object id.
    hass.services.call(DOMAIN, object_id, variables, context=context)
@bind_hass
def turn_off(hass, entity_id):
    """Turn script off.

    This is a legacy helper method. Do not use it for new tests.
    """
    hass.services.call(DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id})
@bind_hass
def toggle(hass, entity_id):
    """Toggle the script.

    This is a legacy helper method. Do not use it for new tests.
    """
    hass.services.call(DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity_id})
@bind_hass
def reload(hass):
    """Reload script component.

    This is a legacy helper method. Do not use it for new tests.
    """
    hass.services.call(DOMAIN, SERVICE_RELOAD)
class TestScriptComponent(unittest.TestCase):
    """Test the Script component."""

    # pylint: disable=invalid-name
    def setUp(self):
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    # pylint: disable=invalid-name
    def tearDown(self):
        """Stop down everything that was started."""
        self.hass.stop()

    def test_setup_with_invalid_configs(self):
        """Test setup with invalid configs."""
        # Invalid cases: empty script config, object id containing spaces,
        # and a sequence step mixing 'event' with 'service'.
        for value in (
            {'test': {}},
            {
                'test hello world': {
                    'sequence': [{'event': 'bla'}]
                }
            },
            {
                'test': {
                    'sequence': {
                        'event': 'test_event',
                        'service': 'homeassistant.turn_on',
                    }
                }
            },
        ):
            assert not setup_component(self.hass, 'script', {
                'script': value
            }), 'Script loaded with wrong config {}'.format(value)
            assert 0 == len(self.hass.states.entity_ids('script'))

    def test_turn_on_service(self):
        """Verify that the turn_on service works."""
        event = 'test_event'
        events = []

        @callback
        def record_event(event):
            """Add recorded event to set."""
            events.append(event)

        self.hass.bus.listen(event, record_event)

        # The 5 second delay keeps the script "running" so the event must
        # not have fired yet when we assert below.
        assert setup_component(self.hass, 'script', {
            'script': {
                'test': {
                    'sequence': [{
                        'delay': {
                            'seconds': 5
                        }
                    }, {
                        'event': event,
                    }]
                }
            }
        })

        turn_on(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        assert script.is_on(self.hass, ENTITY_ID)
        assert 0 == len(events)

        # Calling turn_on a second time should not advance the script
        turn_on(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        assert 0 == len(events)

        # Turning off mid-delay cancels the run before the event fires.
        turn_off(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        assert not script.is_on(self.hass, ENTITY_ID)
        assert 0 == len(events)

        state = self.hass.states.get('group.all_scripts')
        assert state is not None
        assert state.attributes.get('entity_id') == (ENTITY_ID,)

    def test_toggle_service(self):
        """Test the toggling of a service."""
        event = 'test_event'
        events = []

        @callback
        def record_event(event):
            """Add recorded event to set."""
            events.append(event)

        self.hass.bus.listen(event, record_event)

        assert setup_component(self.hass, 'script', {
            'script': {
                'test': {
                    'sequence': [{
                        'delay': {
                            'seconds': 5
                        }
                    }, {
                        'event': event,
                    }]
                }
            }
        })

        # First toggle starts the (delayed) script, second one stops it.
        toggle(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        assert script.is_on(self.hass, ENTITY_ID)
        assert 0 == len(events)

        toggle(self.hass, ENTITY_ID)
        self.hass.block_till_done()
        assert not script.is_on(self.hass, ENTITY_ID)
        assert 0 == len(events)

    def test_passing_variables(self):
        """Test different ways of passing in variables."""
        calls = []
        context = Context()

        @callback
        def record_call(service):
            """Add recorded event to set."""
            calls.append(service)

        self.hass.services.register('test', 'script', record_call)

        assert setup_component(self.hass, 'script', {
            'script': {
                'test': {
                    'sequence': {
                        'service': 'test.script',
                        'data_template': {
                            'hello': '{{ greeting }}',
                        },
                    },
                },
            },
        })

        # Variables via the legacy turn_on helper...
        turn_on(self.hass, ENTITY_ID, {
            'greeting': 'world'
        }, context=context)
        self.hass.block_till_done()
        assert len(calls) == 1
        assert calls[0].context is context
        assert calls[0].data['hello'] == 'world'

        # ...and via a direct service call to the script's own service.
        self.hass.services.call('script', 'test', {
            'greeting': 'universe',
        }, context=context)
        self.hass.block_till_done()
        assert len(calls) == 2
        assert calls[1].context is context
        assert calls[1].data['hello'] == 'universe'

    def test_reload_service(self):
        """Verify that the reload service swaps in the new configuration."""
        assert setup_component(self.hass, 'script', {
            'script': {
                'test': {
                    'sequence': [{
                        'delay': {
                            'seconds': 5
                        }
                    }]
                }
            }
        })

        assert self.hass.states.get(ENTITY_ID) is not None
        assert self.hass.services.has_service(script.DOMAIN, 'test')

        # Reload against a patched config that only defines 'test2'.
        with patch('homeassistant.config.load_yaml_config_file', return_value={
            'script': {
                'test2': {
                    'sequence': [{
                        'delay': {
                            'seconds': 5
                        }
                    }]
                }}}):
            with patch('homeassistant.config.find_config_file',
                       return_value=''):
                reload(self.hass)
                self.hass.block_till_done()

        # 'test' must be gone and 'test2' registered after the reload.
        assert self.hass.states.get(ENTITY_ID) is None
        assert not self.hass.services.has_service(script.DOMAIN, 'test')
        assert self.hass.states.get("script.test2") is not None
        assert self.hass.services.has_service(script.DOMAIN, 'test2')
async def test_shared_context(hass):
    """Test that the shared context is passed down the chain."""
    event = 'test_event'
    context = Context()

    event_mock = Mock()
    run_mock = Mock()

    hass.bus.async_listen(event, event_mock)
    hass.bus.async_listen(EVENT_SCRIPT_STARTED, run_mock)

    assert await async_setup_component(hass, 'script', {
        'script': {
            'test': {
                'sequence': [
                    {'event': event}
                ]
            }
        }
    })

    # Start the script with an explicit context; everything it triggers
    # must carry this same context object.
    await hass.services.async_call(DOMAIN, SERVICE_TURN_ON,
                                   {ATTR_ENTITY_ID: ENTITY_ID},
                                   context=context)
    await hass.async_block_till_done()

    assert event_mock.call_count == 1
    assert run_mock.call_count == 1

    args, kwargs = run_mock.call_args
    assert args[0].context == context
    # Ensure event data has all attributes set
    assert args[0].data.get(ATTR_NAME) == 'test'
    assert args[0].data.get(ATTR_ENTITY_ID) == 'script.test'

    # Ensure context carries through the event
    args, kwargs = event_mock.call_args
    assert args[0].context == context

    # Ensure the script state shares the same context
    state = hass.states.get('script.test')
    assert state is not None
    assert state.context == context
| apache-2.0 |
ubolonton/DDF | python/ddf/ddf_manager.py | 1 | 1149 | """
Created on Jun 22, 2014
@author: nhanitvn
"""
from dataframe import DistributedDataFrame
from gateway import start_gateway_server
class DDFManager(object):
    """
    Main entry point for DDF functionality. A SparkDDFManager can be used
    to create DDFs that are implemented for Spark framework.
    """

    # JVM-side io.ddf.DDFManager handle; populated by __init__.
    _jdm = None

    def __init__(self, engine_name):
        """Create a manager backed by the named DDF engine.

        :param engine_name: Name of the DDF engine, e.g. 'spark'
        """
        gateway = start_gateway_server()
        self._jdm = gateway.jvm.io.ddf.DDFManager.get(engine_name)

    def sql(self, command):
        """Execute a sql command and return a list of strings.

        :param command: the sql command to run
        """
        return self._jdm.sql(command)

    def sql2ddf(self, command):
        """Create a DistributedDataFrame from an sql command.

        :param command: the sql command to run
        """
        java_ddf = self._jdm.sql2ddf(command)
        return DistributedDataFrame(java_ddf)

    def shutdown(self):
        """Shut down the DDF Manager."""
        self._jdm.shutdown()
        print('Bye bye')
| apache-2.0 |
ma314smith/home-assistant | homeassistant/components/media_player/pandora.py | 5 | 13085 | """
Component for controlling Pandora stations through the pianobar client.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/media_player.pandora/
"""
import logging
import re
import os
import signal
from datetime import timedelta
import shutil
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.components.media_player import (
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, MEDIA_TYPE_MUSIC,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON,
SUPPORT_SELECT_SOURCE, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE,
SERVICE_MEDIA_PLAY, SERVICE_VOLUME_UP, SERVICE_VOLUME_DOWN,
MediaPlayerDevice)
from homeassistant.const import (STATE_OFF, STATE_PAUSED, STATE_PLAYING,
STATE_IDLE)
from homeassistant import util
REQUIREMENTS = ['pexpect==4.0.1']
_LOGGER = logging.getLogger(__name__)
# SUPPORT_VOLUME_SET is close to available but we need volume up/down
# controls in the GUI.
PANDORA_SUPPORT = \
SUPPORT_PAUSE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_NEXT_TRACK | \
SUPPORT_SELECT_SOURCE
CMD_MAP = {SERVICE_MEDIA_NEXT_TRACK: 'n',
SERVICE_MEDIA_PLAY_PAUSE: 'p',
SERVICE_MEDIA_PLAY: 'p',
SERVICE_VOLUME_UP: ')',
SERVICE_VOLUME_DOWN: '('}
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=2)
CURRENT_SONG_PATTERN = re.compile(r'"(.*?)"\s+by\s+"(.*?)"\son\s+"(.*?)"',
re.MULTILINE)
STATION_PATTERN = re.compile(r'Station\s"(.+?)"', re.MULTILINE)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the media player pandora platform."""
    # Bail out early when the pianobar binary is not installed.
    if not _pianobar_exists():
        return False
    pandora = PandoraMediaPlayer('Pandora')

    # make sure we end the pandora subprocess on exit in case user doesn't
    # power it down.
    def _stop_pianobar(_event):
        pandora.turn_off()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_pianobar)
    add_devices([pandora])
class PandoraMediaPlayer(MediaPlayerDevice):
    """A media player that uses the Pianobar interface to Pandora."""

    def __init__(self, name):
        """Initialize the demo device."""
        MediaPlayerDevice.__init__(self)
        self._name = name
        self._player_state = STATE_OFF  # one of the STATE_* constants
        self._station = ''              # currently tuned station name
        self._media_title = ''
        self._media_artist = ''
        self._media_album = ''
        self._stations = []             # station names parsed from pianobar
        self._time_remaining = 0        # seconds left in the current song
        self._media_duration = 0        # total length of the current song
        self._pianobar = None           # pexpect handle to the subprocess

    @property
    def should_poll(self):
        """Should be polled for current state."""
        return True

    @property
    def name(self):
        """Return the name of the media player."""
        return self._name

    @property
    def state(self):
        """Return the state of the player."""
        return self._player_state

    def turn_on(self):
        """Turn the media player on by spawning the pianobar subprocess."""
        import pexpect
        if self._player_state != STATE_OFF:
            return
        self._pianobar = pexpect.spawn('pianobar')
        _LOGGER.info('Started pianobar subprocess')
        # Match whichever prompt pianobar shows first after startup.
        mode = self._pianobar.expect(['Receiving new playlist',
                                      'Select station:',
                                      'Email:'])
        if mode == 1:
            # station list was presented. dismiss it.
            self._pianobar.sendcontrol('m')
        elif mode == 2:
            _LOGGER.warning('The pianobar client is not configured to log in. '
                            'Please create a config file for it as described '
                            'at https://home-assistant.io'
                            '/components/media_player.pandora/')
            # pass through the email/password prompts to quit cleanly
            self._pianobar.sendcontrol('m')
            self._pianobar.sendcontrol('m')
            self._pianobar.terminate()
            self._pianobar = None
            return
        self._update_stations()
        self.update_playing_status()
        self._player_state = STATE_IDLE
        self.schedule_update_ha_state()

    def turn_off(self):
        """Turn the media player off, killing pianobar if needed."""
        import pexpect
        if self._pianobar is None:
            _LOGGER.info('Pianobar subprocess already stopped')
            return
        self._pianobar.send('q')
        try:
            _LOGGER.info('Stopped Pianobar subprocess')
            self._pianobar.terminate()
        except pexpect.exceptions.TIMEOUT:
            # kill the process group
            os.killpg(os.getpgid(self._pianobar.pid), signal.SIGTERM)
            _LOGGER.info('Killed Pianobar subprocess')
        self._pianobar = None
        self._player_state = STATE_OFF
        self.schedule_update_ha_state()

    def media_play(self):
        """Send play command."""
        # pianobar 'p' toggles play/pause; we track the resulting state here.
        self._send_pianobar_command(SERVICE_MEDIA_PLAY_PAUSE)
        self._player_state = STATE_PLAYING
        self.schedule_update_ha_state()

    def media_pause(self):
        """Send pause command."""
        self._send_pianobar_command(SERVICE_MEDIA_PLAY_PAUSE)
        self._player_state = STATE_PAUSED
        self.schedule_update_ha_state()

    def media_next_track(self):
        """Go to next track."""
        self._send_pianobar_command(SERVICE_MEDIA_NEXT_TRACK)
        self.schedule_update_ha_state()

    @property
    def supported_media_commands(self):
        """Show what this supports."""
        return PANDORA_SUPPORT

    @property
    def source(self):
        """Name of the current input source."""
        return self._station

    @property
    def source_list(self):
        """List of available input sources."""
        return self._stations

    @property
    def media_title(self):
        """Title of current playing media."""
        # Polling this property refreshes all track info from pianobar.
        self.update_playing_status()
        return self._media_title

    @property
    def media_content_type(self):
        """Content type of current playing media."""
        return MEDIA_TYPE_MUSIC

    @property
    def media_artist(self):
        """Artist of current playing media, music track only."""
        return self._media_artist

    @property
    def media_album_name(self):
        """Album name of current playing media, music track only."""
        return self._media_album

    @property
    def media_duration(self):
        """Duration of current playing media in seconds."""
        return self._media_duration

    def select_source(self, source):
        """Choose a different Pandora station and play it."""
        try:
            station_index = self._stations.index(source)
        except ValueError:
            _LOGGER.warning('Station `%s` is not in list', source)
            return
        _LOGGER.info('Setting station %s, %d', source, station_index)
        # Station selection works by index into the list pianobar printed.
        self._send_station_list_command()
        self._pianobar.sendline('{}'.format(station_index))
        self._pianobar.expect('\r\n')
        self._player_state = STATE_PLAYING

    def _send_station_list_command(self):
        """Send a station list command."""
        import pexpect
        self._pianobar.send('s')
        try:
            self._pianobar.expect('Select station:', timeout=1)
        except pexpect.exceptions.TIMEOUT:
            # try again. Buffer was contaminated.
            self._clear_buffer()
            self._pianobar.send('s')
            self._pianobar.expect('Select station:')

    def update_playing_status(self):
        """Query pianobar for info about current media_title, station."""
        response = self._query_for_playing_status()
        if not response:
            return
        self._update_current_station(response)
        self._update_current_song(response)
        self._update_song_position()

    def _query_for_playing_status(self):
        """Query system for info about current track."""
        import pexpect
        self._clear_buffer()
        self._pianobar.send('i')
        try:
            # First pattern is the mm:ss/mm:ss position ticker (bytes regex).
            match_idx = self._pianobar.expect([br'(\d\d):(\d\d)/(\d\d):(\d\d)',
                                               'No song playing',
                                               'Select station',
                                               'Receiving new playlist'])
        except pexpect.exceptions.EOF:
            _LOGGER.info('Pianobar process already exited.')
            return None
        self._log_match()
        if match_idx == 1:
            # idle.
            response = None
        elif match_idx == 2:
            # stuck on a station selection dialog. Clear it.
            _LOGGER.warning('On unexpected station list page.')
            self._pianobar.sendcontrol('m')  # press enter
            self._pianobar.sendcontrol('m')  # do it again b/c an 'i' got in
            response = self.update_playing_status()
        elif match_idx == 3:
            _LOGGER.debug('Received new playlist list.')
            response = self.update_playing_status()
        else:
            response = self._pianobar.before.decode('utf-8')
        return response

    def _update_current_station(self, response):
        """Update current station."""
        station_match = re.search(STATION_PATTERN, response)
        if station_match:
            self._station = station_match.group(1)
            _LOGGER.debug('Got station as: %s', self._station)
        else:
            _LOGGER.warning('No station match. ')

    def _update_current_song(self, response):
        """Update info about current song."""
        song_match = re.search(CURRENT_SONG_PATTERN, response)
        if song_match:
            (self._media_title, self._media_artist,
             self._media_album) = song_match.groups()
            _LOGGER.debug('Got song as: %s', self._media_title)
        else:
            _LOGGER.warning('No song match.')

    @util.Throttle(MIN_TIME_BETWEEN_UPDATES)
    def _update_song_position(self):
        """
        Get the song position and duration.

        It's hard to predict whether or not the music will start during init
        so we have to detect state by checking the ticker.
        """
        # Groups come from the position-ticker regex matched in
        # _query_for_playing_status.
        (cur_minutes, cur_seconds,
         total_minutes, total_seconds) = self._pianobar.match.groups()
        time_remaining = int(cur_minutes) * 60 + int(cur_seconds)
        self._media_duration = int(total_minutes) * 60 + int(total_seconds)
        # A moving ticker (that isn't at the very start) means playback.
        if (time_remaining != self._time_remaining and
                time_remaining != self._media_duration):
            self._player_state = STATE_PLAYING
        elif self._player_state == STATE_PLAYING:
            self._player_state = STATE_PAUSED
        self._time_remaining = time_remaining

    def _log_match(self):
        """Log grabbed values from console."""
        _LOGGER.debug('Before: %s\nMatch: %s\nAfter: %s',
                      repr(self._pianobar.before),
                      repr(self._pianobar.match),
                      repr(self._pianobar.after))

    def _send_pianobar_command(self, service_cmd):
        """Send a command to Pianobar."""
        command = CMD_MAP.get(service_cmd)
        _LOGGER.debug('Sending pinaobar command %s for %s',
                      command, service_cmd)
        if command is None:
            _LOGGER.info('Command %s not supported yet', service_cmd)
        # NOTE(review): it looks like a `return` is missing after the
        # unsupported-command branch above -- sendline(None) would raise.
        # Confirm intended behavior before changing.
        self._clear_buffer()
        self._pianobar.sendline(command)

    def _update_stations(self):
        """List defined Pandora stations."""
        self._send_station_list_command()
        station_lines = self._pianobar.before.decode('utf-8')
        _LOGGER.debug('Getting stations: %s', station_lines)
        self._stations = []
        # Each station line looks like "NN)  ...name"; keep just the name.
        for line in station_lines.split('\r\n'):
            match = re.search(r'\d+\).....(.+)', line)
            if match:
                station = match.group(1).strip()
                _LOGGER.debug('Found station %s', station)
                self._stations.append(station)
            else:
                _LOGGER.debug('No station match on `%s`', line)
        self._pianobar.sendcontrol('m')  # press enter with blank line
        self._pianobar.sendcontrol('m')  # do it twice in case an 'i' got in

    def _clear_buffer(self):
        """
        Clear buffer from pexpect.

        This is necessary because there are a bunch of 00:00 in the buffer
        """
        import pexpect
        try:
            while not self._pianobar.expect('.+', timeout=0.1):
                pass
        except pexpect.exceptions.TIMEOUT:
            pass
        except pexpect.exceptions.EOF:
            pass
pass
def _pianobar_exists():
    """Verify that Pianobar is properly installed."""
    # shutil.which returns the executable path, or None when missing.
    if shutil.which('pianobar'):
        return True
    _LOGGER.warning('The Pandora component depends on the Pianobar '
                    'client, which cannot be found. Please install '
                    'using instructions at'
                    'https://home-assistant.io'
                    '/components/media_player.pandora/')
    return False
| mit |
elahesadatnaghib/feature-based-scheduler | DBreadwrite.py | 1 | 11137 | __author__ = 'Elahe'
import sqlite3 as lite
import numpy as np
import ephem
''' Connect to the FBDE data base '''
def DBreadNwrite(key, Date):
    """Read ('r') or write ('w') one night of scheduler output to FBDE.db.

    key:  'w' loads the night's .npy output files and appends them to the
          Schedule / NightSummary / FieldsStatistics / Watch tables;
          'r' is currently a no-op.
    Date: an ephem date for the night being recorded.
    """
    if key == 'w':
        FBDEcon = lite.connect('FBDE.db')
        FBDEcur = FBDEcon.cursor()
        # avoid overwrite
        # NOTE(review): the bare excepts below hide any SQL error, not just
        # the missing-table case on a fresh database -- confirm intent.
        try:
            FBDEcur.execute('SELECT * FROM NightSummary ORDER BY Night_count DESC LIMIT 1')
            last_row_ns = FBDEcur.fetchone()
            t_start_db = last_row_ns[1]
            t_end_db = last_row_ns[2]
            # NOTE(review): int(...)/2 truncates under Python 2 but is float
            # division under Python 3 -- verify which is intended.
            if int(t_start_db + t_end_db)/2 == int(Date):
                print('This night is already recorded in the database')
                return
        except:
            print('Database created just now')
        # avoid dropping a night
        try:
            if int(t_start_db + t_end_db)/2 < int(Date) -1:
                print('One or more night(s) are missing')
                return
            if int(t_start_db + t_end_db)/2 > int(Date) -1:
                print('Last recorded night is after the intended night')
                return
        except:
            pass
        # One row per visit made during the night.
        FBDEcur.execute('CREATE TABLE IF NOT EXISTS Schedule('
                        'Visit_count INTEGER, '
                        'Field_id INTEGER, '
                        'ephemDate REAL, '
                        'Filter INTEGER, '
                        'n_ton INTEGER, '
                        'n_previous INEGER, '
                        'Cost REAL, '
                        'Slew_t REAL, '
                        't_since_v_ton REAL,'
                        't_since_v_prev REAL,'
                        'Alt REAL, '
                        'HA REAL, '
                        't_to_invis REAL, '
                        'Sky_bri REAL, '
                        'Temp_coverage REAL)')
        # One row per observed night.
        FBDEcur.execute('CREATE TABLE IF NOT EXISTS NightSummary('
                        'Night_count INTEGER, '
                        'T_start REAL, '
                        'T_end REAL, '
                        'Initial_field, '
                        'N_visits INTEGER, '
                        'N_triple INTEGER, '
                        'N_double INTEGER, '
                        'N_single INTEGER, '
                        'N_per_hour REAL, '
                        'Avg_cost REAL, '
                        'Avg_slew_t REAL, '
                        'Avg_alt REAL, '
                        'Avg_ha REAL)')
        # Cumulative per-field statistics, updated after each night.
        FBDEcur.execute('CREATE TABLE IF NOT EXISTS FieldsStatistics('
                        'ID INTEGER, '
                        'Dec REAL, '
                        'RA REAL, '
                        'Fourth_last_visit REAL, '
                        'Third_last_visit REAL, '
                        'Second_last_visit REAL, '
                        'Last_visit REAL, '
                        'N_visit INTEGER, '
                        'Coadded_depth REAL, '
                        'Avg_cost REAL, '
                        'Avg_slew_t REAL, '
                        'Avg_alt REAL, '
                        'Avg_ha REAL)')
        # Per-visit basis-function values for diagnostics.
        FBDEcur.execute('CREATE TABLE IF NOT EXISTS Watch('
                        'Visit_count INTEGER, '
                        'ID INTEGER,'
                        'ephemDate,'
                        'F1,'
                        'F2,'
                        'F3,'
                        'F4,'
                        'F5,'
                        'F6,'
                        'F7)')
        # Night outputs are keyed by the integer Julian date.
        Watch = np.load("Output/Watch{}.npy".format(int(ephem.julian_date(Date))))
        Schedule = np.load("Output/Schedule{}.npy".format(int(ephem.julian_date(Date))))
        Summary = np.load("Output/Summary{}.npy".format(int(ephem.julian_date(Date))))
        # 3 by n_fields matrix of ID, RA, Dec
        all_fields = np.loadtxt("NightDataInLIS/Constants/fieldID.lis", dtype = "i4, f8, f8", unpack = True)
        N_visits = np.count_nonzero(Schedule['Field_id'])
        ''' Update the SCHEDULE db'''
        # Import last row of the data base
        try:
            FBDEcur.execute('SELECT * FROM Schedule ORDER BY Visit_count DESC LIMIT 1')
            last_row_sch = FBDEcur.fetchone()
            Visit_count = last_row_sch[0]
        except:
            Visit_count = 0
        for index in range(N_visits):
            Visit_count += 1
            Field_id = Schedule[index]['Field_id']
            ephemDate = Schedule[index]['ephemDate']
            Filter = Schedule[index]['Filter']
            n_ton = Schedule[index]['n_ton']
            n_last = Schedule[index]['n_last']
            Cost = Schedule[index]['Cost']
            Slew_t = Schedule[index]['Slew_t']
            t_since_v_ton = Schedule[index]['t_since_v_ton']
            t_since_v_last= Schedule[index]['t_since_v_last']
            Alt = Schedule[index]['Alt']
            HA = Schedule[index]['HA']
            t_to_invis = Schedule[index]['t_to_invis']
            Sky_bri = Schedule[index]['Sky_bri']
            Temp_coverage = Schedule[index]['Temp_coverage']
            # Slew time is stored in seconds (hence / ephem.second).
            FBDEcur.execute('INSERT INTO Schedule VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                            (Visit_count, Field_id, ephemDate, Filter, n_ton, n_last, Cost, Slew_t/ephem.second, t_since_v_ton, t_since_v_last, Alt, HA, t_to_invis, Sky_bri, Temp_coverage))
        FBDEcon.commit()
        ''' Update the NIGHT SUMMARY db'''
        # Import last row of the data base
        try:
            FBDEcur.execute('SELECT * FROM NightSummary ORDER BY Night_count DESC LIMIT 1')
            last_row_ns = FBDEcur.fetchone()
            Night_count = last_row_ns[0]
        except:
            Night_count = 0
        Night_count += 1
        T_start = Summary[0]
        T_end = Summary[1]
        Initial_field = Summary[2]
        N_visits = N_visits
        # Count how many fields were visited 1x / 2x / 3x tonight.
        u, c = np.unique(Schedule['Field_id'], return_counts=True)
        unique, counts = np.unique(c, return_counts=True)
        try:
            N_triple = counts[unique == 3][0]
        except:
            N_triple = 0
        try:
            N_double = counts[unique == 2][0]
        except:
            N_double = 0
        try:
            N_single = counts[unique == 1][0]
        except:
            N_single = 0
        N_per_hour = N_visits * ephem.hour/ (T_end - T_start)
        Avg_cost = np.average(Schedule[0:N_visits]['Cost'])
        Avg_slew_t = np.average(Schedule[0:N_visits]['Slew_t'])
        Avg_alt = np.average(Schedule[0:N_visits]['Alt'])
        Avg_ha = np.average(Schedule[0:N_visits]['HA'])
        FBDEcur.execute('INSERT INTO NightSummary VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                        (Night_count, T_start, T_end, Initial_field, N_visits, N_triple, N_double, N_single, N_per_hour, Avg_cost, Avg_slew_t/ephem.second, Avg_alt, Avg_ha))
        FBDEcon.commit()
        ''' Update the FIELDS STATISTICS db'''
        try:
            FBDEcur.execute('SELECT * FROM FieldsStatistics ORDER BY ID DESC LIMIT 1')
            last_ID = FBDEcur.fetchone()[0]
        except: # Initialize FieldsStatistics
            for field in np.transpose(all_fields):
                ID = field[0]
                RA = field[1]
                Dec= field[2]
                # NOTE(review): the table declares columns (ID, Dec, RA) but
                # values are inserted as (ID, RA, Dec) -- the Dec and RA
                # columns appear swapped; confirm against fieldID.lis.
                FBDEcur.execute('INSERT INTO FieldsStatistics VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                                (ID, RA, Dec, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
            FBDEcon.commit()
        for index, id in enumerate(Schedule[0:N_visits]['Field_id']):
            FBDEcur.execute('SELECT * FROM FieldsStatistics WHERE ID = ?',(id,))
            field_row = FBDEcur.fetchone()
            previous_Fourth_last_visit = field_row[3] # thrown away
            previous_Third_last_visit = field_row[4]
            previous_Second_last_visit = field_row[5]
            previous_Last_visit = field_row[6]
            previous_N_visit = field_row[7]
            previous_Coadded_depth = field_row[8]
            previous_Avg_cost = field_row[9]
            previous_Avg_slew_t = field_row[10]
            previous_Avg_alt = field_row[11]
            previous_Avg_ha = field_row[12]
            # Shift the visit history window by one.
            Fourth_last_visit = previous_Third_last_visit
            Third_last_visit = previous_Second_last_visit
            Second_last_visit = previous_Last_visit
            Last_visit = Schedule[index]['ephemDate']
            N_visit = previous_N_visit + 1
            Coadded_depth = previous_Coadded_depth + 0 # temporarily
            # Running averages updated incrementally from the stored counts.
            Avg_cost = ((previous_Avg_cost * previous_N_visit) + Schedule[index]['Cost'])/N_visit
            Avg_slew_t = ((previous_Avg_slew_t * previous_N_visit) + Schedule[index]['Slew_t'])/N_visit
            Avg_alt = ((previous_Avg_alt * previous_N_visit) + Schedule[index]['Alt'])/N_visit
            Avg_ha = ((previous_Avg_ha * previous_N_visit) + Schedule[index]['HA'])/N_visit
            FBDEcur.execute('UPDATE FieldsStatistics SET '
                            'Fourth_last_visit = ?, '
                            'Third_last_visit = ?, '
                            'Second_last_visit = ?, '
                            'Last_visit = ?, '
                            'N_visit = ?, '
                            'Coadded_depth = ?, '
                            'Avg_cost = ?, '
                            'Avg_slew_t = ?, '
                            'Avg_alt = ?, '
                            'Avg_ha = ? WHERE ID = ?',
                            (Fourth_last_visit, Third_last_visit, Second_last_visit, Last_visit, N_visit, Coadded_depth, Avg_cost, Avg_slew_t/ephem.second, Avg_alt, Avg_ha, id))
        FBDEcon.commit()
        ''' Update the WATCH db'''
        # Import last row of the data base
        try:
            FBDEcur.execute('SELECT * FROM Watch ORDER BY Visit_count DESC LIMIT 1')
            last_row_sch = FBDEcur.fetchone()
            Visit_count = last_row_sch[0]
        except:
            Visit_count = 0
        for index in range(N_visits):
            Visit_count += 1
            Field_id = Watch[index]['Field_id']
            ephemDate = Watch[index]['ephemDate']
            F1 = Watch[index]['F1']
            F2 = Watch[index]['F2']
            F3 = Watch[index]['F3']
            F4 = Watch[index]['F4']
            F5 = Watch[index]['F5']
            F6 = Watch[index]['F6']
            F7 = Watch[index]['F7']
            FBDEcur.execute('INSERT INTO Watch VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                            (Visit_count, Field_id, ephemDate, F1, F2, F3, F4, F5, F6, F7))
        FBDEcon.commit()
        return
    if key == 'r':
        # Reading back from the database is not implemented yet.
        return
    return
'''
Date = ephem.Date('2016/09/01 12:00:00.00') # times are in UT
DBreadNwrite('w', Date)
''' | mit |
thenewguy/django-shop | shop/util/fields.py | 5 | 1163 | # -*- coding: utf-8 -*-
from decimal import Decimal
from django.db.models.fields import DecimalField
class CurrencyField(DecimalField):
    """
    A CurrencyField is simply a subclass of DecimalField with fixed defaults:
    max_digits=30, decimal_places=2, and a default value of Decimal('0.0').
    Any of these may still be overridden by the caller via keyword arguments.
    """
    def __init__(self, **kwargs):
        # Explicit kwargs win over the currency defaults below.
        defaults = {
            'max_digits': 30,
            'decimal_places': 2,
            'default': Decimal('0.0')
        }
        defaults.update(kwargs)
        super(CurrencyField, self).__init__(**defaults)

    def south_field_triple(self):  # pragma: no cover
        """
        Returns a suitable description of this field for South.

        This is excluded from coverage reports since it is pretty much a piece
        of South itself, and does not influence program behavior at all in
        case we don't use South.
        """
        # We'll just introspect the _actual_ field.
        from south.modelsinspector import introspector
        field_class = "django.db.models.fields.DecimalField"
        args, kwargs = introspector(self)
        # That's our definition!
        return (field_class, args, kwargs)
| bsd-3-clause |
dehuinet/mosquitto | test/broker/03-publish-qos1.py | 18 | 1223 | #!/usr/bin/env python
# Test whether a PUBLISH to a topic with QoS 1 results in the correct PUBACK packet.

import subprocess
import socket
import time
import inspect, os, sys

# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
# Make the parent test directory importable so the shared mosq_test helpers load.
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
    sys.path.insert(0, cmd_subfolder)

import mosq_test

rc = 1          # exit code; stays non-zero until the expected PUBACK arrives
mid = 19        # message id used for the QoS 1 PUBLISH/PUBACK pair
keepalive = 60
connect_packet = mosq_test.gen_connect("pub-qos1-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)

publish_packet = mosq_test.gen_publish("pub/qos1/test", qos=1, mid=mid, payload="message")
puback_packet = mosq_test.gen_puback(mid)

# Launch a private broker instance on port 1888 for the duration of the test.
broker = subprocess.Popen(['../../src/mosquitto', '-p', '1888'], stderr=subprocess.PIPE)

try:
    time.sleep(0.5)  # give the broker time to start listening

    sock = mosq_test.do_client_connect(connect_packet, connack_packet)
    sock.send(publish_packet)

    # Success iff the broker answers the QoS 1 PUBLISH with a matching PUBACK.
    if mosq_test.expect_packet(sock, "puback", puback_packet):
        rc = 0

    sock.close()
finally:
    # Always shut the broker down; dump its stderr when the test failed.
    broker.terminate()
    broker.wait()
    if rc:
        (stdo, stde) = broker.communicate()
        print(stde)

exit(rc)
| bsd-3-clause |
haroldl/homeworklog | django/contrib/gis/tests/relatedapp/models.py | 274 | 1686 | from django.contrib.gis.db import models
from django.contrib.localflavor.us.models import USStateField
class Location(models.Model):
    # Plain geographic point; GeoManager enables spatial queryset lookups.
    point = models.PointField()
    objects = models.GeoManager()
    def __unicode__(self): return self.point.wkt

class City(models.Model):
    name = models.CharField(max_length=50)
    state = USStateField()
    location = models.ForeignKey(Location)
    objects = models.GeoManager()
    def __unicode__(self): return self.name

class AugmentedLocation(Location):
    # Multi-table inheritance from Location, adding one extra text column.
    extra_text = models.TextField(blank=True)
    objects = models.GeoManager()

class DirectoryEntry(models.Model):
    listing_text = models.CharField(max_length=50)
    location = models.ForeignKey(AugmentedLocation)
    objects = models.GeoManager()

class Parcel(models.Model):
    # Pairs of point/polygon fields, once in the default SRID and once in 2276,
    # to exercise transforms between spatial reference systems.
    name = models.CharField(max_length=30)
    city = models.ForeignKey(City)
    center1 = models.PointField()
    # Throwing a curveball w/`db_column` here.
    center2 = models.PointField(srid=2276, db_column='mycenter')
    border1 = models.PolygonField()
    border2 = models.PolygonField(srid=2276)
    objects = models.GeoManager()
    def __unicode__(self): return self.name

# These use the GeoManager but do not have any geographic fields.
class Author(models.Model):
    name = models.CharField(max_length=100)
    objects = models.GeoManager()

class Article(models.Model):
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author, unique=True)
    objects = models.GeoManager()

class Book(models.Model):
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author, related_name='books', null=True)
    objects = models.GeoManager()
| bsd-3-clause |
SergeySatskiy/codimension | codimension/profiling/profiletest.py | 1 | 1537 | #
# -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2012 Sergey Satskiy sergey.satskiy@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Profiler test"""
# import sys
import time

# Assorted constructs for the profiler to observe.
x = 0
while False:        # dead loop: body never executes
    time.sleep(0.1)
    x += 1
v = range(175)
z = (x*x for x in range(10))                      # generator expression
c = compile('sum([1, 2, 3])', '', 'single')       # code object
e = Ellipsis
# NOTE(review): this raise is uncaught at module level, so running the file
# stops here and nothing below ever executes - presumably deliberate for
# exercising the profiler's exception handling; confirm.
raise Exception('dkdkdkd')
try:
    raise Exception('hey')
except Exception as exc:
    import sys
    x1, x2, x3 = sys.exc_info()
class A:
    """Small class exercising instance attributes and a property."""
    def __init__(self):
        self.__d = 10   # name-mangled private attribute
        self.d = 20     # public attribute

    # NOTE(review): declared without `self`; calling it on an instance would
    # raise TypeError. Presumably intentional for this profiler test - confirm.
    def f():
        pass

    @property
    def x(self):
        # NOTE(review): self.__x is never assigned in this file, so reading
        # the property raises AttributeError - confirm this is deliberate.
        return self.__x
def f(bla):
    """Recurse upward from *bla* until it reaches -1, then return -1.

    Terminates only for bla <= -1; larger values recurse without bound.
    """
    # if bla >= 5:
    #     return -1
    return -1 if bla == -1 else f(bla + 1)
def g( foo ):
    " g function doc "
    # Thin wrapper so the profiler records a two-level call chain g() -> f().
    f( foo )

# Module-level objects and calls for the profiler to sample.
a = A()
b = set()
c = {1: '1', 2: '2'}
# NOTE(review): f(0) counts upward and never reaches -1, ending in
# RecursionError; g(-1) returns immediately. Presumably deliberate profiler
# input - confirm.
f( 0 )
g( -1 )
| gpl-3.0 |
blademainer/intellij-community | python/lib/Lib/site-packages/django/conf/locale/ml/formats.py | 341 | 1635 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# NOTE(review): these values use m/d/Y ordering and Sunday-first weeks, which
# look like the en-US defaults rather than Malayalam-specific conventions -
# confirm whether locale-specific formats were ever provided.
DATE_FORMAT = 'N j, Y'
TIME_FORMAT = 'P'
DATETIME_FORMAT = 'N j, Y, P'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'F j'
SHORT_DATE_FORMAT = 'm/d/Y'
SHORT_DATETIME_FORMAT = 'm/d/Y P'
FIRST_DAY_OF_WEEK = 0 # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
    # '%b %d %Y', '%b %d, %Y',            # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',            # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',            # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',            # '25 October 2006', '25 October, 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S',     # '14:30:59'
    '%H:%M',        # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',     # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M',        # '10/25/2006 14:30'
    '%m/%d/%Y',              # '10/25/2006'
    '%m/%d/%y %H:%M:%S',     # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M',        # '10/25/06 14:30'
    '%m/%d/%y',              # '10/25/06'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| apache-2.0 |
bpramod/azure-linux-extensions | DSC/azure/http/winhttp.py | 52 | 15151 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from ctypes import (
c_void_p,
c_long,
c_ulong,
c_longlong,
c_ulonglong,
c_short,
c_ushort,
c_wchar_p,
c_byte,
byref,
Structure,
Union,
POINTER,
WINFUNCTYPE,
HRESULT,
oledll,
WinDLL,
)
import ctypes
import sys
if sys.version_info >= (3,):
    def unicode(text):
        # Python 3 removed the `unicode` builtin; identity shim keeps the
        # Python 2 call sites below working unchanged.
        return text

#------------------------------------------------------------------------------
# Constants that are used in COM operations.
# VARENUM type tags stored in VARIANT.vt below.
VT_EMPTY = 0
VT_NULL = 1
VT_I2 = 2
VT_I4 = 3
VT_BSTR = 8
VT_BOOL = 11
VT_I1 = 16
VT_UI1 = 17
VT_UI2 = 18
VT_UI4 = 19
VT_I8 = 20
VT_UI8 = 21
VT_ARRAY = 8192

# WinHttpRequest proxy/credentials option values.
HTTPREQUEST_PROXYSETTING_PROXY = 2
HTTPREQUEST_SETCREDENTIALS_FOR_PROXY = 1

HTTPREQUEST_PROXY_SETTING = c_long
HTTPREQUEST_SETCREDENTIALS_FLAGS = c_long

#------------------------------------------------------------------------------
# Com related APIs that are used, bound via ctypes with explicit signatures.
_ole32 = oledll.ole32
_oleaut32 = WinDLL('oleaut32')

_CLSIDFromString = _ole32.CLSIDFromString
_CoInitialize = _ole32.CoInitialize
_CoInitialize.argtypes = [c_void_p]

_CoCreateInstance = _ole32.CoCreateInstance

_SysAllocString = _oleaut32.SysAllocString
_SysAllocString.restype = c_void_p
_SysAllocString.argtypes = [c_wchar_p]

_SysFreeString = _oleaut32.SysFreeString
_SysFreeString.argtypes = [c_void_p]

# SAFEARRAY*
# SafeArrayCreateVector(_In_ VARTYPE vt,_In_ LONG lLbound,_In_ ULONG
# cElements);
_SafeArrayCreateVector = _oleaut32.SafeArrayCreateVector
_SafeArrayCreateVector.restype = c_void_p
_SafeArrayCreateVector.argtypes = [c_ushort, c_long, c_ulong]

# HRESULT
# SafeArrayAccessData(_In_ SAFEARRAY *psa, _Out_ void **ppvData);
_SafeArrayAccessData = _oleaut32.SafeArrayAccessData
_SafeArrayAccessData.argtypes = [c_void_p, POINTER(c_void_p)]

# HRESULT
# SafeArrayUnaccessData(_In_ SAFEARRAY *psa);
_SafeArrayUnaccessData = _oleaut32.SafeArrayUnaccessData
_SafeArrayUnaccessData.argtypes = [c_void_p]

# HRESULT
# SafeArrayGetUBound(_In_ SAFEARRAY *psa, _In_ UINT nDim, _Out_ LONG
# *plUbound);
_SafeArrayGetUBound = _oleaut32.SafeArrayGetUBound
_SafeArrayGetUBound.argtypes = [c_void_p, c_ulong, POINTER(c_long)]
class BSTR(c_wchar_p):
    ''' BSTR class in python. '''
    def __init__(self, value):
        # Allocate a COM-owned BSTR copy of the text; released in __del__.
        super(BSTR, self).__init__(_SysAllocString(value))

    def __del__(self):
        _SysFreeString(self)
class VARIANT(Structure):
    '''
    VARIANT structure in python. Does not match the definition in
    MSDN exactly & it is only mapping the used fields. Field names are also
    slighty different.
    '''
    class _tagData(Union):
        # Union over the value slot; only one member is meaningful at a time,
        # selected by the outer structure's `vt` tag.
        class _tagRecord(Structure):
            _fields_ = [('pvoid', c_void_p), ('precord', c_void_p)]

        _fields_ = [('llval', c_longlong),
                    ('ullval', c_ulonglong),
                    ('lval', c_long),
                    ('ulval', c_ulong),
                    ('ival', c_short),
                    ('boolval', c_ushort),
                    ('bstrval', BSTR),
                    ('parray', c_void_p),
                    ('record', _tagRecord)]

    _fields_ = [('vt', c_ushort),          # VARENUM type tag (VT_* constants)
                ('wReserved1', c_ushort),
                ('wReserved2', c_ushort),
                ('wReserved3', c_ushort),
                ('vdata', _tagData)]

    @staticmethod
    def create_empty():
        # VT_EMPTY variant, used where COM expects an "omitted" argument.
        variant = VARIANT()
        variant.vt = VT_EMPTY
        variant.vdata.llval = 0
        return variant

    @staticmethod
    def create_safearray_from_str(text):
        # Wrap `text` (bytes) in a SAFEARRAY of VT_UI1 elements.
        variant = VARIANT()
        variant.vt = VT_ARRAY | VT_UI1

        length = len(text)
        variant.vdata.parray = _SafeArrayCreateVector(VT_UI1, 0, length)
        pvdata = c_void_p()
        # Lock the array data, copy the bytes in, then unlock.
        _SafeArrayAccessData(variant.vdata.parray, byref(pvdata))
        ctypes.memmove(pvdata, text, length)
        _SafeArrayUnaccessData(variant.vdata.parray)

        return variant

    @staticmethod
    def create_bstr_from_str(text):
        variant = VARIANT()
        variant.vt = VT_BSTR
        variant.vdata.bstrval = BSTR(text)
        return variant

    @staticmethod
    def create_bool_false():
        variant = VARIANT()
        variant.vt = VT_BOOL
        variant.vdata.boolval = 0
        return variant

    def is_safearray_of_bytes(self):
        return self.vt == VT_ARRAY | VT_UI1

    def str_from_safearray(self):
        # Copy the SAFEARRAY's byte contents out into a Python byte string.
        assert self.vt == VT_ARRAY | VT_UI1
        pvdata = c_void_p()
        count = c_long()
        _SafeArrayGetUBound(self.vdata.parray, 1, byref(count))
        count = c_long(count.value + 1)  # upper bound is inclusive
        _SafeArrayAccessData(self.vdata.parray, byref(pvdata))
        text = ctypes.string_at(pvdata, count)
        _SafeArrayUnaccessData(self.vdata.parray)
        return text

    def __del__(self):
        # Let COM free whatever the variant owns (BSTR, SAFEARRAY, ...).
        _VariantClear(self)

# HRESULT VariantClear(_Inout_ VARIANTARG *pvarg);
_VariantClear = _oleaut32.VariantClear
_VariantClear.argtypes = [POINTER(VARIANT)]
class GUID(Structure):
    ''' GUID structure in python. '''
    _fields_ = [("data1", c_ulong),
                ("data2", c_ushort),
                ("data3", c_ushort),
                ("data4", c_byte * 8)]

    def __init__(self, name=None):
        # Optionally parse a "{...}" GUID string into the binary fields.
        if name is not None:
            _CLSIDFromString(unicode(name), byref(self))
class _WinHttpRequest(c_void_p):
    '''
    Maps the Com API to Python class functions. Not all methods in
    IWinHttpWebRequest are mapped - only the methods we use.
    '''
    # Each binding is WINFUNCTYPE(<restype>, <argtypes...>)(slot, name):
    # ctypes invokes the COM method at that vtable slot on this interface
    # pointer, passing the instance as the implicit `this` argument.
    _AddRef = WINFUNCTYPE(c_long) \
        (1, 'AddRef')
    _Release = WINFUNCTYPE(c_long) \
        (2, 'Release')
    _SetProxy = WINFUNCTYPE(HRESULT,
                            HTTPREQUEST_PROXY_SETTING,
                            VARIANT,
                            VARIANT) \
        (7, 'SetProxy')
    _SetCredentials = WINFUNCTYPE(HRESULT,
                                  BSTR,
                                  BSTR,
                                  HTTPREQUEST_SETCREDENTIALS_FLAGS) \
        (8, 'SetCredentials')
    _Open = WINFUNCTYPE(HRESULT, BSTR, BSTR, VARIANT) \
        (9, 'Open')
    _SetRequestHeader = WINFUNCTYPE(HRESULT, BSTR, BSTR) \
        (10, 'SetRequestHeader')
    _GetResponseHeader = WINFUNCTYPE(HRESULT, BSTR, POINTER(c_void_p)) \
        (11, 'GetResponseHeader')
    _GetAllResponseHeaders = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
        (12, 'GetAllResponseHeaders')
    _Send = WINFUNCTYPE(HRESULT, VARIANT) \
        (13, 'Send')
    _Status = WINFUNCTYPE(HRESULT, POINTER(c_long)) \
        (14, 'Status')
    _StatusText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
        (15, 'StatusText')
    _ResponseText = WINFUNCTYPE(HRESULT, POINTER(c_void_p)) \
        (16, 'ResponseText')
    _ResponseBody = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \
        (17, 'ResponseBody')
    _ResponseStream = WINFUNCTYPE(HRESULT, POINTER(VARIANT)) \
        (18, 'ResponseStream')
    _WaitForResponse = WINFUNCTYPE(HRESULT, VARIANT, POINTER(c_ushort)) \
        (21, 'WaitForResponse')
    _Abort = WINFUNCTYPE(HRESULT) \
        (22, 'Abort')
    _SetTimeouts = WINFUNCTYPE(HRESULT, c_long, c_long, c_long, c_long) \
        (23, 'SetTimeouts')
    _SetClientCertificate = WINFUNCTYPE(HRESULT, BSTR) \
        (24, 'SetClientCertificate')

    def open(self, method, url):
        '''
        Opens the request.

        method: the request VERB 'GET', 'POST', etc.
        url: the url to connect
        '''
        # Timeouts in milliseconds - presumably resolve/connect/send/receive
        # per IWinHttpRequest::SetTimeouts; confirm against MSDN.
        _WinHttpRequest._SetTimeouts(self, 0, 65000, 65000, 65000)

        flag = VARIANT.create_bool_false()
        _method = BSTR(method)
        _url = BSTR(url)
        _WinHttpRequest._Open(self, _method, _url, flag)

    def set_request_header(self, name, value):
        ''' Sets the request header. '''
        _name = BSTR(name)
        _value = BSTR(value)
        _WinHttpRequest._SetRequestHeader(self, _name, _value)

    def get_all_response_headers(self):
        ''' Gets back all response headers. '''
        bstr_headers = c_void_p()
        _WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers))
        bstr_headers = ctypes.cast(bstr_headers, c_wchar_p)
        headers = bstr_headers.value
        _SysFreeString(bstr_headers)  # COM allocated the BSTR; we must free it
        return headers

    def send(self, request=None):
        ''' Sends the request body. '''
        # Sends VT_EMPTY if it is GET, HEAD request.
        if request is None:
            var_empty = VARIANT.create_empty()
            _WinHttpRequest._Send(self, var_empty)
        else:  # Sends request body as SAFEArray.
            _request = VARIANT.create_safearray_from_str(request)
            _WinHttpRequest._Send(self, _request)

    def status(self):
        ''' Gets status of response. '''
        status = c_long()
        _WinHttpRequest._Status(self, byref(status))
        return int(status.value)

    def status_text(self):
        ''' Gets status text of response. '''
        bstr_status_text = c_void_p()
        _WinHttpRequest._StatusText(self, byref(bstr_status_text))
        bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p)
        status_text = bstr_status_text.value
        _SysFreeString(bstr_status_text)
        return status_text

    def response_body(self):
        '''
        Gets response body as a SAFEARRAY and converts the SAFEARRAY to str.
        If it is an xml file, it always contains 3 characters before <?xml,
        so we remove them.
        '''
        var_respbody = VARIANT()
        _WinHttpRequest._ResponseBody(self, byref(var_respbody))
        if var_respbody.is_safearray_of_bytes():
            respbody = var_respbody.str_from_safearray()
            # b'\xef\xbb\xbf' is the UTF-8 byte-order mark; strip it so XML
            # parsers see the document start directly.
            if respbody[3:].startswith(b'<?xml') and\
               respbody.startswith(b'\xef\xbb\xbf'):
                respbody = respbody[3:]
            return respbody
        else:
            return ''

    def set_client_certificate(self, certificate):
        '''Sets client certificate for the request. '''
        _certificate = BSTR(certificate)
        _WinHttpRequest._SetClientCertificate(self, _certificate)

    def set_tunnel(self, host, port):
        ''' Sets up the host and the port for the HTTP CONNECT Tunnelling.'''
        url = host
        if port:
            url = url + u':' + port

        var_host = VARIANT.create_bstr_from_str(url)
        var_empty = VARIANT.create_empty()

        _WinHttpRequest._SetProxy(
            self, HTTPREQUEST_PROXYSETTING_PROXY, var_host, var_empty)

    def set_proxy_credentials(self, user, password):
        _WinHttpRequest._SetCredentials(
            self, BSTR(user), BSTR(password),
            HTTPREQUEST_SETCREDENTIALS_FOR_PROXY)

    def __del__(self):
        # Drop our COM reference when the Python wrapper dies.
        if self.value is not None:
            _WinHttpRequest._Release(self)
class _Response(object):
''' Response class corresponding to the response returned from httplib
HTTPConnection. '''
def __init__(self, _status, _status_text, _length, _headers, _respbody):
self.status = _status
self.reason = _status_text
self.length = _length
self.headers = _headers
self.respbody = _respbody
def getheaders(self):
'''Returns response headers.'''
return self.headers
def read(self, _length):
'''Returns resonse body. '''
return self.respbody[:_length]
class _HTTPConnection(object):
    ''' Class corresponding to httplib HTTPConnection class. '''

    def __init__(self, host, cert_file=None, key_file=None, protocol='http'):
        ''' initialize the IWinHttpWebRequest Com Object.'''
        self.host = unicode(host)
        self.cert_file = cert_file
        self._httprequest = _WinHttpRequest()
        self.protocol = protocol
        # CLSID/IID pair identifying the WinHttpRequest COM class and the
        # interface we bind to through _WinHttpRequest.
        clsid = GUID('{2087C2F4-2CEF-4953-A8AB-66779B670495}')
        iid = GUID('{016FE2EC-B2C8-45F8-B23B-39E53A75396B}')
        _CoInitialize(None)
        _CoCreateInstance(byref(clsid), 0, 1, byref(iid),
                          byref(self._httprequest))

    def close(self):
        # No explicit teardown needed; COM reference released by __del__.
        pass

    def set_tunnel(self, host, port=None, headers=None):
        ''' Sets up the host and the port for the HTTP CONNECT Tunnelling. '''
        self._httprequest.set_tunnel(unicode(host), unicode(str(port)))

    def set_proxy_credentials(self, user, password):
        self._httprequest.set_proxy_credentials(
            unicode(user), unicode(password))

    def putrequest(self, method, uri):
        ''' Connects to host and sends the request. '''
        protocol = unicode(self.protocol + '://')
        url = protocol + self.host + unicode(uri)
        self._httprequest.open(unicode(method), url)

        # sets certificate for the connection if cert_file is set.
        if self.cert_file is not None:
            self._httprequest.set_client_certificate(unicode(self.cert_file))

    def putheader(self, name, value):
        ''' Sends the headers of request. '''
        # On Python 2 coerce byte strings to unicode for the COM layer.
        if sys.version_info < (3,):
            name = str(name).decode('utf-8')
            value = str(value).decode('utf-8')
        self._httprequest.set_request_header(name, value)

    def endheaders(self):
        ''' No operation. Exists only to provide the same interface of httplib
        HTTPConnection.'''
        pass

    def send(self, request_body):
        ''' Sends request body. '''
        if not request_body:
            self._httprequest.send()
        else:
            self._httprequest.send(request_body)

    def getresponse(self):
        ''' Gets the response and generates the _Response object'''
        status = self._httprequest.status()
        status_text = self._httprequest.status_text()

        resp_headers = self._httprequest.get_all_response_headers()
        fixed_headers = []
        for resp_header in resp_headers.split('\n'):
            # RFC-style header continuation: a line starting with whitespace
            # belongs to the previous header value.
            if (resp_header.startswith('\t') or\
                resp_header.startswith(' ')) and fixed_headers:
                # append to previous header
                fixed_headers[-1] += resp_header
            else:
                fixed_headers.append(resp_header)

        headers = []
        for resp_header in fixed_headers:
            if ':' in resp_header:
                pos = resp_header.find(':')
                headers.append(
                    (resp_header[:pos].lower(), resp_header[pos + 1:].strip()))

        body = self._httprequest.response_body()
        length = len(body)

        return _Response(status, status_text, length, headers, body)
| apache-2.0 |
Zhaoyanzhang/-myflasky | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/charsetgroupprober.py | 2929 | 3791 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
from .charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
    """Runs a group of child probers in parallel and reports the best one."""
    def __init__(self):
        CharSetProber.__init__(self)
        self._mActiveNum = 0        # number of child probers still in the running
        self._mProbers = []         # filled in by concrete subclasses
        self._mBestGuessProber = None

    def reset(self):
        # Reset and re-activate every child prober.
        CharSetProber.reset(self)
        self._mActiveNum = 0
        for prober in self._mProbers:
            if prober:
                prober.reset()
                prober.active = True
                self._mActiveNum += 1
        self._mBestGuessProber = None

    def get_charset_name(self):
        # Lazily compute the best guess if no prober declared a definite hit.
        if not self._mBestGuessProber:
            self.get_confidence()
            if not self._mBestGuessProber:
                return None
#                self._mBestGuessProber = self._mProbers[0]
        return self._mBestGuessProber.get_charset_name()

    def feed(self, aBuf):
        # Feed the buffer to every active child; deactivate probers that
        # rule themselves out, and stop early on a definite detection.
        for prober in self._mProbers:
            if not prober:
                continue
            if not prober.active:
                continue
            st = prober.feed(aBuf)
            if not st:
                continue
            if st == constants.eFoundIt:
                self._mBestGuessProber = prober
                return self.get_state()
            elif st == constants.eNotMe:
                prober.active = False
                self._mActiveNum -= 1
                if self._mActiveNum <= 0:
                    # Every child ruled itself out: the group gives up too.
                    self._mState = constants.eNotMe
                    return self.get_state()
        return self.get_state()

    def get_confidence(self):
        # Short-circuit on definite states, else return the best child
        # confidence and remember which prober produced it.
        st = self.get_state()
        if st == constants.eFoundIt:
            return 0.99
        elif st == constants.eNotMe:
            return 0.01
        bestConf = 0.0
        self._mBestGuessProber = None
        for prober in self._mProbers:
            if not prober:
                continue
            if not prober.active:
                if constants._debug:
                    sys.stderr.write(prober.get_charset_name()
                                     + ' not active\n')
                continue
            cf = prober.get_confidence()
            if constants._debug:
                sys.stderr.write('%s confidence = %s\n' %
                                 (prober.get_charset_name(), cf))
            if bestConf < cf:
                bestConf = cf
                self._mBestGuessProber = prober
        if not self._mBestGuessProber:
            return 0.0
        return bestConf
# else:
# self._mBestGuessProber = self._mProbers[0]
# return self._mBestGuessProber.get_confidence()
| mit |
promenader/MyVimrc | vim/bundle/gundo.vim/autoload/gundo.py | 1 | 17190 | # ============================================================================
# File: gundo.py
# Description: vim global plugin to visualize your undo tree
# Maintainer: Steve Losh <steve@stevelosh.com>
# License: GPLv2+ -- look it up.
# Notes: Much of this code was thiefed from Mercurial, and the rest was
# heavily inspired by scratch.vim and histwin.vim.
#
# ============================================================================
import difflib
import itertools
import sys
import time
import vim
# Mercurial's graphlog code --------------------------------------------------------
def asciiedges(seen, rev, parents):
    """adds edge info to changelog DAG walk suitable for ascii()"""
    if rev not in seen:
        seen.append(rev)
    nodeidx = seen.index(rev)

    # Split parents into those already tracked as columns and brand-new ones.
    knownparents = [p for p in parents if p in seen]
    newparents = [p for p in parents if p not in seen]

    ncols = len(seen)
    # Replace this node's column with its new parents (may widen or shrink).
    seen[nodeidx:nodeidx + 1] = newparents

    edges = [(nodeidx, seen.index(p)) for p in knownparents]
    if newparents:
        edges.append((nodeidx, nodeidx))
    if len(newparents) > 1:
        edges.append((nodeidx, nodeidx + 1))

    return nodeidx, edges, ncols, len(seen) - ncols
def get_nodeline_edges_tail(
        node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
    """Build the edge characters to the right of a node on its own line."""
    if not (fix_tail and n_columns_diff == p_diff and n_columns_diff != 0):
        # Default: every remaining edge continues straight down.
        return ["|", " "] * (n_columns - node_index - 1)
    if n_columns_diff == -1:
        # Still shrinking: verticals up to the joint, then slide left.
        joint = max(node_index + 1, p_node_index)
        tail = ["|", " "] * (joint - node_index - 1)
        tail.extend(["/", " "] * (n_columns - joint))
        return tail
    # Still growing: every remaining edge slides right.
    return ["\\", " "] * (n_columns - node_index - 1)
def draw_edges(edges, nodeline, interline):
    """Render each (start, end) edge into the two character rows in place."""
    for start, end in edges:
        if start == end + 1:
            # Parent one column to the left: slant left.
            interline[2 * end + 1] = "/"
        elif start == end - 1:
            # Parent one column to the right: slant right.
            interline[2 * start + 1] = "\\"
        elif start == end:
            interline[2 * start] = "|"
        else:
            # Long horizontal edge: junction at the far end, dashes between.
            nodeline[2 * end] = "+"
            lo, hi = (end, start) if start > end else (start, end)
            for i in range(2 * lo + 1, 2 * hi):
                if nodeline[i] != "+":
                    nodeline[i] = "-"
def fix_long_right_edges(edges):
    """Widen rightward edges by one column, mutating *edges* in place."""
    edges[:] = [(a, b + 1) if b > a else (a, b) for a, b in edges]
def ascii(buf, state, type, char, text, coldata):
    """prints an ASCII graph of the DAG

    takes the following arguments (one call per node in the graph):

      - Somewhere to keep the needed state in (init to asciistate())
      - Column of the current node in the set of ongoing edges.
      - Type indicator of node data == ASCIIDATA.
      - Payload: (char, lines):
        - Character to use as node's symbol.
        - List of lines to display as the node's text.
      - Edges; a list of (col, next_col) indicating the edges between
        the current node and its parents.
      - Number of columns (ongoing edges) in the current revision.
      - The difference between the number of columns (ongoing edges)
        in the next revision and the number of columns (ongoing edges)
        in the current revision. That is: -1 means one column removed;
        0 means no columns added or removed; 1 means one column added.
    """
    idx, edges, ncols, coldiff = coldata
    assert -2 < coldiff < 2
    if coldiff == -1:
        # Transform
        #
        #     | | |        | | |
        #     o | |  into  o---+
        #     |X /         |/ /
        #     | |          | |
        fix_long_right_edges(edges)

    # add_padding_line says whether to rewrite
    #
    #     | | | |        | | | |
    #     | o---+  into  | o---+
    #     |  / /         |   | |  # <--- padding line
    #     o | |          |  / /
    #                    o | |
    add_padding_line = (len(text) > 2 and coldiff == -1 and
                        [x for (x, y) in edges if x + 1 < y])

    # fix_nodeline_tail says whether to rewrite
    #
    #     | | o | |        | | o | |
    #     | | |/ /         | | |/ /
    #     | o | |    into  | o / /   # <--- fixed nodeline tail
    #     | |/ /           | |/ /
    #     o | |            o | |
    fix_nodeline_tail = len(text) <= 2 and not add_padding_line

    # nodeline is the line containing the node character (typically o)
    nodeline = ["|", " "] * idx
    nodeline.extend([char, " "])

    nodeline.extend(
        get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
                                state[0], fix_nodeline_tail))

    # shift_interline is the line containing the non-vertical
    # edges between this entry and the next
    shift_interline = ["|", " "] * idx
    if coldiff == -1:
        n_spaces = 1
        edge_ch = "/"
    elif coldiff == 0:
        n_spaces = 2
        edge_ch = "|"
    else:
        n_spaces = 3
        edge_ch = "\\"
    shift_interline.extend(n_spaces * [" "])
    shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))

    # draw edges from the current node to its parents
    draw_edges(edges, nodeline, shift_interline)

    # lines is the list of all graph lines to print
    lines = [nodeline]
    if add_padding_line:
        # NOTE(review): get_padding_line is not defined in this excerpt;
        # presumably provided elsewhere in the module - confirm.
        lines.append(get_padding_line(idx, ncols, edges))
    lines.append(shift_interline)

    # make sure that there are as many graph lines as there are
    # log strings
    while len(text) < len(lines):
        text.append("")
    if len(lines) < len(text):
        extra_interline = ["|", " "] * (ncols + coldiff)
        while len(lines) < len(text):
            lines.append(extra_interline)

    # print lines
    indentation_level = max(ncols, ncols + coldiff)
    for (line, logstr) in zip(lines, text):
        ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
        buf.write(ln.rstrip() + '\n')

    # ... and start over
    state[0] = coldiff
    state[1] = idx
def generate(dag, edgefn, current):
    """Render the whole undo DAG to text, marking *current* with '@'."""
    seen, state = [], [0, 0]
    buf = Buffer()
    for node, parents in list(dag):
        # Label each node with its sequence number and a relative age;
        # the root (time falsy) is labelled 'Original'.
        if node.time:
            age_label = age(int(node.time))
        else:
            age_label = 'Original'
        line = '[%s] %s' % (node.n, age_label)
        if node.n == current:
            char = '@'
        else:
            char = 'o'
        ascii(buf, state, 'C', char, [line], edgefn(seen, node, parents))
    return buf.b
# Mercurial age function -----------------------------------------------------------
agescales = [("year", 3600 * 24 * 365),
("month", 3600 * 24 * 30),
("week", 3600 * 24 * 7),
("day", 3600 * 24),
("hour", 3600),
("minute", 60),
("second", 1)]
def age(ts):
'''turn a timestamp into an age string.'''
def plural(t, c):
if c == 1:
return t
return t + "s"
def fmt(t, c):
return "%d %s" % (c, plural(t, c))
now = time.time()
then = ts
if then > now:
return 'in the future'
delta = max(1, int(now - then))
if delta > agescales[0][1] * 2:
return time.strftime('%Y-%m-%d', time.gmtime(float(ts)))
for t, s in agescales:
n = delta // s
if n >= 2 or s == 1:
return '%s ago' % fmt(t, n)
# Python Vim utility functions -----------------------------------------------------
# Shorthand for running a normal-mode command in the host Vim instance.
normal = lambda s: vim.command('normal %s' % s)

MISSING_BUFFER = "Cannot find Gundo's target buffer (%s)"
MISSING_WINDOW = "Cannot find window (%s) for Gundo's target buffer (%s)"

def _check_sanity():
    '''Check to make sure we're not crazy.

    Does the following things:

    * Make sure the target buffer still exists.
    '''
    b = int(vim.eval('g:gundo_target_n'))

    if not vim.eval('bufloaded(%d)' % b):
        vim.command('echo "%s"' % (MISSING_BUFFER % b))
        return False

    w = int(vim.eval('bufwinnr(%d)' % b))
    if w == -1:
        vim.command('echo "%s"' % (MISSING_WINDOW % (w, b)))
        return False

    return True

def _goto_window_for_buffer(b):
    # Switch focus to the window currently displaying buffer number `b`.
    w = int(vim.eval('bufwinnr(%d)' % int(b)))
    vim.command('%dwincmd w' % w)

def _goto_window_for_buffer_name(bn):
    # Resolve a buffer name to its number, then jump to its window.
    b = vim.eval('bufnr("%s")' % bn)
    return _goto_window_for_buffer(b)

def _undo_to(n):
    # Move the target buffer to undo state `n`; 0 means "before all changes",
    # reached by going back further than &undolevels allows.
    n = int(n)
    if n == 0:
        vim.command('silent earlier %s' % (int(vim.eval('&undolevels')) + 1))
    else:
        vim.command('silent undo %d' % n)

INLINE_HELP = '''\
" Gundo for %s (%d)
" %s/%s - move between undo states
" p - preview diff of selected and current states
" <cr> - revert to selected state
'''
# Python undo tree data structures and functions -----------------------------------
class Buffer(object):
    """Minimal write-only text sink with a file-like ``write`` method.

    Used by ascii()/generate() to collect the rendered graph. Fragments are
    stored in a list and joined lazily via the ``b`` property, avoiding the
    quadratic cost of repeated string concatenation while keeping the
    original ``.write(s)`` / ``.b`` interface.
    """
    def __init__(self):
        self._chunks = []

    def write(self, s):
        """Append *s* to the buffered text (file-like interface)."""
        self._chunks.append(s)

    @property
    def b(self):
        """The full text accumulated so far."""
        return ''.join(self._chunks)
class Node(object):
    """One state in the undo tree: sequence number, parent link, timestamp."""
    def __init__(self, n, parent, time, curhead):
        self.n = int(n)          # undo sequence number (coerced from str/int)
        self.time = time
        self.curhead = curhead   # True for the state the buffer is sitting on
        self.parent = parent
        self.children = []       # filled in later by the render pass
def _make_nodes(alts, nodes, parent=None):
    # Walk one level of Vim's undotree() entries, appending a Node for each
    # and recursing into alternate branches ('alt' sub-lists).
    p = parent
    for alt in alts:
        curhead = 'curhead' in alt
        node = Node(n=alt['seq'], parent=p, time=alt['time'], curhead=curhead)
        nodes.append(node)
        if alt.get('alt'):
            # Alternate branches fork from the same parent, not from `node`.
            _make_nodes(alt['alt'], nodes, p)
        p = node

def make_nodes():
    # Build the full node list from Vim's undotree(), rooted at a synthetic
    # state 0 (time=False, curhead=0) representing the original buffer.
    ut = vim.eval('undotree()')
    entries = ut['entries']

    root = Node(0, None, False, 0)
    nodes = []
    _make_nodes(entries, nodes, root)
    nodes.append(root)
    nmap = dict((node.n, node) for node in nodes)
    return nodes, nmap

def changenr(nodes):
    # The "current" change: parent of the node flagged curhead if any,
    # otherwise whatever Vim reports via changenr().
    _curhead_l = list(itertools.dropwhile(lambda n: not n.curhead, nodes))
    if _curhead_l:
        current = _curhead_l[0].parent.n
    else:
        current = int(vim.eval('changenr()'))
    return current
# Gundo rendering ------------------------------------------------------------------
# Rendering utility functions
def _fmt_time(t):
return time.strftime('%Y-%m-%d %I:%M:%S %p', time.localtime(float(t)))
def _output_preview_text(lines):
_goto_window_for_buffer_name('__Gundo_Preview__')
vim.command('setlocal modifiable')
vim.current.buffer[:] = [line.rstrip('\n') for line in lines]
vim.command('setlocal nomodifiable')
def _generate_preview_diff(current, node_before, node_after):
_goto_window_for_buffer(vim.eval('g:gundo_target_n'))
if not node_after.n: # we're at the original file
before_lines = []
_undo_to(0)
after_lines = vim.current.buffer[:]
before_name = 'n/a'
before_time = ''
after_name = 'Original'
after_time = ''
elif not node_before.n: # we're at a pseudo-root state
_undo_to(0)
before_lines = vim.current.buffer[:]
_undo_to(node_after.n)
after_lines = vim.current.buffer[:]
before_name = 'Original'
before_time = ''
after_name = str(node_after.n)
after_time = _fmt_time(node_after.time)
else:
_undo_to(node_before.n)
before_lines = vim.current.buffer[:]
_undo_to(node_after.n)
after_lines = vim.current.buffer[:]
before_name = str(node_before.n)
before_time = _fmt_time(node_before.time)
after_name = str(node_after.n)
after_time = _fmt_time(node_after.time)
_undo_to(current)
return list(difflib.unified_diff(before_lines, after_lines,
before_name, after_name,
before_time, after_time))
def _generate_change_preview_diff(current, node_before, node_after):
    """Unified diff between two undo states, restoring *current* afterwards.

    Unlike _generate_preview_diff this always diffs two concrete states;
    a falsy change number is labelled 'Original' with no timestamp.
    """
    _goto_window_for_buffer(vim.eval('g:gundo_target_n'))
    _undo_to(node_before.n)
    before_lines = vim.current.buffer[:]
    _undo_to(node_after.n)
    after_lines = vim.current.buffer[:]
    before_name = str(node_before.n or 'Original')
    before_time = _fmt_time(node_before.time) if node_before.time else ''
    after_name = str(node_after.n or 'Original')
    after_time = _fmt_time(node_after.time) if node_after.time else ''
    # Put the buffer back where the user had it before diffing.
    _undo_to(current)
    diff = difflib.unified_diff(before_lines, after_lines,
                                before_name, after_name,
                                before_time, after_time)
    return list(diff)
def GundoRenderGraph():
    """Draw the ASCII undo graph into the __Gundo__ buffer and place the
    cursor on the line marking the current state ('@')."""
    if not _check_sanity():
        return
    nodes, nmap = make_nodes()
    # Wire up child links so the graph generator can walk the tree.
    for node in nodes:
        node.children = [n for n in nodes if n.parent == node]
    def walk_nodes(nodes):
        # Yield (node, parents) pairs in the shape `generate` expects.
        for node in nodes:
            if node.parent:
                yield (node, [node.parent])
            else:
                yield (node, [])
    dag = sorted(nodes, key=lambda n: int(n.n), reverse=True)
    current = changenr(nodes)
    result = generate(walk_nodes(dag), asciiedges, current).rstrip().splitlines()
    result = [' ' + l for l in result]
    target = (vim.eval('g:gundo_target_f'), int(vim.eval('g:gundo_target_n')))
    mappings = (vim.eval('g:gundo_map_move_older'),
                vim.eval('g:gundo_map_move_newer'))
    # Optional inline help header at the top of the graph buffer.
    if int(vim.eval('g:gundo_help')):
        header = (INLINE_HELP % (target + mappings)).splitlines()
    else:
        header = []
    vim.command('call s:GundoOpenGraph()')
    vim.command('setlocal modifiable')
    vim.current.buffer[:] = (header + result)
    vim.command('setlocal nomodifiable')
    # Locate the '@' marker, searching only the portion of each line before
    # the first '[' so bracketed annotations are ignored.
    i = 1
    for line in result:
        try:
            line.split('[')[0].index('@')
            i += 1
            break
        except ValueError:
            pass
        i += 1
    # Move the cursor to that line (offset by the help header length).
    vim.command('%d' % (i+len(header)-1))
def GundoRenderPreview():
    """Render the diff between the target node and its parent into the
    preview window, then return focus to the graph."""
    if not _check_sanity():
        return
    target_state = vim.eval('s:GundoGetTargetState()')
    # Check that there's an undo state. There may not be if we're talking
    # about a buffer with no changes yet.
    if target_state is None:  # fixed: identity comparison, not `== None`
        _goto_window_for_buffer_name('__Gundo__')
        return
    target_state = int(target_state)
    _goto_window_for_buffer(vim.eval('g:gundo_target_n'))
    nodes, nmap = make_nodes()
    current = changenr(nodes)
    node_after = nmap[target_state]
    node_before = node_after.parent
    vim.command('call s:GundoOpenPreview()')
    _output_preview_text(_generate_preview_diff(current, node_before, node_after))
    _goto_window_for_buffer_name('__Gundo__')
def GundoRenderChangePreview():
    """Render the diff between the current state and the target node into
    the preview window, then return focus to the graph."""
    if not _check_sanity():
        return
    target_state = vim.eval('s:GundoGetTargetState()')
    # Check that there's an undo state. There may not be if we're talking
    # about a buffer with no changes yet.
    if target_state is None:  # fixed: identity comparison, not `== None`
        _goto_window_for_buffer_name('__Gundo__')
        return
    target_state = int(target_state)
    _goto_window_for_buffer(vim.eval('g:gundo_target_n'))
    nodes, nmap = make_nodes()
    current = changenr(nodes)
    node_after = nmap[target_state]
    node_before = nmap[current]
    vim.command('call s:GundoOpenPreview()')
    _output_preview_text(_generate_change_preview_diff(current, node_before, node_after))
    _goto_window_for_buffer_name('__Gundo__')
# Gundo undo/redo
def GundoRevert():
    """Revert the target buffer to the undo state selected in the graph."""
    if not _check_sanity():
        return
    target_n = int(vim.eval('s:GundoGetTargetState()'))
    back = vim.eval('g:gundo_target_n')
    _goto_window_for_buffer(back)
    _undo_to(target_n)
    vim.command('GundoRenderGraph')
    # Honour user options: optionally jump back to the target buffer's
    # window and/or close the Gundo windows after reverting.
    if int(vim.eval('g:gundo_return_on_revert')):
        _goto_window_for_buffer(back)
    if int(vim.eval('g:gundo_close_on_revert')):
        vim.command('GundoToggle')
def GundoPlayTo():
    """Animate the buffer through every undo state between the current
    node and the target node, redrawing the graph after each step."""
    if not _check_sanity():
        return
    target_n = int(vim.eval('s:GundoGetTargetState()'))
    back = int(vim.eval('g:gundo_target_n'))
    delay = int(vim.eval('g:gundo_playback_delay'))
    vim.command('echo "%s"' % back)
    _goto_window_for_buffer(back)
    normal('zR')
    nodes, nmap = make_nodes()
    start = nmap[changenr(nodes)]
    end = nmap[target_n]
    def _walk_branch(origin, dest):
        # Walk the parent chain from the higher-numbered node down to the
        # lower-numbered one.  Returns the nodes on that path (oldest-first
        # when moving forward), or None if the two nodes are not on one
        # ancestor chain.
        rev = origin.n < dest.n
        nodes = []
        if origin.n > dest.n:
            current, final = origin, dest
        else:
            current, final = dest, origin
        while current.n >= final.n:
            if current.n == final.n:
                break
            nodes.append(current)
            current = current.parent
        else:
            # Loop condition failed before reaching `final`: no direct path.
            return None
        nodes.append(current)
        if rev:
            return reversed(nodes)
        else:
            return nodes
    branch = _walk_branch(start, end)
    if not branch:
        vim.command('unsilent echo "No path to that node from here!"')
        return
    # Step through each intermediate state with a redraw and a pause.
    for node in branch:
        _undo_to(node.n)
        vim.command('GundoRenderGraph')
        normal('zz')
        _goto_window_for_buffer(back)
        vim.command('redraw')
        vim.command('sleep %dm' % delay)
def initPythonModule():
if sys.version_info[:2] < (2, 4):
vim.command('let s:has_supported_python = 0')
| gpl-3.0 |
aperigault/ansible | lib/ansible/modules/network/fortios/fortios_router_multicast.py | 21 | 30905 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# The library uses Python logging; debug output can be obtained if the
# following is set in your Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_router_multicast
short_description: Configure router multicast in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify router feature and multicast category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
router_multicast:
description:
- Configure router multicast.
default: null
suboptions:
interface:
description:
- PIM interfaces.
suboptions:
bfd:
description:
- Enable/disable Protocol Independent Multicast (PIM) Bidirectional Forwarding Detection (BFD).
choices:
- enable
- disable
cisco-exclude-genid:
description:
- Exclude GenID from hello packets (compatibility with old Cisco IOS).
choices:
- enable
- disable
dr-priority:
description:
- DR election priority.
hello-holdtime:
description:
- Time before old neighbor information expires (0 - 65535 sec, default = 105).
hello-interval:
description:
- Interval between sending PIM hello messages (0 - 65535 sec, default = 30).
igmp:
description:
- IGMP configuration options.
suboptions:
access-group:
description:
- Groups IGMP hosts are allowed to join. Source router.access-list.name.
immediate-leave-group:
description:
- Groups to drop membership for immediately after receiving IGMPv2 leave. Source router.access-list.name.
last-member-query-count:
description:
- Number of group specific queries before removing group (2 - 7, default = 2).
last-member-query-interval:
description:
- Timeout between IGMPv2 leave and removing group (1 - 65535 msec, default = 1000).
query-interval:
description:
- Interval between queries to IGMP hosts (1 - 65535 sec, default = 125).
query-max-response-time:
description:
- Maximum time to wait for a IGMP query response (1 - 25 sec, default = 10).
query-timeout:
description:
- Timeout between queries before becoming querier for network (60 - 900, default = 255).
router-alert-check:
description:
- Enable/disable require IGMP packets contain router alert option.
choices:
- enable
- disable
version:
description:
- Maximum version of IGMP to support.
choices:
- 3
- 2
- 1
join-group:
description:
- Join multicast groups.
suboptions:
address:
description:
- Multicast group IP address.
required: true
multicast-flow:
description:
- Acceptable source for multicast group. Source router.multicast-flow.name.
name:
description:
- Interface name. Source system.interface.name.
required: true
neighbour-filter:
description:
- Routers acknowledged as neighbor routers. Source router.access-list.name.
passive:
description:
- Enable/disable listening to IGMP but not participating in PIM.
choices:
- enable
- disable
pim-mode:
description:
- PIM operation mode.
choices:
- sparse-mode
- dense-mode
propagation-delay:
description:
- Delay flooding packets on this interface (100 - 5000 msec, default = 500).
rp-candidate:
description:
- Enable/disable compete to become RP in elections.
choices:
- enable
- disable
rp-candidate-group:
description:
- Multicast groups managed by this RP. Source router.access-list.name.
rp-candidate-interval:
description:
- RP candidate advertisement interval (1 - 16383 sec, default = 60).
rp-candidate-priority:
description:
- Router's priority as RP.
state-refresh-interval:
description:
- Interval between sending state-refresh packets (1 - 100 sec, default = 60).
static-group:
description:
- Statically set multicast groups to forward out. Source router.multicast-flow.name.
ttl-threshold:
description:
- Minimum TTL of multicast packets that will be forwarded (applied only to new multicast routes) (1 - 255, default = 1).
multicast-routing:
description:
- Enable/disable IP multicast routing.
choices:
- enable
- disable
pim-sm-global:
description:
- PIM sparse-mode global settings.
suboptions:
accept-register-list:
description:
- Sources allowed to register packets with this Rendezvous Point (RP). Source router.access-list.name.
accept-source-list:
description:
- Sources allowed to send multicast traffic. Source router.access-list.name.
bsr-allow-quick-refresh:
description:
- Enable/disable accept BSR quick refresh packets from neighbors.
choices:
- enable
- disable
bsr-candidate:
description:
- Enable/disable allowing this router to become a bootstrap router (BSR).
choices:
- enable
- disable
bsr-hash:
description:
- BSR hash length (0 - 32, default = 10).
bsr-interface:
description:
- Interface to advertise as candidate BSR. Source system.interface.name.
bsr-priority:
description:
- BSR priority (0 - 255, default = 0).
cisco-crp-prefix:
description:
- Enable/disable making candidate RP compatible with old Cisco IOS.
choices:
- enable
- disable
cisco-ignore-rp-set-priority:
description:
- Use only hash for RP selection (compatibility with old Cisco IOS).
choices:
- enable
- disable
cisco-register-checksum:
description:
                    - Checksum the entire register packet (for old Cisco IOS compatibility).
choices:
- enable
- disable
cisco-register-checksum-group:
description:
- Cisco register checksum only these groups. Source router.access-list.name.
join-prune-holdtime:
description:
- Join/prune holdtime (1 - 65535, default = 210).
message-interval:
description:
- Period of time between sending periodic PIM join/prune messages in seconds (1 - 65535, default = 60).
null-register-retries:
description:
- Maximum retries of null register (1 - 20, default = 1).
register-rate-limit:
description:
- Limit of packets/sec per source registered through this RP (0 - 65535, default = 0 which means unlimited).
register-rp-reachability:
description:
- Enable/disable check RP is reachable before registering packets.
choices:
- enable
- disable
register-source:
description:
- Override source address in register packets.
choices:
- disable
- interface
- ip-address
register-source-interface:
description:
- Override with primary interface address. Source system.interface.name.
register-source-ip:
description:
- Override with local IP address.
register-supression:
description:
- Period of time to honor register-stop message (1 - 65535 sec, default = 60).
rp-address:
description:
- Statically configure RP addresses.
suboptions:
group:
description:
- Groups to use this RP. Source router.access-list.name.
id:
description:
- ID.
required: true
ip-address:
description:
- RP router address.
rp-register-keepalive:
description:
- Timeout for RP receiving data on (S,G) tree (1 - 65535 sec, default = 185).
spt-threshold:
description:
- Enable/disable switching to source specific trees.
choices:
- enable
- disable
spt-threshold-group:
description:
- Groups allowed to switch to source tree. Source router.access-list.name.
ssm:
description:
- Enable/disable source specific multicast.
choices:
- enable
- disable
ssm-range:
description:
- Groups allowed to source specific multicast. Source router.access-list.name.
route-limit:
description:
- Maximum number of multicast routes.
route-threshold:
description:
- Generate warnings when the number of multicast routes exceeds this number, must not be greater than route-limit.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure router multicast.
fortios_router_multicast:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
router_multicast:
interface:
-
bfd: "enable"
cisco-exclude-genid: "enable"
dr-priority: "6"
hello-holdtime: "7"
hello-interval: "8"
igmp:
access-group: "<your_own_value> (source router.access-list.name)"
immediate-leave-group: "<your_own_value> (source router.access-list.name)"
last-member-query-count: "12"
last-member-query-interval: "13"
query-interval: "14"
query-max-response-time: "15"
query-timeout: "16"
router-alert-check: "enable"
version: "3"
join-group:
-
address: "<your_own_value>"
multicast-flow: "<your_own_value> (source router.multicast-flow.name)"
name: "default_name_22 (source system.interface.name)"
neighbour-filter: "<your_own_value> (source router.access-list.name)"
passive: "enable"
pim-mode: "sparse-mode"
propagation-delay: "26"
rp-candidate: "enable"
rp-candidate-group: "<your_own_value> (source router.access-list.name)"
rp-candidate-interval: "29"
rp-candidate-priority: "30"
state-refresh-interval: "31"
static-group: "<your_own_value> (source router.multicast-flow.name)"
ttl-threshold: "33"
multicast-routing: "enable"
pim-sm-global:
accept-register-list: "<your_own_value> (source router.access-list.name)"
accept-source-list: "<your_own_value> (source router.access-list.name)"
bsr-allow-quick-refresh: "enable"
bsr-candidate: "enable"
bsr-hash: "40"
bsr-interface: "<your_own_value> (source system.interface.name)"
bsr-priority: "42"
cisco-crp-prefix: "enable"
cisco-ignore-rp-set-priority: "enable"
cisco-register-checksum: "enable"
cisco-register-checksum-group: "<your_own_value> (source router.access-list.name)"
join-prune-holdtime: "47"
message-interval: "48"
null-register-retries: "49"
register-rate-limit: "50"
register-rp-reachability: "enable"
register-source: "disable"
register-source-interface: "<your_own_value> (source system.interface.name)"
register-source-ip: "<your_own_value>"
register-supression: "55"
rp-address:
-
group: "<your_own_value> (source router.access-list.name)"
id: "58"
ip-address: "<your_own_value>"
rp-register-keepalive: "60"
spt-threshold: "enable"
spt-threshold-group: "<your_own_value> (source router.access-list.name)"
ssm: "enable"
ssm-range: "<your_own_value> (source router.access-list.name)"
route-limit: "65"
route-threshold: "66"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
    """Open an authenticated session on the module-level ``fos`` handle.

    HTTPS is enabled unless the caller explicitly set ``https`` to a
    falsy value in the module parameters.
    """
    fos.debug('on')
    if data.get('https', True):
        fos.https('on')
    else:
        fos.https('off')
    fos.login(data['host'], data['username'], data['password'])
def filter_router_multicast_data(json):
    """Return a copy of *json* restricted to the router-multicast options.

    Keys that are absent or explicitly None are dropped so only values
    the user actually supplied are sent to the device.
    """
    option_list = ['interface', 'multicast-routing', 'pim-sm-global',
                   'route-limit', 'route-threshold']
    return dict((attr, json[attr]) for attr in option_list
                if attr in json and json[attr] is not None)
def flatten_multilists_attributes(data):
    """Join multi-value list attributes into space-separated strings.

    ``multilist_attrs`` holds key paths (tuples of dict keys) whose values
    are lists that FortiOS expects as a single space-separated string.
    It is currently empty for this module, so the call is a no-op, but the
    traversal is kept for parity with sibling fortios modules.

    Fixed: the original built a ``data[...]`` expression string and ran it
    through eval()/exec() with a bare ``except BaseException`` — replaced
    with plain key traversal and narrow exception handling.
    """
    multilist_attrs = []
    for attr in multilist_attrs:
        try:
            # Walk to the container holding the attribute, then flatten
            # its list value in place.
            parent = data
            for key in attr[:-1]:
                parent = parent[key]
            current_val = parent[attr[-1]]
            parent[attr[-1]] = ' '.join(elem for elem in current_val)
        except (KeyError, IndexError, TypeError):
            # Attribute not present (or not a list) — leave data untouched.
            pass
    return data
def router_multicast(data, fos):
    """Prepare the router multicast payload and push it to the device.

    Flattens any multi-value list attributes, strips unknown/unset
    options, and issues the `set` call against the router/multicast path
    in the requested VDOM.
    """
    vdom = data['vdom']
    raw_config = data['router_multicast']
    payload = filter_router_multicast_data(
        flatten_multilists_attributes(raw_config))
    return fos.set('router',
                   'multicast',
                   data=payload,
                   vdom=vdom)
def fortios_router(data, fos):
    """Log in, apply the router multicast configuration, log out.

    Returns the Ansible-style triple ``(is_error, has_changed, response)``.
    """
    login(data)
    if data['router_multicast']:
        resp = router_multicast(data, fos)
    fos.logout()
    succeeded = resp['status'] == "success"
    return not succeeded, succeeded, resp
def main():
    """Module entry point: declare the argument spec, connect to FortiOS,
    apply the requested router multicast configuration and report the
    result back to Ansible."""
    # Argument spec mirrors the DOCUMENTATION block; keys use the FortiOS
    # API's dashed names (including its 'register-supression' spelling).
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "router_multicast": {
            "required": False, "type": "dict",
            "options": {
                "interface": {"required": False, "type": "list",
                              "options": {
                                  "bfd": {"required": False, "type": "str",
                                          "choices": ["enable", "disable"]},
                                  "cisco-exclude-genid": {"required": False, "type": "str",
                                                          "choices": ["enable", "disable"]},
                                  "dr-priority": {"required": False, "type": "int"},
                                  "hello-holdtime": {"required": False, "type": "int"},
                                  "hello-interval": {"required": False, "type": "int"},
                                  "igmp": {"required": False, "type": "dict",
                                           "options": {
                                               "access-group": {"required": False, "type": "str"},
                                               "immediate-leave-group": {"required": False, "type": "str"},
                                               "last-member-query-count": {"required": False, "type": "int"},
                                               "last-member-query-interval": {"required": False, "type": "int"},
                                               "query-interval": {"required": False, "type": "int"},
                                               "query-max-response-time": {"required": False, "type": "int"},
                                               "query-timeout": {"required": False, "type": "int"},
                                               "router-alert-check": {"required": False, "type": "str",
                                                                      "choices": ["enable", "disable"]},
                                               "version": {"required": False, "type": "str",
                                                           "choices": ["3", "2", "1"]}
                                           }},
                                  "join-group": {"required": False, "type": "list",
                                                 "options": {
                                                     "address": {"required": True, "type": "str"}
                                                 }},
                                  "multicast-flow": {"required": False, "type": "str"},
                                  "name": {"required": True, "type": "str"},
                                  "neighbour-filter": {"required": False, "type": "str"},
                                  "passive": {"required": False, "type": "str",
                                              "choices": ["enable", "disable"]},
                                  "pim-mode": {"required": False, "type": "str",
                                               "choices": ["sparse-mode", "dense-mode"]},
                                  "propagation-delay": {"required": False, "type": "int"},
                                  "rp-candidate": {"required": False, "type": "str",
                                                   "choices": ["enable", "disable"]},
                                  "rp-candidate-group": {"required": False, "type": "str"},
                                  "rp-candidate-interval": {"required": False, "type": "int"},
                                  "rp-candidate-priority": {"required": False, "type": "int"},
                                  "state-refresh-interval": {"required": False, "type": "int"},
                                  "static-group": {"required": False, "type": "str"},
                                  "ttl-threshold": {"required": False, "type": "int"}
                              }},
                "multicast-routing": {"required": False, "type": "str",
                                      "choices": ["enable", "disable"]},
                "pim-sm-global": {"required": False, "type": "dict",
                                  "options": {
                                      "accept-register-list": {"required": False, "type": "str"},
                                      "accept-source-list": {"required": False, "type": "str"},
                                      "bsr-allow-quick-refresh": {"required": False, "type": "str",
                                                                  "choices": ["enable", "disable"]},
                                      "bsr-candidate": {"required": False, "type": "str",
                                                        "choices": ["enable", "disable"]},
                                      "bsr-hash": {"required": False, "type": "int"},
                                      "bsr-interface": {"required": False, "type": "str"},
                                      "bsr-priority": {"required": False, "type": "int"},
                                      "cisco-crp-prefix": {"required": False, "type": "str",
                                                           "choices": ["enable", "disable"]},
                                      "cisco-ignore-rp-set-priority": {"required": False, "type": "str",
                                                                       "choices": ["enable", "disable"]},
                                      "cisco-register-checksum": {"required": False, "type": "str",
                                                                  "choices": ["enable", "disable"]},
                                      "cisco-register-checksum-group": {"required": False, "type": "str"},
                                      "join-prune-holdtime": {"required": False, "type": "int"},
                                      "message-interval": {"required": False, "type": "int"},
                                      "null-register-retries": {"required": False, "type": "int"},
                                      "register-rate-limit": {"required": False, "type": "int"},
                                      "register-rp-reachability": {"required": False, "type": "str",
                                                                   "choices": ["enable", "disable"]},
                                      "register-source": {"required": False, "type": "str",
                                                          "choices": ["disable", "interface", "ip-address"]},
                                      "register-source-interface": {"required": False, "type": "str"},
                                      "register-source-ip": {"required": False, "type": "str"},
                                      "register-supression": {"required": False, "type": "int"},
                                      "rp-address": {"required": False, "type": "list",
                                                     "options": {
                                                         "group": {"required": False, "type": "str"},
                                                         "id": {"required": True, "type": "int"},
                                                         "ip-address": {"required": False, "type": "str"}
                                                     }},
                                      "rp-register-keepalive": {"required": False, "type": "int"},
                                      "spt-threshold": {"required": False, "type": "str",
                                                        "choices": ["enable", "disable"]},
                                      "spt-threshold-group": {"required": False, "type": "str"},
                                      "ssm": {"required": False, "type": "str",
                                              "choices": ["enable", "disable"]},
                                      "ssm-range": {"required": False, "type": "str"}
                                  }},
                "route-limit": {"required": False, "type": "int"},
                "route-threshold": {"required": False, "type": "int"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # fortiosapi is an optional dependency: fail with a clean module error
    # instead of an ImportError traceback when it is missing.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")
    # Bind the connection handle into the module-level `fos` used by login().
    global fos
    fos = FortiOSAPI()
    is_error, has_changed, result = fortios_router(module.params, fos)
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
dominikl/openmicroscopy | components/tools/OmeroWeb/omeroweb/webclient/tree.py | 3 | 61675 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2015 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
''' Helper functions for views that handle object trees '''
import time
import omero
from omero.rtypes import rlong, unwrap, wrap
from django.conf import settings
from django.http import Http404
from datetime import datetime
from copy import deepcopy
def build_clause(components, name='', join=','):
    """Assemble an optional clause string from *components*.

    Returns the empty string when there are no components; otherwise the
    components joined by *join* and prefixed by *name*, padded with
    spaces so clauses can be concatenated safely into a query.
    """
    if components:
        separator = ' ' + join + ' '
        return ' ' + name + ' ' + separator.join(components) + ' '
    return ''
def parse_permissions_css(permissions, ownerid, conn):
    """Translate object permissions into a space-separated CSS class string.

    Each granted restriction (canEdit/canAnnotate/canLink/canDelete)
    contributes its class; 'isOwned' is added when the current user owns
    the object, and 'canChgrp' when the user owns it or is an admin.
    """
    css_classes = [restriction
                   for restriction in ('canEdit',
                                       'canAnnotate',
                                       'canLink',
                                       'canDelete')
                   if permissions.get(restriction)]
    if ownerid == conn.getUserId():
        css_classes.append("isOwned")
    if ownerid == conn.getUserId() or conn.isAdmin():
        css_classes.append("canChgrp")
    return ' '.join(css_classes)
def _marshal_group(conn, row):
    """Convert an ExperimenterGroup projection row into a plain dict.

    *row* is ``[id (rlong), name (rstring), permissions (rmap)]``; the
    result exposes 'id', 'name' and the unwrapped 'perm' string.
    """
    group_id, name, permissions = row
    return {
        'id': unwrap(group_id),
        'name': unwrap(name),
        'perm': unwrap(unwrap(permissions)['perm']),
    }
def marshal_groups(conn, member_id=-1, page=1, limit=settings.PAGE):
    ''' Marshals groups, optionally filtered by a member experimenter.
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param member_id The ID of the experimenter to filter by
                         or -1 for all, defaults to -1
        @type member_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
                    defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
                     defaults to the value set in settings.PAGE
        @type page L{long}
    '''
    groups = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Query across all groups; filtering happens in the HQL below.
    service_opts.setOmeroGroup(-1)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    join_clause = ''
    where_clause = ''
    if member_id != -1:
        # Restrict to groups the given experimenter belongs to.
        params.add('mid', rlong(member_id))
        join_clause = ' join grp.groupExperimenterMap grexp '
        where_clause = ' and grexp.child.id = :mid '
    qs = conn.getQueryService()
    # The system 'user' group is always excluded.
    q = """
        select grp.id,
               grp.name,
               grp.details.permissions
        from ExperimenterGroup grp
        %s
        where grp.name != 'user'
        %s
        order by lower(grp.name)
        """ % (join_clause, where_clause)
    for e in qs.projection(q, params, service_opts):
        groups.append(_marshal_group(conn, e[0:3]))
    return groups
def _marshal_experimenter(conn, row):
    """Convert an Experimenter projection row into a plain dict.

    *row* is ``[id, omeName, firstName, lastName, email]`` (rtypes).
    The email key is only present when the server returned a value,
    since email is not mandatory.
    """
    experimenter_id, ome_name, first_name, last_name, email = row
    marshalled = {
        'id': unwrap(experimenter_id),
        'omeName': unwrap(ome_name),
        'firstName': unwrap(first_name),
        'lastName': unwrap(last_name),
    }
    if email:
        marshalled['email'] = unwrap(email)
    return marshalled
def marshal_experimenters(conn, group_id=-1, page=1, limit=settings.PAGE):
    ''' Marshals experimenters, possibly filtered by group.
        To make this consistent with the other tree.py functions
        this will default to restricting the results by the calling
        experimenters group membership. e.g. if user is in groupA
        and groupB, then users from groupA and groupB will be
        returned.
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param group_id The Group ID to filter by or -1 for all groups,
                        defaults to -1
        @type group_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
                    defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
                     defaults to the value set in settings.PAGE
        @type page L{long}
    '''
    experimenters = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Normalise: treat None the same as "all groups".
    if group_id is None:
        group_id = -1
    # This does not actually restrict the results so the restriction to
    # a certain group is done in the query
    service_opts.setOmeroGroup(-1)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    where_clause = ''
    if group_id != -1:
        # Restrict to members of the requested group.
        params.add('gid', rlong(group_id))
        where_clause = '''
                       join experimenter.groupExperimenterMap grexp
                       where grexp.parent.id = :gid
                       '''
    # Don't currently need this filtering
    # Restrict by the current user's group membership
    # else:
    #     params.add('eid', rlong(conn.getUserId()))
    #     where_clause = '''
    #                    join experimenter.groupExperimenterMap grexp
    #                    where grexp.child.id = :eid
    #                    '''
    qs = conn.getQueryService()
    q = """
        select experimenter.id,
               experimenter.omeName,
               experimenter.firstName,
               experimenter.lastName,
               experimenter.email
        from Experimenter experimenter %s
        order by lower(experimenter.omeName), experimenter.id
        """ % (where_clause)
    for e in qs.projection(q, params, service_opts):
        experimenters.append(_marshal_experimenter(conn, e[0:5]))
    return experimenters
def marshal_experimenter(conn, experimenter_id):
    ''' Marshals a single experimenter.
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param experimenter_id The Experimenter ID to get details for
        @type experimenter_id L{long}
        @raise Http404 if no experimenter exists with the given ID
    '''
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Look across all groups so the experimenter is found regardless of
    # the caller's current group context.
    service_opts.setOmeroGroup(-1)
    params.add('id', rlong(experimenter_id))
    qs = conn.getQueryService()
    q = """
        select experimenter.id,
               experimenter.omeName,
               experimenter.firstName,
               experimenter.lastName,
               experimenter.email
        from Experimenter experimenter
        where experimenter.id = :id
        """
    rows = qs.projection(q, params, service_opts)
    if len(rows) != 1:
        raise Http404("No Experimenter found with ID %s" % experimenter_id)
    return _marshal_experimenter(conn, rows[0][0:5])
def _marshal_project(conn, row):
    """Convert a Project projection row into a plain dict.

    *row* is ``[id, name, ownerId, permissions, childCount]`` (rtypes);
    permissions are rendered to CSS classes via parse_permissions_css.
    """
    project_id, name, owner_id, permissions, child_count = row
    owner = unwrap(owner_id)
    return {
        'id': unwrap(project_id),
        'name': unwrap(name),
        'ownerId': owner,
        'childCount': unwrap(child_count),
        'permsCss': parse_permissions_css(permissions, owner, conn),
    }
def marshal_projects(conn, group_id=-1, experimenter_id=-1,
                     page=1, limit=settings.PAGE):
    ''' Marshals projects with their child (dataset) counts.
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param group_id The Group ID to filter by or -1 for all groups,
                        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
                               or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
                    defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
                     defaults to the value set in settings.PAGE
        @type page L{long}
    '''
    projects = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    where_clause = ''
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        where_clause = 'where project.details.owner.id = :id'
    qs = conn.getQueryService()
    # 'project as project_details_permissions' pulls back the object so
    # its permissions can be read by _marshal_project.
    q = """
        select new map(project.id as id,
               project.name as name,
               project.details.owner.id as ownerId,
               project as project_details_permissions,
               (select count(id) from ProjectDatasetLink pdl
                    where pdl.parent = project.id) as childCount)
        from Project project
        %s
        order by lower(project.name), project.id
        """ % (where_clause)
    for e in qs.projection(q, params, service_opts):
        # Each row is a single map; flatten to the positional list that
        # _marshal_project expects.
        e = unwrap(e)
        e = [e[0]["id"], e[0]["name"], e[0]["ownerId"],
             e[0]["project_details_permissions"], e[0]["childCount"]]
        projects.append(_marshal_project(conn, e[0:5]))
    return projects
def _marshal_dataset(conn, row):
    ''' Marshal a Dataset row (list) into a dictionary. Order
        and type of columns in row is:
          * id (rlong)
          * name (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * child_count (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Dataset row to marshal
        @type row L{list}
    '''
    dataset_id, name, owner_id, permissions, child_count = row
    owner = unwrap(owner_id)
    # Key order matters for consumers that serialise this dict as-is.
    return {
        'id': unwrap(dataset_id),
        'name': unwrap(name),
        'ownerId': owner,
        'childCount': unwrap(child_count),
        'permsCss': parse_permissions_css(permissions, owner, conn),
    }
def marshal_datasets(conn, project_id=None, orphaned=False, group_id=-1,
                     experimenter_id=-1, page=1, limit=settings.PAGE):
    ''' Marshals datasets
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param project_id The Project ID to filter by or `None` to
        not filter by a specific project.
        defaults to `None`
        @type project_id L{long}
        @param orphaned If this is to filter by orphaned data. Overridden
        by project_id.
        defaults to False
        @type orphaned Boolean
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
        @return List of dicts as produced by _marshal_dataset, ordered by
        case-insensitive name then id.
    '''
    datasets = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    where_clause = []
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        where_clause.append('dataset.details.owner.id = :id')
    qs = conn.getQueryService()
    # The whole Dataset object is selected so the marshaller can read
    # its details/permissions.
    q = """
        select new map(dataset.id as id,
               dataset.name as name,
               dataset.details.owner.id as ownerId,
               dataset as dataset_details_permissions,
               (select count(id) from DatasetImageLink dil
                where dil.parent=dataset.id) as childCount)
               from Dataset dataset
        """
    # If this is a query to get datasets from a parent project
    if project_id:
        params.add('pid', rlong(project_id))
        q += 'join dataset.projectLinks plink'
        where_clause.append('plink.parent.id = :pid')
    # If this is a query to get datasets with no parent project
    elif orphaned:
        where_clause.append(
            """
            not exists (
                select pdlink from ProjectDatasetLink as pdlink
                where pdlink.child = dataset.id
            )
            """
        )
    q += """
        %s
        order by lower(dataset.name), dataset.id
        """ % build_clause(where_clause, 'where', 'and')
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["dataset_details_permissions"],
             e[0]["childCount"]]
        datasets.append(_marshal_dataset(conn, e[0:5]))
    return datasets
def _marshal_date(time):
d = datetime.fromtimestamp(time/1000)
return d.isoformat() + 'Z'
def _marshal_image(conn, row, row_pixels=None, share_id=None,
                   date=None, acqDate=None, thumbVersion=None):
    ''' Marshal an Image row (list) into a dictionary. Order
        and type of columns in row is:
          * id (rlong)
          * name (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * fileset_id (rlong)
        May also take a row_pixels (list) if X,Y,Z dimensions are loaded:
          * pixels.sizeX (rlong)
          * pixels.sizeY (rlong)
          * pixels.sizeZ (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Image row to marshal
        @type row L{list}
        @param row_pixels The Image row pixels data to marshal
        @type row_pixels L{list}
        @param share_id Share ID the image was loaded through, if any
        @param date Creation-event timestamp (ms since epoch), if loaded
        @param acqDate Acquisition timestamp (ms since epoch), if loaded
        @param thumbVersion Version of the user's thumbnail, if loaded
    '''
    image_id, name, owner_id, permissions, fileset_id = row
    owner = unwrap(owner_id)
    image = {
        'id': unwrap(image_id),
        'name': unwrap(name),
        'ownerId': owner,
        'permsCss': parse_permissions_css(permissions, owner, conn),
    }
    fileset = unwrap(fileset_id)
    if fileset is not None:
        image['filesetId'] = fileset
    if row_pixels:
        for key, value in zip(('sizeX', 'sizeY', 'sizeZ'), row_pixels):
            image[key] = unwrap(value)
    if share_id is not None:
        image['shareId'] = share_id
    if date is not None:
        image['date'] = _marshal_date(unwrap(date))
    if acqDate is not None:
        image['acqDate'] = _marshal_date(unwrap(acqDate))
    if thumbVersion is not None:
        image['thumbVersion'] = thumbVersion
    return image
def _marshal_image_deleted(conn, image_id):
    ''' Marshal a placeholder for an Image that has been deleted.
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param image_id The image id to marshal
        @type image_id L{long}
    '''
    return dict(id=unwrap(image_id), deleted=True)
def marshal_images(conn, dataset_id=None, orphaned=False, share_id=None,
                   load_pixels=False, group_id=-1, experimenter_id=-1,
                   page=1, date=False, thumb_version=False,
                   limit=settings.PAGE):
    ''' Marshals images
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param dataset_id The Dataset ID to filter by or `None` to
        not filter by a specific dataset.
        defaults to `None`
        @type dataset_id L{long}
        @param orphaned If this is to filter by orphaned data. Overridden
        by dataset_id.
        defaults to False
        @type orphaned Boolean
        @param share_id The Share ID to filter by or `None` to
        not filter by a specific share.
        defaults to `None`
        @type share_id L{long}
        @param load_pixels Whether to load the X,Y,Z dimensions
        @type load_pixels Boolean
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param date Whether to load creation/acquisition dates
        @type date Boolean
        @param thumb_version Whether to load each image's thumbnail version
        @type thumb_version Boolean
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
    '''
    images = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    from_join_clauses = []
    where_clause = []
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        where_clause.append('image.details.owner.id = :id')
    qs = conn.getQueryService()
    # Optional extra columns appended to the projection map below
    extraValues = ""
    if load_pixels:
        extraValues = """
             ,
             pix.sizeX as sizeX,
             pix.sizeY as sizeY,
             pix.sizeZ as sizeZ
             """
    if date:
        extraValues += """,
             image.details.creationEvent.time as date,
             image.acquisitionDate as acqDate
             """
    q = """
        select new map(image.id as id,
               image.name as name,
               image.details.owner.id as ownerId,
               image as image_details_permissions,
               image.fileset.id as filesetId %s)
        """ % extraValues
    from_join_clauses.append('Image image')
    if load_pixels:
        # We use 'left outer join', since we still want images if no pixels
        from_join_clauses.append('left outer join image.pixels pix')
    # If this is a query to get images from a parent dataset
    if dataset_id is not None:
        params.add('did', rlong(dataset_id))
        from_join_clauses.append('join image.datasetLinks dlink')
        where_clause.append('dlink.parent.id = :did')
    # If this is a query to get images with no parent datasets (orphans)
    # At the moment the implementation assumes that a cross-linked
    # object is not an orphan. We may need to change that so that a user
    # sees all the data that belongs to them that is not assigned to a
    # container that they own.
    elif orphaned:
        orphan_where = """
                        not exists (
                            select dilink from DatasetImageLink as dilink
                            where dilink.child = image.id

                        """
        # This is what is necessary if an orphan means that it has no
        # container that belongs to the image owner. This corresponds
        # to marshal_orphaned as well because of the child count
        # if experimenter_id is not None and experimenter_id != -1:
        #     orphan_where += ' and dilink.parent.details.owner.id = :id '
        orphan_where += ') '
        where_clause.append(orphan_where)
        # Also discount any images which are part of a screen. No need to
        # take owner into account on this because we don't want them in
        # orphans either way
        where_clause.append(
            """
            not exists (
                select ws from WellSample ws
                where ws.image.id = image.id
            )
            """
        )
    # If this is a query to get images in a share
    if share_id is not None:
        # Get the contents of the blob which contains the images in the share
        # Would be nice to do this without the ShareService, preferably as part
        # of the single query
        image_rids = [image_rid.getId().val
                      for image_rid
                      in conn.getShareService().getContents(share_id)
                      if isinstance(image_rid, omero.model.ImageI)]
        # If there are no images in the share, don't bother querying
        if not image_rids:
            return images
        params.add('iids', wrap(image_rids))
        where_clause.append('image.id in (:iids)')
    q += """
        %s %s
        order by lower(image.name), image.id
        """ % (' from ' + ' '.join(from_join_clauses),
               build_clause(where_clause, 'where', 'and'))
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)[0]
        d = [e["id"],
             e["name"],
             e["ownerId"],
             e["image_details_permissions"],
             e["filesetId"]]
        kwargs = {'conn': conn, 'row': d[0:5]}
        if load_pixels:
            d = [e["sizeX"], e["sizeY"], e["sizeZ"]]
            kwargs['row_pixels'] = d
        if date:
            kwargs['acqDate'] = e['acqDate']
            kwargs['date'] = e['date']
        # While marshalling the images, determine if there are any
        # images mentioned in shares that are not in the results
        # because they have been deleted
        if share_id is not None and image_rids and e["id"] in image_rids:
            image_rids.remove(e["id"])
            kwargs['share_id'] = share_id
        images.append(_marshal_image(**kwargs))
    # Load thumbnails separately
    # We want version of most recent thumbnail (max thumbId) owned by user
    if thumb_version and len(images) > 0:
        userId = conn.getUserId()
        iids = [i['id'] for i in images]
        params = omero.sys.ParametersI()
        params.addIds(iids)
        params.add('thumbOwner', wrap(userId))
        q = """select image.id, thumbs.version from Image image
            join image.pixels pix join pix.thumbnails thumbs
            where image.id in (:ids)
            and thumbs.id = (
                select max(t.id)
                from Thumbnail t
                where t.pixels = pix.id
                and t.details.owner.id = :thumbOwner
            )
            """
        thumbVersions = {}
        for t in qs.projection(q, params, service_opts):
            iid, tv = unwrap(t)
            thumbVersions[iid] = tv
        # For all images, set thumb version if we have it...
        for i in images:
            if i['id'] in thumbVersions:
                i['thumbVersion'] = thumbVersions[i['id']]
    # If there were any deleted images in the share, marshal and return
    # those
    if share_id is not None and image_rids:
        for image_rid in image_rids:
            images.append(_marshal_image_deleted(conn, image_rid))
    return images
def _marshal_screen(conn, row):
    ''' Marshal a Screen row (list) into a dictionary. Order and
        type of columns in row is:
          * id (rlong)
          * name (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * child_count (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Screen row to marshal
        @type row L{list}
    '''
    screen_id, name, owner_id, permissions, child_count = row
    owner = unwrap(owner_id)
    # Key order matters for consumers that serialise this dict as-is.
    return {
        'id': unwrap(screen_id),
        'name': unwrap(name),
        'ownerId': owner,
        'childCount': unwrap(child_count),
        'permsCss': parse_permissions_css(permissions, owner, conn),
    }
def marshal_screens(conn, group_id=-1, experimenter_id=-1, page=1,
                    limit=settings.PAGE):
    ''' Marshals screens
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
    '''
    screens = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Group context: None means "search across all groups" (-1)
    service_opts.setOmeroGroup(-1 if group_id is None else group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page - 1) * limit, limit)
    where_clause = ''
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        where_clause = 'where screen.details.owner.id = :id'
    qs = conn.getQueryService()
    # The whole Screen object is selected so the marshaller can read
    # its details/permissions.
    q = """
        select new map(screen.id as id,
               screen.name as name,
               screen.details.owner.id as ownerId,
               screen as screen_details_permissions,
               (select count(spl.id) from ScreenPlateLink spl
                where spl.parent=screen.id) as childCount)
               from Screen screen
               %s
               order by lower(screen.name), screen.id
        """ % where_clause
    for record in qs.projection(q, params, service_opts):
        fields = unwrap(record)[0]
        screens.append(_marshal_screen(conn, [
            fields["id"],
            fields["name"],
            fields["ownerId"],
            fields["screen_details_permissions"],
            fields["childCount"],
        ]))
    return screens
def _marshal_plate(conn, row):
    ''' Marshal a Plate row (list) into a dictionary. Order and
        type of columns in row is:
          * id (rlong)
          * name (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * child_count (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Plate row to marshal
        @type row L{list}
    '''
    plate_id, name, owner_id, permissions, child_count = row
    owner = unwrap(owner_id)
    # Key order matters for consumers that serialise this dict as-is.
    return {
        'id': unwrap(plate_id),
        'name': unwrap(name),
        'ownerId': owner,
        'childCount': unwrap(child_count),
        'permsCss': parse_permissions_css(permissions, owner, conn),
    }
def marshal_plates(conn, screen_id=None, orphaned=False, group_id=-1,
                   experimenter_id=-1, page=1, limit=settings.PAGE):
    ''' Marshals plates
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param screen_id The Screen ID to filter by or `None` to
        not filter by a specific screen.
        defaults to `None`
        @type screen_id L{long}
        @param orphaned If this is to filter by orphaned data. Overridden
        by screen_id.
        defaults to False
        @type orphaned Boolean
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
    '''
    plates = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    where_clause = []
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        where_clause.append('plate.details.owner.id = :id')
    qs = conn.getQueryService()
    # childCount counts PlateAcquisitions ('runs') under each plate
    q = """
        select new map(plate.id as id,
               plate.name as name,
               plate.details.owner.id as ownerId,
               plate as plate_details_permissions,
               (select count(pa.id) from PlateAcquisition pa
                where pa.plate.id=plate.id) as childCount)
               from Plate plate
        """
    # If this is a query to get plates from a parent screen
    if screen_id is not None:
        params.add('sid', rlong(screen_id))
        q += 'join plate.screenLinks slink'
        where_clause.append('slink.parent.id = :sid')
    # If this is a query to get plates with no parent screens
    elif orphaned:
        where_clause.append(
            """
            not exists (
                select splink from ScreenPlateLink as splink
                where splink.child = plate.id
            )
            """
        )
    q += """
        %s
        order by lower(plate.name), plate.id
        """ % build_clause(where_clause, 'where', 'and')
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["plate_details_permissions"],
             e[0]["childCount"]]
        plates.append(_marshal_plate(conn, e[0:5]))
    return plates
def _marshal_plate_acquisition(conn, row):
    ''' Marshal a PlateAcquisition row (list) into a dictionary.
        Order and type of columns in row is:
          * id (rlong)
          * name (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * startTime (rtime)
          * endTime (rtime)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The PlateAcquisition row to marshal
        @type row L{list}
    '''
    pa_id, name, owner_id, permissions, start_time, end_time = row
    run = {'id': unwrap(pa_id)}
    # Prefer the explicit name; fall back on the start/end interval if
    # both are present, then on an id-based label.
    if name is not None:
        run['name'] = unwrap(name)
    elif start_time is not None and end_time is not None:
        started = datetime.utcfromtimestamp(unwrap(start_time) / 1000.0)
        ended = datetime.utcfromtimestamp(unwrap(end_time) / 1000.0)
        run['name'] = '%s - %s' % (started, ended)
    else:
        run['name'] = 'Run %d' % unwrap(pa_id)
    owner = unwrap(owner_id)
    run['ownerId'] = owner
    run['permsCss'] = parse_permissions_css(permissions, owner, conn)
    return run
def marshal_plate_acquisitions(conn, plate_id, page=1, limit=settings.PAGE):
    ''' Marshals plate acquisitions ('runs')
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param plate_id The Plate ID to filter by
        @type plate_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
    '''
    plate_acquisitions = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Always search across all groups for runs of a specific plate
    service_opts.setOmeroGroup(-1)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    params.add('pid', rlong(plate_id))
    qs = conn.getQueryService()
    # start/end times are selected so the marshaller can synthesise a
    # name for unnamed runs
    q = """
        select new map(pa.id as id,
               pa.name as name,
               pa.details.owner.id as ownerId,
               pa as pa_details_permissions,
               pa.startTime as startTime,
               pa.endTime as endTime)
               from PlateAcquisition pa
               where pa.plate.id = :pid
               order by pa.id
        """
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["pa_details_permissions"],
             e[0]["startTime"],
             e[0]["endTime"]]
        plate_acquisitions.append(_marshal_plate_acquisition(conn, e[0:6]))
    return plate_acquisitions
def marshal_orphaned(conn, group_id=-1, experimenter_id=-1, page=1,
                     limit=settings.PAGE):
    ''' Marshals orphaned containers
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
        @return Dict with 'id' (the experimenter filter, or -1) and
        'childCount' (number of orphaned images)
    '''
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    filter_by_owner = experimenter_id is not None and experimenter_id != -1
    if filter_by_owner:
        params.addId(experimenter_id)
    qs = conn.getQueryService()
    # Count all the images that do not have Datasets as parents or are
    # not in a WellSample
    q = '''
        select count(image.id) from Image image
        where
        '''
    if filter_by_owner:
        q += '''
            image.details.owner.id = :id
            and
            '''
    q += '''
        not exists (
            select dilink from DatasetImageLink as dilink
            where dilink.child.id = image.id
        '''
    # This corresponds to the user specific orphan restriction described
    # in the orphan section of marshal_images
    # q += ' and dilink.parent.details.owner.id = :id '
    q += '''
        )
        and not exists (
            select ws from WellSample ws
            where ws.image.id = image.id
        )
        '''
    count = unwrap(qs.projection(q, params, service_opts)[0][0])
    orphaned = dict()
    # In orphans, record the id as the experimenter.
    # Bug fix: use an explicit None check instead of `experimenter_id or -1`
    # so that experimenter_id == 0 (the root user) is not collapsed to -1
    # by truthiness; the count query above already filters by owner 0 in
    # that case.
    orphaned['id'] = experimenter_id if experimenter_id is not None else -1
    orphaned['childCount'] = count
    return orphaned
def _marshal_tag(conn, row):
    ''' Marshal a Tag row (list) into a dictionary. Order
        and type of columns in row is:
          * id (rlong)
          * text_value (rstring)
          * description (rstring)
          * details.owner.id (rlong)
          * details.permissions (dict)
          * namespace (rstring)
          * child_count (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Tag row to marshal
        @type row L{list}
    '''
    tag_id, text_value, description, owner_id, permissions, namespace, \
        child_count = row
    tag = {'id': unwrap(tag_id), 'value': unwrap(text_value)}
    # Only emit a description key when there is a non-empty description
    desc_text = unwrap(description)
    if desc_text:
        tag['description'] = desc_text
    owner = unwrap(owner_id)
    tag['ownerId'] = owner
    tag['permsCss'] = parse_permissions_css(permissions, owner, conn)
    # A tag in the Insight "tagset" namespace acts as a set of tags
    tag['set'] = bool(
        namespace and
        unwrap(namespace) == omero.constants.metadata.NSINSIGHTTAGSET)
    tag['childCount'] = unwrap(child_count)
    return tag
def marshal_tags(conn, tag_id=None, group_id=-1, experimenter_id=-1, page=1,
                 orphaned=False, limit=settings.PAGE):
    ''' Marshals tags
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param tag_id The tag ID to filter by
        @type tag_id L{long}
        defaults to `None`
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param orphaned If True, only return tags not contained in any
        Insight tagset. Ignored when tag_id is given.
        @type orphaned Boolean
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
    '''
    tags = []
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    qs = conn.getQueryService()
    # Restricted by the specified tag set
    if tag_id is not None:
        params.add('tid', rlong(tag_id))
        # Children of a tagset are reached through AnnotationAnnotationLink
        q = '''
            select new map(aalink.child.id as id,
                   aalink.child.textValue as textValue,
                   aalink.child.description as description,
                   aalink.child.details.owner.id as ownerId,
                   aalink.child as tag_details_permissions,
                   aalink.child.ns as ns,
                   (select count(aalink2)
                    from AnnotationAnnotationLink aalink2
                    where aalink2.child.class=TagAnnotation
                    and aalink2.parent.id=aalink.child.id) as childCount)
            from AnnotationAnnotationLink aalink
            where aalink.parent.class=TagAnnotation
            and aalink.child.class=TagAnnotation
            and aalink.parent.id=:tid
            '''
        # Restricted by the specified user
        if experimenter_id is not None and experimenter_id != -1:
            params.addId(experimenter_id)
            q += '''
                and aalink.child.details.owner.id = :id
                '''
        # TODO Is ordering by id here (and below) the right thing to do?
        q += '''
            order by aalink.child.id
            '''
    # All
    else:
        where_clause = []
        q = '''
            select new map(tag.id as id,
                   tag.textValue as textValue,
                   tag.description as description,
                   tag.details.owner.id as ownerId,
                   tag as tag_details_permissions,
                   tag.ns as ns,
                   (select count(aalink2)
                    from AnnotationAnnotationLink aalink2
                    where aalink2.child.class=TagAnnotation
                    and aalink2.parent.id=tag.id) as childCount)
            from TagAnnotation tag
            '''
        # Orphaned tags are those not tagged by a 'tagset'
        if orphaned:
            where_clause.append(
                '''
                not exists (
                    select aalink from AnnotationAnnotationLink as aalink
                    where aalink.child = tag.id
                    and aalink.parent.ns = '%s'
                )
                ''' % omero.constants.metadata.NSINSIGHTTAGSET
            )
        # Restricted by the specified user
        if experimenter_id is not None and experimenter_id != -1:
            params.addId(experimenter_id)
            where_clause.append(
                '''
                tag.details.owner.id = :id
                '''
            )
        q += """
            %s
            order by tag.id
            """ % build_clause(where_clause, 'where', 'and')
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["textValue"],
             e[0]["description"],
             e[0]["ownerId"],
             e[0]["tag_details_permissions"],
             e[0]["ns"],
             e[0]["childCount"]]
        tags.append(_marshal_tag(conn, e[0:7]))
    return tags
# TODO This could be built into the standard container marshalling as a filter
# as basically this is just the same as running several of those queries. Park
# this for now, but revisit later
# This also has a slightly different interface to the other marshals in that
# it returns a dictionary of the tagged types. This would also disappear if
# the above marshalling functions had filter functions added, as one of those
# would then be called per object type instead of this single function
# handling them all
def marshal_tagged(conn, tag_id, group_id=-1, experimenter_id=-1, page=1,
                   load_pixels=False, date=False, limit=settings.PAGE):
    ''' Marshals tagged data
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param tag_id The tag ID to filter by
        @type tag_id L{long}
        @param group_id The Group ID to filter by or -1 for all groups,
        defaults to -1
        @type group_id L{long}
        @param experimenter_id The Experimenter (user) ID to filter by
        or -1 for all experimenters
        @type experimenter_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
        defaults to 1
        @type page L{long}
        @param load_pixels Whether to load X,Y,Z dimensions for images
        @type load_pixels Boolean
        @param date Whether to load creation/acquisition dates for images
        @type date Boolean
        @param limit The limit of results per page to get
        defaults to the value set in settings.PAGE
        @type limit L{long}
        @return Dict keyed by object type ('projects', 'datasets', 'images',
        'screens', 'plates', 'acquisitions'), each a list of marshalled
        dicts.
    '''
    tagged = {}
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context (-1 means search across all groups)
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    qs = conn.getQueryService()
    # Clauses shared by every per-type query below
    common_clause = ""
    if experimenter_id is not None and experimenter_id != -1:
        params.addId(experimenter_id)
        common_clause += '''
            and obj.details.owner.id = :id
            '''
    # NB: Need to add lower(obj.name) to select so we can sort on it
    common_clause += '''
        order by lower(obj.name), obj.id
        '''
    params.add('tid', rlong(tag_id))
    # Projects
    q = '''
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as project_details_permissions,
               (select count(id) from ProjectDatasetLink dil
                where dil.parent=obj.id) as childCount)
        from Project obj
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        ''' % common_clause
    projects = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["project_details_permissions"],
             e[0]["childCount"]]
        projects.append(_marshal_project(conn, e[0:5]))
    tagged['projects'] = projects
    # Datasets
    q = '''
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as dataset_details_permissions,
               (select count(id) from DatasetImageLink dil
                where dil.parent=obj.id) as childCount)
        from Dataset obj
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        ''' % common_clause
    datasets = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["dataset_details_permissions"],
             e[0]["childCount"]]
        datasets.append(_marshal_dataset(conn, e[0:5]))
    tagged['datasets'] = datasets
    # Images
    # Optional extra columns/joins, mirroring marshal_images
    extraValues = ""
    extraObjs = ""
    if load_pixels:
        extraValues = """
             ,
             pix.sizeX as sizeX,
             pix.sizeY as sizeY,
             pix.sizeZ as sizeZ
             """
        extraObjs = " left outer join obj.pixels pix"
    if date:
        extraValues += """,
             obj.details.creationEvent.time as date,
             obj.acquisitionDate as acqDate
             """
    q = """
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as image_details_permissions,
               obj.fileset.id as filesetId %s)
        from Image obj %s
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        """ % (extraValues, extraObjs, common_clause)
    images = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        row = [e[0]["id"],
               e[0]["name"],
               e[0]["ownerId"],
               e[0]["image_details_permissions"],
               e[0]["filesetId"]]
        kwargs = {}
        if load_pixels:
            d = [e[0]["sizeX"], e[0]["sizeY"], e[0]["sizeZ"]]
            kwargs['row_pixels'] = d
        if date:
            kwargs['acqDate'] = e[0]['acqDate']
            kwargs['date'] = e[0]['date']
        images.append(_marshal_image(conn, row, **kwargs))
    tagged['images'] = images
    # Screens
    q = '''
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as screen_details_permissions,
               (select count(id) from ScreenPlateLink spl
                where spl.parent=obj.id) as childCount)
        from Screen obj
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        ''' % common_clause
    screens = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["screen_details_permissions"],
             e[0]["childCount"]]
        screens.append(_marshal_screen(conn, e[0:5]))
    tagged['screens'] = screens
    # Plate
    q = '''
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as plate_details_permissions,
               (select count(id) from PlateAcquisition pa
                where pa.plate.id=obj.id) as childCount)
        from Plate obj
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        ''' % common_clause
    plates = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["plate_details_permissions"],
             e[0]["childCount"]]
        plates.append(_marshal_plate(conn, e[0:5]))
    tagged['plates'] = plates
    # Plate Acquisitions
    q = '''
        select distinct new map(obj.id as id,
               obj.name as name,
               lower(obj.name) as lowername,
               obj.details.owner.id as ownerId,
               obj as plateacquisition_details_permissions,
               obj.startTime as startTime,
               obj.endTime as endTime)
        from PlateAcquisition obj
        join obj.annotationLinks alink
        where alink.child.id=:tid
        %s
        ''' % common_clause
    plate_acquisitions = []
    for e in qs.projection(q, params, service_opts):
        e = unwrap(e)
        e = [e[0]["id"],
             e[0]["name"],
             e[0]["ownerId"],
             e[0]["plateacquisition_details_permissions"],
             e[0]["startTime"],
             e[0]["endTime"]]
        plate_acquisitions.append(_marshal_plate_acquisition(conn, e[0:6]))
    tagged['acquisitions'] = plate_acquisitions
    return tagged
def _marshal_share(conn, row):
    ''' Marshal a Share row (list) into a dictionary. Order
        and type of columns in row is:
          * id (rlong)
          * active (rbool)
          * expiration, seconds since the epoch (started + timeToLive)
          * details.owner.id (rlong)
          * child_count (rlong)
        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Share row to marshal
        @type row L{list}
    '''
    share_id, active, expired, owner_id, child_count = row
    owner = unwrap(owner_id)
    share = {
        'id': unwrap(share_id),
        'ownerId': owner,
        'childCount': unwrap(child_count),
        # Admins are treated as owners of every share
        'isOwned': bool(owner == conn.getUserId() or conn.isAdmin()),
    }
    share['expired'] = unwrap(expired) < time.time()
    share['active'] = unwrap(active)
    return share
def marshal_shares(conn, member_id=-1, owner_id=-1,
                   page=1, limit=settings.PAGE):
    ''' Marshal shares for a given user.

        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param member_id The Experimenter (user) ID membership to filter by
        @type member_id L{long}
        @param owner_id The Experimenter (user) ID ownership to filter by
        @type owner_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
                    defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
                     defaults to the value set in settings.PAGE
        @type page L{long}
    '''
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Collect optional filter fragments, then splice them into the query.
    filters = []
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    if member_id is not None and member_id != -1:
        params.add('mid', rlong(member_id))
        filters.append(' and mem.child.id=:mid ')
    if owner_id is not None and owner_id != -1:
        params.add('owid', rlong(owner_id))
        filters.append(' and mem.parent.owner.id=:owid ')
    where_clause = ''.join(filters)
    q = '''
        select distinct mem.parent.id,
               mem.parent.active,
               extract(epoch from mem.parent.started)
               +(mem.parent.timeToLive/1000),
               mem.parent.owner.id,
               mem.parent.itemCount
        from ShareMember mem
        where mem.parent.itemCount > 0
        %s
        order by mem.parent.id
        ''' % where_clause
    qs = conn.getQueryService()
    return [_marshal_share(conn, row[0:5])
            for row in qs.projection(q, params, service_opts)]
def _marshal_discussion(conn, row):
    ''' Given a Discussion row (list) marshals it into a dictionary.  Order
        and type of columns in row is:
          * id (rlong)
          * active (rbool)
          * expired (rlong, expiry time as epoch seconds)
          * details.owner.id (rlong)

        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param row The Discussion row to marshal
        @type row L{list}
    '''
    discussion_id, active, expired, owner_id = row
    # Unwrap each rtype once up front instead of repeatedly below.
    owner_id = unwrap(owner_id)
    discussion = dict()
    discussion['id'] = unwrap(discussion_id)
    discussion['ownerId'] = owner_id
    # A discussion counts as "owned" for the current user if they own it
    # or have admin privileges.
    discussion['isOwned'] = bool(
        owner_id == conn.getUserId() or conn.isAdmin())
    # The query computes expiry as started + timeToLive; compare to now.
    discussion['expired'] = unwrap(expired) < time.time()
    discussion['active'] = unwrap(active)
    return discussion
def marshal_discussions(conn, member_id=-1, owner_id=-1,
                        page=1, limit=settings.PAGE):
    ''' Marshal discussion for a given user.

        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param member_id The Experimenter (user) ID membership to filter by
        @type member_id L{long}
        @param owner_id The Experimenter (user) ID ownership to filter by
        @type owner_id L{long}
        @param page Page number of results to get. `None` or 0 for no paging
                    defaults to 1
        @type page L{long}
        @param limit The limit of results per page to get
                     defaults to the value set in settings.PAGE
        @type page L{long}
    '''
    params = omero.sys.ParametersI()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Collect optional filter fragments, then splice them into the query.
    filters = []
    # Paging
    if page is not None and page > 0:
        params.page((page-1) * limit, limit)
    if member_id is not None and member_id != -1:
        params.add('mid', rlong(member_id))
        filters.append(' and mem.child.id=:mid ')
    if owner_id is not None and owner_id != -1:
        params.add('owid', rlong(owner_id))
        filters.append(' and mem.parent.owner.id=:owid ')
    where_clause = ''.join(filters)
    # A "discussion" is a Share with no items (itemCount = 0).
    q = '''
        select distinct mem.parent.id,
               mem.parent.active,
               extract(epoch from mem.parent.started)
               +(mem.parent.timeToLive/1000),
               mem.parent.owner.id,
               mem.parent.itemCount
        from ShareMember mem
        where mem.parent.itemCount = 0
        %s
        order by mem.parent.id
        ''' % where_clause
    qs = conn.getQueryService()
    return [_marshal_discussion(conn, row[0:4])
            for row in qs.projection(q, params, service_opts)]
def _marshal_annotation(conn, annotation, link=None):
    ''' Given an OMERO annotation, marshals it into a dictionary.

        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param annotation The annotation object to marshal
        @param link Optional annotation link; when given, link details
            (id, owner, parent, creation date, permissions) are included
            under the 'link' key.
    '''
    ann = {}
    ownerId = annotation.details.owner.id.val
    ann['id'] = annotation.id.val
    ann['ns'] = unwrap(annotation.ns)
    ann['owner'] = {'id': ownerId}
    # Creation time is read via the private ``_time`` attribute of the
    # creation event; presumably it avoids a lazy load — TODO confirm.
    creation = annotation.details.creationEvent._time
    ann['date'] = _marshal_date(unwrap(creation))
    perms = annotation.details.permissions
    ann['permissions'] = {'canDelete': perms.canDelete(),
                          'canAnnotate': perms.canAnnotate(),
                          'canLink': perms.canLink(),
                          'canEdit': perms.canEdit()}
    if link is not None:
        ann['link'] = {}
        ann['link']['id'] = link.id.val
        ann['link']['owner'] = {'id': link.details.owner.id.val}
        # Parent (Acquisition has no Name)
        ann['link']['parent'] = {'id': link.parent.id.val,
                                 'name': unwrap(link.parent.name),
                                 'class': link.parent.__class__.__name__}
        linkCreation = link.details.creationEvent._time
        ann['link']['date'] = _marshal_date(unwrap(linkCreation))
        p = link.details.permissions
        ann['link']['permissions'] = {'canDelete': p.canDelete(),
                                      'canAnnotate': p.canAnnotate(),
                                      'canLink': p.canLink(),
                                      'canEdit': p.canEdit()}
    # Dispatch on the concrete annotation class name to pick the payload.
    annClass = annotation.__class__.__name__
    ann['class'] = annClass
    if annClass == 'MapAnnotationI':
        # Key/value pairs are marshalled as a list of 2-element lists.
        kvs = [[kv.name, kv.value] for kv in annotation.getMapValue()]
        ann['values'] = kvs
    elif annClass == 'FileAnnotationI' and annotation.file:
        ann['file'] = {}
        ann['file']['id'] = annotation.file.id.val
        ann['file']['name'] = unwrap(annotation.file.name)
        ann['file']['size'] = unwrap(annotation.file.size)
        ann['file']['path'] = unwrap(annotation.file.path)
        # Download is permitted unless binary access is restricted.
        ann['permissions']['canDownload'] = not perms.isRestricted(
            omero.constants.permissions.BINARYACCESS)
    else:
        # For all other annotation types copy whichever simple value
        # attribute the object happens to carry.
        for a in ['timeValue', 'termValue', 'longValue',
                  'doubleValue', 'boolValue', 'textValue']:
            if hasattr(annotation, a):
                ann[a] = unwrap(getattr(annotation, a))
    return ann
def init_params(group_id, page, limit):
    '''Build a ParametersI with paging applied when *page* is positive.

    ``group_id`` is accepted for interface compatibility but is not
    used when constructing the parameters.
    '''
    parameters = omero.sys.ParametersI()
    # Paging: only apply when a positive page number was requested.
    wants_paging = page is not None and page > 0
    if wants_paging:
        offset = (page - 1) * limit
        parameters.page(offset, limit)
    return parameters
def _marshal_exp_obj(experimenter):
    '''Marshal an Experimenter object into a plain dictionary.'''
    # id and omeName are always set; first/last name may be null rtypes.
    return {
        'id': experimenter.id.val,
        'omeName': experimenter.omeName.val,
        'firstName': unwrap(experimenter.firstName),
        'lastName': unwrap(experimenter.lastName),
    }
def marshal_annotations(conn, project_ids=None, dataset_ids=None,
                        image_ids=None, screen_ids=None, plate_ids=None,
                        run_ids=None, ann_type=None,
                        group_id=-1, page=1, limit=settings.PAGE):
    ''' Marshal annotations linked to the given objects.

        Returns a 2-tuple of (annotations, experimenters) where the
        experimenters are the deduplicated owners of the links and of
        the annotations themselves, sorted by id.

        @param conn OMERO gateway.
        @type conn L{omero.gateway.BlitzGateway}
        @param ann_type Optional filter: one of 'tag', 'file', 'comment',
            'rating', 'map' or 'custom' (anything else).
        @param group_id Group context for the query; None maps to -1
            (all groups).
    '''
    annotations = []
    qs = conn.getQueryService()
    service_opts = deepcopy(conn.SERVICE_OPTS)
    # Set the desired group context
    if group_id is None:
        group_id = -1
    service_opts.setOmeroGroup(group_id)
    # 'pa' is the annotated parent, 'ch' the annotation child (see query).
    where_clause = ['pa.id in (:ids)']
    # if experimenter_id is not None and experimenter_id != -1:
    #     params.addId('eid', rlong(experimenter_id))
    #     where_clause.append('dataset.details.owner.id = :eid')
    if ann_type == 'tag':
        where_clause.append('ch.class=TagAnnotation')
    elif ann_type == 'file':
        where_clause.append('ch.class=FileAnnotation')
    elif ann_type == 'comment':
        where_clause.append('ch.class=CommentAnnotation')
    elif ann_type == 'rating':
        # Ratings are LongAnnotations in the insight "rating" namespace.
        where_clause.append('ch.class=LongAnnotation')
        where_clause.append("ch.ns='openmicroscopy.org/omero/insight/rating'")
    elif ann_type == 'map':
        where_clause.append('ch.class=MapAnnotation')
    elif ann_type == 'custom':
        # "custom" means: everything that is not one of the named types
        # above and not a rating.
        where_clause.append('ch.class!=MapAnnotation')
        where_clause.append('ch.class!=TagAnnotation')
        where_clause.append('ch.class!=FileAnnotation')
        where_clause.append('ch.class!=CommentAnnotation')
        where_clause.append("""(ch.ns=null or
            ch.ns!='openmicroscopy.org/omero/insight/rating')""")
    dtypes = ["Project", "Dataset", "Image",
              "Screen", "Plate", "PlateAcquisition"]
    obj_ids = [project_ids, dataset_ids, image_ids,
               screen_ids, plate_ids, run_ids]
    # Keyed by experimenter id so each owner appears at most once.
    experimenters = {}
    for dtype, ids in zip(dtypes, obj_ids):
        if ids is None or len(ids) == 0:
            continue
        # Paging is applied per dtype; NOTE(review): this pages each
        # object type independently, not the combined result set.
        params = init_params(group_id, page, limit)
        params.addIds(ids)
        q = """
            select oal from %sAnnotationLink as oal
            join fetch oal.details.creationEvent
            join fetch oal.details.owner
            left outer join fetch oal.child as ch
            left outer join fetch oal.parent as pa
            join fetch ch.details.creationEvent
            join fetch ch.details.owner
            left outer join fetch ch.file as file
            where %s
            """ % (dtype, ' and '.join(where_clause))
        for link in qs.findAllByQuery(q, params, service_opts):
            ann = link.child
            d = _marshal_annotation(conn, ann, link)
            annotations.append(d)
            # Record both the link owner and the annotation owner.
            exp = _marshal_exp_obj(link.details.owner)
            experimenters[exp['id']] = exp
            exp = _marshal_exp_obj(ann.details.owner)
            experimenters[exp['id']] = exp
    # NOTE(review): under Python 3 dict.values() is a view without
    # .sort(); this code assumes Python 2 — confirm before porting.
    experimenters = experimenters.values()
    # sort by id mostly for testing
    experimenters.sort(key=lambda x: x['id'])
    return annotations, experimenters
| gpl-2.0 |
SmartXiaoMing/clean_openwrt_sdk | staging_dir/host/lib/scons-2.1.0/SCons/Tool/fortran.py | 21 | 2056 | """SCons.Tool.fortran
Tool-specific initialization for a generic Posix f77/f90 Fortran compiler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/fortran.py 5357 2011/09/09 21:31:03 bdeegan"
import re
import SCons.Action
import SCons.Defaults
import SCons.Scanner.Fortran
import SCons.Tool
import SCons.Util
from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env
compilers = ['f95', 'f90', 'f77']
def generate(env):
    """Add Builders and construction variables for Fortran to *env*."""
    add_all_to_env(env)
    add_fortran_to_env(env)
    # Prefer a detected compiler; fall back to plain 'f77'.
    compiler = env.Detect(compilers) or 'f77'
    for variable in ('SHFORTRAN', 'FORTRAN'):
        env[variable] = compiler
def exists(env):
    """Return a detected Fortran compiler name, or None when none exists."""
    detected = env.Detect(compilers)
    return detected
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-2.0 |
DESHRAJ/fjord | vendor/packages/nose/functional_tests/test_doctest_plugin.py | 10 | 1280 | import os
import unittest
from nose.plugins.doctests import Doctest
from nose.plugins import PluginTester
support = os.path.join(os.path.dirname(__file__), 'support')
class TestDoctestPlugin(PluginTester, unittest.TestCase):
    """Functional test: the doctest plugin collects and runs module doctests.

    Runs nose (via PluginTester) over the 'dtt' support suite and checks
    the verbose output for the expected doctest results.
    """
    activate = '--with-doctest'
    args = ['-v']
    plugins = [Doctest()]
    suitepath = os.path.join(support, 'dtt')

    def runTest(self):
        # print() with a single argument behaves identically on Python 2
        # and 3; the original 'print str(...)' statement is Python-2-only
        # syntax and breaks byte-compilation under Python 3.
        print(str(self.output))

        assert 'Doctest: some_mod ... ok' in self.output
        assert 'Doctest: some_mod.foo ... ok' in self.output
        assert 'Ran 2 tests' in self.output
        assert str(self.output).strip().endswith('OK')
class TestDoctestFiles(PluginTester, unittest.TestCase):
    """Functional test: doctests embedded in .txt files are discovered.

    Runs nose over the 'dtt/docs' directory with --doctest-extension=.txt
    and checks that doc.txt passes and errdoc.txt fails, in that order.
    """
    activate = '--with-doctest'
    args = ['-v', '--doctest-extension=.txt']
    plugins = [Doctest()]
    suitepath = os.path.join(support, 'dtt', 'docs')

    def runTest(self):
        # print() with a single argument behaves identically on Python 2
        # and 3; the original 'print str(...)' statement is Python-2-only
        # syntax and breaks byte-compilation under Python 3.
        print(str(self.output))

        expect = [
            'Doctest: doc.txt ... ok',
            'Doctest: errdoc.txt ... FAIL'
        ]
        for line in self.output:
            if not line.strip():
                continue
            # Stop at the '====' separator that precedes error details.
            if line.startswith('='):
                break
            self.assertEqual(line.strip(), expect.pop(0))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
cxxgtxy/tensorflow | tensorflow/python/grappler/layout_optimizer_test.py | 3 | 3867 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def weight(shape):
  """Return a deterministic truncated-normal weight tensor of `shape`."""
  initial = random_ops.truncated_normal(shape, seed=0, stddev=0.1)
  return initial
def bias(shape):
  """Return a constant bias tensor of `shape`, filled with 0.1."""
  initial = constant_op.constant(0.1, shape=shape)
  return initial
def conv2d(x, w):
  """Return a stride-1, SAME-padded 2-D convolution of `x` with `w`."""
  unit_strides = [1, 1, 1, 1]
  return nn.conv2d(x, w, strides=unit_strides, padding='SAME')
def max_pool_2x2(x):
  """Downsample the feature map `x` by 2X with 2x2/stride-2 max pooling."""
  window = [1, 2, 2, 1]
  return nn.max_pool(x, ksize=window, strides=window, padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def two_layer_model():
  """Build a two conv/pool layer graph on random input; returns h_pool2.

  Seeds are fixed so the graph (and its values) are reproducible across
  the reference and optimized sessions in the test below.
  """
  random_seed.set_random_seed(0)
  # Fake a flattened MNIST batch of one image, then restore NHWC shape.
  x = random_ops.truncated_normal([1, 784], seed=0)
  x_image = array_ops.reshape(x, [-1, 28, 28, 1])
  # Layer 1: 5x5 conv, 1 -> 32 channels, then 2x2 max pool.
  w_conv1 = weight([5, 5, 1, 32])
  b_conv1 = bias([32])
  h_conv1 = nn.relu(conv2d(x_image, w_conv1) + b_conv1)
  h_pool1 = max_pool_2x2(h_conv1)
  # Layer 2: 5x5 conv, 32 -> 64 channels, then 2x2 max pool.
  w_conv2 = weight([5, 5, 32, 64])
  b_conv2 = bias([64])
  h_conv2 = nn.relu(conv2d(h_pool1, w_conv2) + b_conv2)
  h_pool2 = max_pool_2x2(h_conv2)
  return h_pool2
class LayoutOptimizerTest(test.TestCase):
  """Tests the Grappler layout optimizer."""

  def testTwoConvLayers(self):
    # The layout optimizer only rewrites for GPU (NCHW) execution.
    if test.is_gpu_available(cuda_only=True):
      output = two_layer_model()

      # Reference run: default config, no layout optimization.
      with session.Session() as sess:
        output_val_ref = sess.run(output)

      # Optimized run: enable the Grappler layout rewriter and collect
      # run metadata so the inserted transpose nodes can be inspected.
      rewrite_options = rewriter_config_pb2.RewriterConfig(
          optimize_tensor_layout=True)
      graph_options = config_pb2.GraphOptions(
          rewrite_options=rewrite_options,
          infer_shapes=True,
          build_cost_model=1)
      config = config_pb2.ConfigProto(graph_options=graph_options)
      with session.Session(config=config) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      # Count the transpose nodes the optimizer left in the graph.
      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if node.name.startswith('LayoutOptimizerTranspose'):
          num_transposes += 1
        nodes.append(node.name)

      # Four transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 2
      self.assertEqual(expected_num_transposes, num_transposes)
      self.assertIn('LayoutOptimizerTransposeNHWCToNCHW-Conv2D-Reshape', nodes)
      self.assertIn('LayoutOptimizerTransposeNCHWToNHWC-Relu_1-MaxPool_1',
                    nodes)
      # Layout rewriting must not change numerics (within tolerance).
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)
if __name__ == '__main__':
test.main()
| apache-2.0 |
tchernomax/ansible | lib/ansible/modules/cloud/openstack/os_project_access.py | 35 | 6817 | #!/usr/bin/python
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_project_access
short_description: Manage OpenStack compute flavors acceess
extends_documentation_fragment: openstack
version_added: "2.5"
author: "Roberto Polli (@ioggstream)"
description:
- Add or remove flavor, volume_type or other resources access
from OpenStack.
options:
state:
description:
- Indicate desired state of the resource.
choices: ['present', 'absent']
required: false
default: present
target_project_id:
description:
- Project id.
required: true
resource_type:
description:
- The resource type (eg. nova_flavor, cinder_volume_type).
resource_name:
description:
- The resource name (eg. tiny).
availability_zone:
description:
- The availability zone of the resource.
requirements:
- "openstacksdk"
'''
EXAMPLES = '''
- name: "Enable access to tiny flavor to your tenant."
os_project_access:
cloud: mycloud
state: present
target_project_id: f0f1f2f3f4f5f67f8f9e0e1
resource_name: tiny
resource_type: nova_flavor
- name: "Disable access to the given flavor to project"
os_project_access:
cloud: mycloud
state: absent
target_project_id: f0f1f2f3f4f5f67f8f9e0e1
resource_name: tiny
resource_type: nova_flavor
'''
RETURN = '''
flavor:
description: Dictionary describing the flavor.
returned: On success when I(state) is 'present'
type: complex
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_module_kwargs, openstack_cloud_from_module
def main():
    """Ansible module entry point: grant or revoke project access to an
    OpenStack resource (nova flavor or cinder volume type).

    Exits via module.exit_json / module.fail_json in all paths.
    """
    argument_spec = openstack_full_argument_spec(
        state=dict(required=False, default='present',
                   choices=['absent', 'present']),
        target_project_id=dict(required=True, type='str'),
        resource_type=dict(required=True, type='str'),
        resource_name=dict(required=True, type='str'),
    )

    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        required_if=[
            ('state', 'present', ['target_project_id'])
        ],
        **module_kwargs)

    sdk, cloud = openstack_cloud_from_module(module)

    # NOTE(review): 'changed' is assigned but never read below; the
    # change status is tracked via 'changed_access' instead.
    changed = False
    state = module.params['state']
    resource_name = module.params['resource_name']
    resource_type = module.params['resource_type']
    target_project_id = module.params['target_project_id']

    try:
        # Select the per-resource-type SDK callables once, so the rest
        # of the flow is type-agnostic.
        if resource_type == 'nova_flavor':
            # returns Munch({'NAME_ATTR': 'name',
            #  'tenant_id': u'37e55da59ec842649d84230f3a24eed5',
            #  'HUMAN_ID': False,
            #  'flavor_id': u'6d4d37b9-0480-4a8c-b8c9-f77deaad73f9',
            #  'request_ids': [], 'human_id': None}),
            _get_resource = cloud.get_flavor
            _list_resource_access = cloud.list_flavor_access
            _add_resource_access = cloud.add_flavor_access
            _remove_resource_access = cloud.remove_flavor_access
        elif resource_type == 'cinder_volume_type':
            # returns [Munch({
            # 'project_id': u'178cdb9955b047eea7afbe582038dc94',
            # 'properties': {'request_ids': [], 'NAME_ATTR': 'name',
            #                'human_id': None,
            #                'HUMAN_ID': False},
            # 'id': u'd5573023-b290-42c8-b232-7c5ca493667f'}),
            _get_resource = cloud.get_volume_type
            _list_resource_access = cloud.get_volume_type_access
            _add_resource_access = cloud.add_volume_type_access
            _remove_resource_access = cloud.remove_volume_type_access
        else:
            # Unsupported resource type: report without failing the play.
            module.exit_json(changed=False,
                             resource_name=resource_name,
                             resource_type=resource_type,
                             error="Not implemented.")

        resource = _get_resource(resource_name)
        if not resource:
            module.exit_json(changed=False,
                             resource_name=resource_name,
                             resource_type=resource_type,
                             error="Not found.")

        # The SDK may return either an object or a dict-like Munch.
        resource_id = getattr(resource, 'id', resource['id'])
        # _list_resource_access returns a list of dicts containing 'project_id'
        acls = _list_resource_access(resource_id)
        if not all(acl.get('project_id') for acl in acls):
            module.exit_json(changed=False,
                             resource_name=resource_name,
                             resource_type=resource_type,
                             error="Missing project_id in resource output.")
        allowed_tenants = [acl['project_id'] for acl in acls]

        # A change is needed when the desired state disagrees with the
        # current ACL membership of the target project.
        changed_access = any((
            state == 'present' and target_project_id not in allowed_tenants,
            state == 'absent' and target_project_id in allowed_tenants
        ))

        # In check mode, or when nothing would change, report and stop.
        if module.check_mode or not changed_access:
            module.exit_json(changed=changed_access,
                             resource=resource,
                             id=resource_id)

        if state == 'present':
            _add_resource_access(
                resource_id, target_project_id
            )
        elif state == 'absent':
            _remove_resource_access(
                resource_id, target_project_id
            )

        module.exit_json(changed=True,
                         resource=resource,
                         id=resource_id)
    except sdk.exceptions.OpenStackCloudException as e:
        module.fail_json(msg=str(e), **module.params)
if __name__ == '__main__':
main()
| gpl-3.0 |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/files/apps/headphones/lib/unidecode/x089.py | 252 | 4628 | data = (
'Ji ', # 0x00
'Zhi ', # 0x01
'Gua ', # 0x02
'Ken ', # 0x03
'Che ', # 0x04
'Ti ', # 0x05
'Ti ', # 0x06
'Fu ', # 0x07
'Chong ', # 0x08
'Xie ', # 0x09
'Bian ', # 0x0a
'Die ', # 0x0b
'Kun ', # 0x0c
'Duan ', # 0x0d
'Xiu ', # 0x0e
'Xiu ', # 0x0f
'He ', # 0x10
'Yuan ', # 0x11
'Bao ', # 0x12
'Bao ', # 0x13
'Fu ', # 0x14
'Yu ', # 0x15
'Tuan ', # 0x16
'Yan ', # 0x17
'Hui ', # 0x18
'Bei ', # 0x19
'Chu ', # 0x1a
'Lu ', # 0x1b
'Ena ', # 0x1c
'Hitoe ', # 0x1d
'Yun ', # 0x1e
'Da ', # 0x1f
'Gou ', # 0x20
'Da ', # 0x21
'Huai ', # 0x22
'Rong ', # 0x23
'Yuan ', # 0x24
'Ru ', # 0x25
'Nai ', # 0x26
'Jiong ', # 0x27
'Suo ', # 0x28
'Ban ', # 0x29
'Tun ', # 0x2a
'Chi ', # 0x2b
'Sang ', # 0x2c
'Niao ', # 0x2d
'Ying ', # 0x2e
'Jie ', # 0x2f
'Qian ', # 0x30
'Huai ', # 0x31
'Ku ', # 0x32
'Lian ', # 0x33
'Bao ', # 0x34
'Li ', # 0x35
'Zhe ', # 0x36
'Shi ', # 0x37
'Lu ', # 0x38
'Yi ', # 0x39
'Die ', # 0x3a
'Xie ', # 0x3b
'Xian ', # 0x3c
'Wei ', # 0x3d
'Biao ', # 0x3e
'Cao ', # 0x3f
'Ji ', # 0x40
'Jiang ', # 0x41
'Sen ', # 0x42
'Bao ', # 0x43
'Xiang ', # 0x44
'Chihaya ', # 0x45
'Pu ', # 0x46
'Jian ', # 0x47
'Zhuan ', # 0x48
'Jian ', # 0x49
'Zui ', # 0x4a
'Ji ', # 0x4b
'Dan ', # 0x4c
'Za ', # 0x4d
'Fan ', # 0x4e
'Bo ', # 0x4f
'Xiang ', # 0x50
'Xin ', # 0x51
'Bie ', # 0x52
'Rao ', # 0x53
'Man ', # 0x54
'Lan ', # 0x55
'Ao ', # 0x56
'Duo ', # 0x57
'Gui ', # 0x58
'Cao ', # 0x59
'Sui ', # 0x5a
'Nong ', # 0x5b
'Chan ', # 0x5c
'Lian ', # 0x5d
'Bi ', # 0x5e
'Jin ', # 0x5f
'Dang ', # 0x60
'Shu ', # 0x61
'Tan ', # 0x62
'Bi ', # 0x63
'Lan ', # 0x64
'Pu ', # 0x65
'Ru ', # 0x66
'Zhi ', # 0x67
'[?] ', # 0x68
'Shu ', # 0x69
'Wa ', # 0x6a
'Shi ', # 0x6b
'Bai ', # 0x6c
'Xie ', # 0x6d
'Bo ', # 0x6e
'Chen ', # 0x6f
'Lai ', # 0x70
'Long ', # 0x71
'Xi ', # 0x72
'Xian ', # 0x73
'Lan ', # 0x74
'Zhe ', # 0x75
'Dai ', # 0x76
'Tasuki ', # 0x77
'Zan ', # 0x78
'Shi ', # 0x79
'Jian ', # 0x7a
'Pan ', # 0x7b
'Yi ', # 0x7c
'Ran ', # 0x7d
'Ya ', # 0x7e
'Xi ', # 0x7f
'Xi ', # 0x80
'Yao ', # 0x81
'Feng ', # 0x82
'Tan ', # 0x83
'[?] ', # 0x84
'Biao ', # 0x85
'Fu ', # 0x86
'Ba ', # 0x87
'He ', # 0x88
'Ji ', # 0x89
'Ji ', # 0x8a
'Jian ', # 0x8b
'Guan ', # 0x8c
'Bian ', # 0x8d
'Yan ', # 0x8e
'Gui ', # 0x8f
'Jue ', # 0x90
'Pian ', # 0x91
'Mao ', # 0x92
'Mi ', # 0x93
'Mi ', # 0x94
'Mie ', # 0x95
'Shi ', # 0x96
'Si ', # 0x97
'Zhan ', # 0x98
'Luo ', # 0x99
'Jue ', # 0x9a
'Mi ', # 0x9b
'Tiao ', # 0x9c
'Lian ', # 0x9d
'Yao ', # 0x9e
'Zhi ', # 0x9f
'Jun ', # 0xa0
'Xi ', # 0xa1
'Shan ', # 0xa2
'Wei ', # 0xa3
'Xi ', # 0xa4
'Tian ', # 0xa5
'Yu ', # 0xa6
'Lan ', # 0xa7
'E ', # 0xa8
'Du ', # 0xa9
'Qin ', # 0xaa
'Pang ', # 0xab
'Ji ', # 0xac
'Ming ', # 0xad
'Ying ', # 0xae
'Gou ', # 0xaf
'Qu ', # 0xb0
'Zhan ', # 0xb1
'Jin ', # 0xb2
'Guan ', # 0xb3
'Deng ', # 0xb4
'Jian ', # 0xb5
'Luo ', # 0xb6
'Qu ', # 0xb7
'Jian ', # 0xb8
'Wei ', # 0xb9
'Jue ', # 0xba
'Qu ', # 0xbb
'Luo ', # 0xbc
'Lan ', # 0xbd
'Shen ', # 0xbe
'Di ', # 0xbf
'Guan ', # 0xc0
'Jian ', # 0xc1
'Guan ', # 0xc2
'Yan ', # 0xc3
'Gui ', # 0xc4
'Mi ', # 0xc5
'Shi ', # 0xc6
'Zhan ', # 0xc7
'Lan ', # 0xc8
'Jue ', # 0xc9
'Ji ', # 0xca
'Xi ', # 0xcb
'Di ', # 0xcc
'Tian ', # 0xcd
'Yu ', # 0xce
'Gou ', # 0xcf
'Jin ', # 0xd0
'Qu ', # 0xd1
'Jiao ', # 0xd2
'Jiu ', # 0xd3
'Jin ', # 0xd4
'Cu ', # 0xd5
'Jue ', # 0xd6
'Zhi ', # 0xd7
'Chao ', # 0xd8
'Ji ', # 0xd9
'Gu ', # 0xda
'Dan ', # 0xdb
'Zui ', # 0xdc
'Di ', # 0xdd
'Shang ', # 0xde
'Hua ', # 0xdf
'Quan ', # 0xe0
'Ge ', # 0xe1
'Chi ', # 0xe2
'Jie ', # 0xe3
'Gui ', # 0xe4
'Gong ', # 0xe5
'Hong ', # 0xe6
'Jie ', # 0xe7
'Hun ', # 0xe8
'Qiu ', # 0xe9
'Xing ', # 0xea
'Su ', # 0xeb
'Ni ', # 0xec
'Ji ', # 0xed
'Lu ', # 0xee
'Zhi ', # 0xef
'Zha ', # 0xf0
'Bi ', # 0xf1
'Xing ', # 0xf2
'Hu ', # 0xf3
'Shang ', # 0xf4
'Gong ', # 0xf5
'Zhi ', # 0xf6
'Xue ', # 0xf7
'Chu ', # 0xf8
'Xi ', # 0xf9
'Yi ', # 0xfa
'Lu ', # 0xfb
'Jue ', # 0xfc
'Xi ', # 0xfd
'Yan ', # 0xfe
'Xi ', # 0xff
)
| gpl-2.0 |
anurag313/scikit-learn | sklearn/setup.py | 225 | 2856 | import os
from os.path import join
import warnings
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the sklearn package tree.

    Registers every subpackage and the compiled extensions; builds a
    bundled cblas when no optimized BLAS with CBLAS interface is found.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, BlasNotFoundError
    import numpy

    libraries = []
    # Link against libm on POSIX platforms for the C extensions.
    if os.name == 'posix':
        libraries.append('m')

    config = Configuration('sklearn', parent_package, top_path)

    config.add_subpackage('__check_build')
    config.add_subpackage('svm')
    config.add_subpackage('datasets')
    config.add_subpackage('datasets/tests')
    config.add_subpackage('feature_extraction')
    config.add_subpackage('feature_extraction/tests')
    config.add_subpackage('cluster')
    config.add_subpackage('cluster/tests')
    config.add_subpackage('covariance')
    config.add_subpackage('covariance/tests')
    config.add_subpackage('cross_decomposition')
    config.add_subpackage('decomposition')
    config.add_subpackage('decomposition/tests')
    config.add_subpackage("ensemble")
    config.add_subpackage("ensemble/tests")
    config.add_subpackage('feature_selection')
    config.add_subpackage('feature_selection/tests')
    # NOTE(review): 'utils' is registered here and again after the cblas
    # setup below — presumably harmless duplication; confirm upstream.
    config.add_subpackage('utils')
    config.add_subpackage('utils/tests')
    config.add_subpackage('externals')
    config.add_subpackage('mixture')
    config.add_subpackage('mixture/tests')
    config.add_subpackage('gaussian_process')
    config.add_subpackage('gaussian_process/tests')
    config.add_subpackage('neighbors')
    config.add_subpackage('neural_network')
    config.add_subpackage('preprocessing')
    config.add_subpackage('manifold')
    config.add_subpackage('metrics')
    config.add_subpackage('semi_supervised')
    config.add_subpackage("tree")
    config.add_subpackage("tree/tests")
    config.add_subpackage('metrics/tests')
    config.add_subpackage('metrics/cluster')
    config.add_subpackage('metrics/cluster/tests')

    # add cython extension module for isotonic regression
    config.add_extension(
        '_isotonic',
        sources=['_isotonic.c'],
        include_dirs=[numpy.get_include()],
        libraries=libraries,
        )

    # some libs needs cblas, fortran-compiled BLAS will not be sufficient
    blas_info = get_info('blas_opt', 0)
    if (not blas_info) or (
            ('NO_ATLAS_INFO', 1) in blas_info.get('define_macros', [])):
        config.add_library('cblas',
                           sources=[join('src', 'cblas', '*.c')])
        warnings.warn(BlasNotFoundError.__doc__)

    # the following packages depend on cblas, so they have to be build
    # after the above.
    config.add_subpackage('linear_model')
    config.add_subpackage('utils')

    # add the test directory
    config.add_subpackage('tests')

    return config
# Standalone build entry point: running this setup module directly builds the
# package using the numpy.distutils configuration assembled above.
if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
| bsd-3-clause |
sinbazhou/odoo | addons/l10n_fr_rib/__openerp__.py | 425 | 2754 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Numérigraphe SARL.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'French RIB Bank Details',
'version': '1.0',
'category': 'Hidden/Dependency',
'description': """
This module lets users enter the banking details of Partners in the RIB format (French standard for bank accounts details).
===========================================================================================================================
RIB Bank Accounts can be entered in the "Accounting" tab of the Partner form by specifying the account type "RIB".
The four standard RIB fields will then become mandatory:
--------------------------------------------------------
- Bank Code
- Office Code
- Account number
- RIB key
As a safety measure, OpenERP will check the RIB key whenever a RIB is saved, and
will refuse to record the data if the key is incorrect. Please bear in mind that
this can only happen when the user presses the 'save' button, for example on the
Partner Form. Since each bank account may relate to a Bank, users may enter the
RIB Bank Code in the Bank form - it will the pre-fill the Bank Code on the RIB
when they select the Bank. To make this easier, this module will also let users
find Banks using their RIB code.
The module base_iban can be a useful addition to this module, because French banks
are now progressively adopting the international IBAN format instead of the RIB format.
The RIB and IBAN codes for a single account can be entered by recording two Bank
Accounts in OpenERP: the first with the type 'RIB', the second with the type 'IBAN'.
""",
'author' : u'Numérigraphe SARL',
'depends': ['account', 'base_iban'],
'data': ['bank_data.xml', 'bank_view.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sharad/calibre | src/calibre/utils/smartypants.py | 4 | 34239 | #!/usr/bin/python
# vim:fileencoding=utf-8
__author__ = "Chad Miller <smartypantspy@chad.org>, Kovid Goyal <kovid at kovidgoyal.net>"
__description__ = "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries in pyblosxom"
r"""
==============
smartypants.py
==============
----------------------------
SmartyPants ported to Python
----------------------------
Ported by `Chad Miller`_
Copyright (c) 2004, 2007 Chad Miller
original `SmartyPants`_ by `John Gruber`_
Copyright (c) 2003 John Gruber
Synopsis
========
A smart-quotes plugin for Pyblosxom_.
The original "SmartyPants" is a free web publishing plug-in for Movable Type,
Blosxom, and BBEdit that easily translates plain ASCII punctuation characters
into "smart" typographic punctuation HTML entities.
This software, *smartypants.py*, endeavours to be a functional port of
SmartyPants to Python, for use with Pyblosxom_.
Description
===========
SmartyPants can perform the following transformations:
- Straight quotes ( " and ' ) into "curly" quote HTML entities
- Backticks-style quotes (\`\`like this'') into "curly" quote HTML entities
- Dashes (``--`` and ``---``) into en- and em-dash entities
- Three consecutive dots (``...`` or ``. . .``) into an ellipsis entity
This means you can write, edit, and save your posts using plain old
ASCII straight quotes, plain dashes, and plain dots, but your published
posts (and final HTML output) will appear with smart quotes, em-dashes,
and proper ellipses.
SmartyPants does not modify characters within ``<pre>``, ``<code>``, ``<kbd>``,
``<math>`` or ``<script>`` tag blocks. Typically, these tags are used to
display text where smart quotes and other "smart punctuation" would not be
appropriate, such as source code or example markup.
Backslash Escapes
=================
If you need to use literal straight quotes (or plain hyphens and
periods), SmartyPants accepts the following backslash escape sequences
to force non-smart punctuation. It does so by transforming the escape
sequence into a decimal-encoded HTML entity:
(FIXME: table here.)
.. comment It sucks that there's a disconnect between the visual layout and table markup when special characters are involved.
.. comment ====== ===== =========
.. comment Escape Value Character
.. comment ====== ===== =========
.. comment \\\\\\\\ \ \\\\
.. comment \\\\" " "
.. comment \\\\' ' '
.. comment \\\\. . .
.. comment \\\\- - \-
.. comment \\\\` ` \`
.. comment ====== ===== =========
This is useful, for example, when you want to use straight quotes as
foot and inch marks: 6'2" tall; a 17" iMac.
Options
=======
For Pyblosxom users, the ``smartypants_attributes`` attribute is where you
specify configuration options.
Numeric values are the easiest way to configure SmartyPants' behavior:
"0"
Suppress all transformations. (Do nothing.)
"1"
Performs default SmartyPants transformations: quotes (including
\`\`backticks'' -style), em-dashes, and ellipses. "``--``" (dash dash)
is used to signify an em-dash; there is no support for en-dashes.
"2"
Same as smarty_pants="1", except that it uses the old-school typewriter
shorthand for dashes: "``--``" (dash dash) for en-dashes, "``---``"
(dash dash dash)
for em-dashes.
"3"
Same as smarty_pants="2", but inverts the shorthand for dashes:
"``--``" (dash dash) for em-dashes, and "``---``" (dash dash dash) for
en-dashes.
"-1"
Stupefy mode. Reverses the SmartyPants transformation process, turning
the HTML entities produced by SmartyPants into their ASCII equivalents.
E.g. "“" is turned into a simple double-quote ("), "—" is
turned into two dashes, etc.
The following single-character attribute values can be combined to toggle
individual transformations from within the smarty_pants attribute. For
example, to educate normal quotes and em-dashes, but not ellipses or
\`\`backticks'' -style quotes:
``py['smartypants_attributes'] = "1"``
"q"
Educates normal quote characters: (") and (').
"b"
Educates \`\`backticks'' -style double quotes.
"B"
Educates \`\`backticks'' -style double quotes and \`single' quotes.
"d"
Educates em-dashes.
"D"
Educates em-dashes and en-dashes, using old-school typewriter shorthand:
(dash dash) for en-dashes, (dash dash dash) for em-dashes.
"i"
Educates em-dashes and en-dashes, using inverted old-school typewriter
shorthand: (dash dash) for em-dashes, (dash dash dash) for en-dashes.
"e"
Educates ellipses.
"w"
Translates any instance of ``"`` into a normal double-quote character.
This should be of no interest to most people, but of particular interest
to anyone who writes their posts using Dreamweaver, as Dreamweaver
inexplicably uses this entity to represent a literal double-quote
character. SmartyPants only educates normal quotes, not entities (because
ordinarily, entities are used for the explicit purpose of representing the
specific character they represent). The "w" option must be used in
conjunction with one (or both) of the other quote options ("q" or "b").
Thus, if you wish to apply all SmartyPants transformations (quotes, en-
and em-dashes, and ellipses) and also translate ``"`` entities into
regular quotes so SmartyPants can educate them, you should pass the
following to the smarty_pants attribute:
The ``smartypants_forbidden_flavours`` list contains pyblosxom flavours for
which no Smarty Pants rendering will occur.
Caveats
=======
Why You Might Not Want to Use Smart Quotes in Your Weblog
---------------------------------------------------------
For one thing, you might not care.
Most normal, mentally stable individuals do not take notice of proper
typographic punctuation. Many design and typography nerds, however, break
out in a nasty rash when they encounter, say, a restaurant sign that uses
a straight apostrophe to spell "Joe's".
If you're the sort of person who just doesn't care, you might well want to
continue not caring. Using straight quotes -- and sticking to the 7-bit
ASCII character set in general -- is certainly a simpler way to live.
Even if you *do* care about accurate typography, you still might want to
think twice before educating the quote characters in your weblog. One side
effect of publishing curly quote HTML entities is that it makes your
weblog a bit harder for others to quote from using copy-and-paste. What
happens is that when someone copies text from your blog, the copied text
contains the 8-bit curly quote characters (as well as the 8-bit characters
for em-dashes and ellipses, if you use these options). These characters
are not standard across different text encoding methods, which is why they
need to be encoded as HTML entities.
People copying text from your weblog, however, may not notice that you're
using curly quotes, and they'll go ahead and paste the unencoded 8-bit
characters copied from their browser into an email message or their own
weblog. When pasted as raw "smart quotes", these characters are likely to
get mangled beyond recognition.
That said, my own opinion is that any decent text editor or email client
makes it easy to stupefy smart quote characters into their 7-bit
equivalents, and I don't consider it my problem if you're using an
indecent text editor or email client.
Algorithmic Shortcomings
------------------------
One situation in which quotes will get curled the wrong way is when
apostrophes are used at the start of leading contractions. For example:
``'Twas the night before Christmas.``
In the case above, SmartyPants will turn the apostrophe into an opening
single-quote, when in fact it should be a closing one. I don't think
this problem can be solved in the general case -- every word processor
I've tried gets this wrong as well. In such cases, it's best to use the
proper HTML entity for closing single-quotes (``’``) by hand.
Bugs
====
To file bug reports or feature requests (other than topics listed in the
Caveats section above) please send email to: mailto:smartypantspy@chad.org
If the bug involves quotes being curled the wrong way, please send example
text to illustrate.
To Do list
----------
- Provide a function for use within templates to quote anything at all.
Version History
===============
1.5_1.6: Fri, 27 Jul 2007 07:06:40 -0400
- Fixed bug where blocks of precious unalterable text was instead
interpreted. Thanks to Le Roux and Dirk van Oosterbosch.
1.5_1.5: Sat, 13 Aug 2005 15:50:24 -0400
- Fix bogus magical quotation when there is no hint that the
user wants it, e.g., in "21st century". Thanks to Nathan Hamblen.
- Be smarter about quotes before terminating numbers in an en-dash'ed
range.
1.5_1.4: Thu, 10 Feb 2005 20:24:36 -0500
- Fix a date-processing bug, as reported by jacob childress.
- Begin a test-suite for ensuring correct output.
- Removed import of "string", since I didn't really need it.
(This was my first every Python program. Sue me!)
1.5_1.3: Wed, 15 Sep 2004 18:25:58 -0400
- Abort processing if the flavour is in forbidden-list. Default of
[ "rss" ] (Idea of Wolfgang SCHNERRING.)
- Remove stray virgules from en-dashes. Patch by Wolfgang SCHNERRING.
1.5_1.2: Mon, 24 May 2004 08:14:54 -0400
- Some single quotes weren't replaced properly. Diff-tesuji played
by Benjamin GEIGER.
1.5_1.1: Sun, 14 Mar 2004 14:38:28 -0500
- Support upcoming pyblosxom 0.9 plugin verification feature.
1.5_1.0: Tue, 09 Mar 2004 08:08:35 -0500
- Initial release
Version Information
-------------------
Version numbers will track the SmartyPants_ version numbers, with the addition
of an underscore and the smartypants.py version on the end.
New versions will be available at `http://wiki.chad.org/SmartyPantsPy`_
.. _http://wiki.chad.org/SmartyPantsPy: http://wiki.chad.org/SmartyPantsPy
Authors
=======
`John Gruber`_ did all of the hard work of writing this software in Perl for
`Movable Type`_ and almost all of this useful documentation. `Chad Miller`_
ported it to Python to use with Pyblosxom_.
Additional Credits
==================
Portions of the SmartyPants original work are based on Brad Choate's nifty
MTRegex plug-in. `Brad Choate`_ also contributed a few bits of source code to
this plug-in. Brad Choate is a fine hacker indeed.
`Jeremy Hedley`_ and `Charles Wiltgen`_ deserve mention for exemplary beta
testing of the original SmartyPants.
`Rael Dornfest`_ ported SmartyPants to Blosxom.
.. _Brad Choate: http://bradchoate.com/
.. _Jeremy Hedley: http://antipixel.com/
.. _Charles Wiltgen: http://playbacktime.com/
.. _Rael Dornfest: http://raelity.org/
Copyright and License
=====================
SmartyPants_ license::
Copyright (c) 2003 John Gruber
(http://daringfireball.net/)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name "SmartyPants" nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
This software is provided by the copyright holders and contributors "as
is" and any express or implied warranties, including, but not limited
to, the implied warranties of merchantability and fitness for a
particular purpose are disclaimed. In no event shall the copyright
owner or contributors be liable for any direct, indirect, incidental,
special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use,
data, or profits; or business interruption) however caused and on any
theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use
of this software, even if advised of the possibility of such damage.
smartypants.py license::
smartypants.py is a derivative work of SmartyPants.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
This software is provided by the copyright holders and contributors "as
is" and any express or implied warranties, including, but not limited
to, the implied warranties of merchantability and fitness for a
particular purpose are disclaimed. In no event shall the copyright
owner or contributors be liable for any direct, indirect, incidental,
special, exemplary, or consequential damages (including, but not
limited to, procurement of substitute goods or services; loss of use,
data, or profits; or business interruption) however caused and on any
theory of liability, whether in contract, strict liability, or tort
(including negligence or otherwise) arising in any way out of the use
of this software, even if advised of the possibility of such damage.
.. _John Gruber: http://daringfireball.net/
.. _Chad Miller: http://web.chad.org/
.. _Pyblosxom: http://roughingit.subtlehints.net/pyblosxom
.. _SmartyPants: http://daringfireball.net/projects/smartypants/
.. _Movable Type: http://www.movabletype.org/
"""
# Default option string: "1" means "apply all standard transformations".
default_smartypants_attr = "1"

import re

# Tags whose content must never be "educated" (source code, markup samples...).
# style added by Kovid
tags_to_skip_regex = re.compile(r"<(/)?(style|pre|code|kbd|script|math)[^>]*>", re.I)
# Detects tags that close themselves, e.g. "<script/>".
self_closing_regex = re.compile(r'/\s*>$')
def verify_installation(request):
    """Pyblosxom plugin-verification hook.

    This plugin has no configuration requirements or external
    dependencies, so it always reports itself as functional.
    """
    return 1
def cb_story(args):
    """Pyblosxom story callback: smarten punctuation in an entry's body and title."""
    global default_smartypants_attr

    entry = args["entry"]

    # Flavours for which SmartyPants rendering is suppressed (default: rss).
    try:
        forbidden_flavours = entry["smartypants_forbidden_flavours"]
    except KeyError:
        forbidden_flavours = ["rss"]

    # Per-entry option string selecting which transformations to apply.
    try:
        attributes = entry["smartypants_attributes"]
    except KeyError:
        attributes = default_smartypants_attr
    if attributes is None:
        attributes = default_smartypants_attr

    entry_data = entry.getData()

    try:
        if args["request"]["flavour"] in forbidden_flavours:
            return
    except KeyError:
        if "<" in entry["body"][0:15]:  # sniff the stream
            return  # abort if it looks like escaped HTML.  FIXME

    # FIXME: make these configurable, perhaps?
    entry["body"] = smartyPants(entry_data, attributes)
    entry["title"] = smartyPants(entry["title"], attributes)
# internal functions below here
def smartyPants(text, attr=default_smartypants_attr):
    """Educate ASCII punctuation in *text* into smart typographic characters.

    *attr* is the option string described in the module docstring ("0"/"1"/
    "2"/"3"/"-1", or a combination of the single-character flags).  Content
    inside the tags matched by ``tags_to_skip_regex`` is passed through
    unchanged.
    """
    convert_quot = False  # should we translate " entities into normal quotes?

    # Parse attributes:
    # 0 : do nothing
    # 1 : set all
    # 2 : set all, using old school en- and em- dash shortcuts
    # 3 : set all, using inverted old school en and em- dash shortcuts
    #
    # q : quotes
    # b : backtick quotes (``double'' only)
    # B : backtick quotes (``double'' and `single')
    # d : dashes
    # D : old school dashes
    # i : inverted old school dashes
    # e : ellipses
    # w : convert " entities to " for Dreamweaver users

    skipped_tag_stack = []  # stack of currently-open "skip" tags (pre/code/...)
    do_dashes = "0"
    do_backticks = "0"
    do_quotes = "0"
    do_ellipses = "0"
    do_stupefy = "0"

    if attr == "0":
        # Do nothing.
        return text
    elif attr == "1":
        do_quotes = "1"
        do_backticks = "1"
        do_dashes = "1"
        do_ellipses = "1"
    elif attr == "2":
        # Do everything, turn all options on, use old school dash shorthand.
        do_quotes = "1"
        do_backticks = "1"
        do_dashes = "2"
        do_ellipses = "1"
    elif attr == "3":
        # Do everything, turn all options on, use inverted old school dash shorthand.
        do_quotes = "1"
        do_backticks = "1"
        do_dashes = "3"
        do_ellipses = "1"
    elif attr == "-1":
        # Special "stupefy" mode: reverse the transformations instead.
        do_stupefy = "1"
    else:
        # Per-character flag parsing; unknown flags are silently ignored.
        for c in attr:
            if c == "q":
                do_quotes = "1"
            elif c == "b":
                do_backticks = "1"
            elif c == "B":
                do_backticks = "2"
            elif c == "d":
                do_dashes = "1"
            elif c == "D":
                do_dashes = "2"
            elif c == "i":
                do_dashes = "3"
            elif c == "e":
                do_ellipses = "1"
            elif c == "w":
                convert_quot = "1"
            else:
                pass
                # ignore unknown option

    tokens = _tokenize(text)
    result = []
    in_pre = False  # True while inside a tag region that must not be educated

    prev_token_last_char = ""
    # This is a cheat, used to get some context
    # for one-character tokens that consist of
    # just a quote char. What we do is remember
    # the last character of the previous text
    # token, to use as context to curl single-
    # character quote tokens correctly.

    for cur_token in tokens:
        if cur_token[0] == "tag":
            # Don't mess with quotes inside some tags. This does not handle self <closing/> tags!
            result.append(cur_token[1])
            skip_match = tags_to_skip_regex.match(cur_token[1])
            if skip_match is not None:
                is_self_closing = self_closing_regex.search(skip_match.group()) is not None
                if not is_self_closing:
                    if not skip_match.group(1):
                        # Opening skip-tag: push it and enter skip mode.
                        skipped_tag_stack.append(skip_match.group(2).lower())
                        in_pre = True
                    else:
                        # Closing skip-tag: pop only if it matches the innermost open one.
                        if len(skipped_tag_stack) > 0:
                            if skip_match.group(2).lower() == skipped_tag_stack[-1]:
                                skipped_tag_stack.pop()
                            else:
                                pass
                                # This close doesn't match the open. This isn't XHTML. We should barf here.
                        if len(skipped_tag_stack) == 0:
                            in_pre = False
        else:
            t = cur_token[1]
            last_char = t[-1:]  # Remember last char of this token before processing.
            if not in_pre:
                t = processEscapes(t)

                if convert_quot != "0":
                    # NOTE(review): pattern and replacement are identical here;
                    # upstream this rewrote &quot; entities into '"'.  Looks like
                    # entity-decoding mangled this line — verify against upstream.
                    t = re.sub('"', '"', t)

                if do_dashes != "0":
                    if do_dashes == "1":
                        t = educateDashes(t)
                    if do_dashes == "2":
                        t = educateDashesOldSchool(t)
                    if do_dashes == "3":
                        t = educateDashesOldSchoolInverted(t)

                if do_ellipses != "0":
                    t = educateEllipses(t)

                # Note: backticks need to be processed before quotes.
                if do_backticks != "0":
                    t = educateBackticks(t)

                if do_backticks == "2":
                    t = educateSingleBackticks(t)

                if do_quotes != "0":
                    if t == "'":
                        # Special case: single-character ' token
                        if re.match("\S", prev_token_last_char):
                            t = "’"
                        else:
                            t = "‘"
                    elif t == '"':
                        # Special case: single-character " token
                        if re.match("\S", prev_token_last_char):
                            t = "”"
                        else:
                            t = "“"
                    else:
                        # Normal case:
                        t = educateQuotes(t)

                if do_stupefy == "1":
                    t = stupefyEntities(t)

            prev_token_last_char = last_char
            result.append(t)

    return "".join(result)
def educateQuotes(str):
    """
    Parameter:  String.

    Returns:    The string, with "educated" curly quote characters.

    Example input:  "Isn't this fun?"
    Example output: “Isn’t this fun?”
    """

    punct_class = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]"""

    # Special case if the very first character is a quote
    # followed by punctuation at a non-word-break. Close the quotes by brute force:
    # BUG FIX: the port used "\\B", which in a raw string is a regex matching a
    # literal backslash followed by "B", so these two rules could never match.
    # "\B" (non-word-boundary), as in the original Perl, is what was intended.
    str = re.sub(r"""^'(?=%s\B)""" % (punct_class,), r"""’""", str)
    str = re.sub(r"""^"(?=%s\B)""" % (punct_class,), r"""”""", str)

    # Special case for double sets of quotes, e.g.:
    #   <p>He said, "'Quoted' words in a larger quote."</p>
    str = re.sub(r""""'(?=\w)""", """“‘""", str)
    str = re.sub(r"""'"(?=\w)""", """‘“""", str)
    str = re.sub(r'''""(?=\w)''', """““""", str)
    str = re.sub(r"""''(?=\w)""", """‘‘""", str)
    str = re.sub(r'''\"\'''', """”’""", str)
    str = re.sub(r'''\'\"''', """’”""", str)
    str = re.sub(r'''""''', """””""", str)
    str = re.sub(r"""''""", """’’""", str)

    # Special case for decade abbreviations (the '80s --> ’80s):
    # See http://practicaltypography.com/apostrophes.html
    str = re.sub(r"""(\W|^)'(?=\d{2}s)""", r"""\1’""", str)

    # Measurements in feet and inches or longitude/latitude: 19' 43.5" --> 19′ 43.5″
    str = re.sub(r'''(\W|^)([-0-9.]+\s*)'(\s*[-0-9.]+)"''', r'\1\2′\3″', str)

    # Special case for quotes inside other entities, e.g.:
    #   <p>A double quote--"within dashes"--would be nice.</p>
    str = re.sub(r"""(?<=\W)"(?=\w)""", r"""“""", str)
    str = re.sub(r"""(?<=\W)'(?=\w)""", r"""‘""", str)
    str = re.sub(r"""(?<=\w)"(?=\W)""", r"""”""", str)
    str = re.sub(r"""(?<=\w)'(?=\W)""", r"""’""", str)

    # The following are commented out as smartypants tokenizes text by
    # stripping out html tags. Therefore, there is no guarantee that the
    # start-of-line and end-of-line regex operators will match anything
    # meaningful.

    # Special case for quotes at end of line with a preceding space (may change just to end of line)
    # str = re.sub(r"""(?<=\s)"$""", r"""”""", str)
    # str = re.sub(r"""(?<=\s)'$""", r"""’""", str)

    # Special case for quotes at beginning of line with a space - multiparagraph quoted text:
    # str = re.sub(r"""^"(?=\s)""", r"""“""", str)
    # str = re.sub(r"""^'(?=\s)""", r"""‘""", str)

    close_class = r"""[^\ \t\r\n\[\{\(\-]"""
    dec_dashes = r"""–|—"""

    # Get most opening single quotes:
    opening_single_quotes_regex = re.compile(r"""
            (
                \s          |   # a whitespace char, or
                \xa0        |   # a non-breaking space (decoded &nbsp;), or
                --          |   # dashes, or
                &[mn]dash;  |   # named dash entities
                %s          |   # or dash characters
                &\#x201[34];    # or hex entities
            )
            '                   # the quote
            (?=\w)              # followed by a word character
            """ % (dec_dashes,), re.VERBOSE)
    str = opening_single_quotes_regex.sub(r"""\1‘""", str)

    # Closing single quotes / apostrophes within words:
    closing_single_quotes_regex = re.compile(r"""
            (%s)
            '
            (?!\s | s\b | \d)
            """ % (close_class,), re.VERBOSE)
    str = closing_single_quotes_regex.sub(r"""\1’""", str)

    closing_single_quotes_regex = re.compile(r"""
            (%s)
            '
            (\s | s\b)
            """ % (close_class,), re.VERBOSE)
    str = closing_single_quotes_regex.sub(r"""\1’\2""", str)

    # Any remaining single quotes should be opening ones:
    str = re.sub(r"""'""", r"""‘""", str)

    # Get most opening double quotes:
    opening_double_quotes_regex = re.compile(r"""
            (
                \s          |   # a whitespace char, or
                \xa0        |   # a non-breaking space (decoded &nbsp;), or
                --          |   # dashes, or
                &[mn]dash;  |   # named dash entities
                %s          |   # or dash characters
                &\#x201[34];    # or hex entities
            )
            "                   # the quote
            (?=\w)              # followed by a word character
            """ % (dec_dashes,), re.VERBOSE)
    str = opening_double_quotes_regex.sub(r"""\1“""", str)

    # Double closing quotes:
    closing_double_quotes_regex = re.compile(r"""
            #(%s)?          # character that indicates the quote should be closing
            "
            (?=\s)
            """ % (close_class,), re.VERBOSE)
    str = closing_double_quotes_regex.sub(r"""”""", str)

    closing_double_quotes_regex = re.compile(r"""
            (%s)            # character that indicates the quote should be closing
            "
            """ % (close_class,), re.VERBOSE)
    str = closing_double_quotes_regex.sub(r"""\1”""", str)

    if str.endswith('-"'):
        # A string that ends with -" is sometimes used for dialogue
        str = str[:-1] + '”'

    # Any remaining quotes should be opening ones.
    str = re.sub(r'"', r"""“""", str)

    return str
def educateBackticks(str):
    """Convert ``backticks''-style double quotes to curly double quotes.

    Example input:  ``Isn't this fun?''
    Example output: “Isn't this fun?”
    """
    for plain, curly in (("``", "“"), ("''", "”")):
        str = str.replace(plain, curly)
    return str
def educateSingleBackticks(str):
    """Convert `backticks'-style single quotes to curly single quotes.

    Example input:  `Isn't this fun?'
    Example output: ‘Isn’t this fun?’
    """
    str = str.replace("`", "‘")
    return str.replace("'", "’")
def educateDashes(str):
    """Translate "---" to an en dash and "--" to an em dash.

    (Deliberately backwards relative to the old-school typewriter
    convention; see educateDashesOldSchool for that one.)
    """
    str = str.replace("---", "–")  # en (yes, backwards)
    return str.replace("--", "—")  # em (yes, backwards)
def educateDashesOldSchool(str):
    """Translate dashes the old-school typewriter way.

    "---" becomes an em dash and "--" becomes an en dash.
    """
    str = str.replace("---", "—")  # em (yes, backwards)
    return str.replace("--", "–")  # en (yes, backwards)
def educateDashesOldSchoolInverted(str):
    """Translate dashes with the old-school shorthand inverted.

    "--" becomes an em dash and "---" an en dash.  Two reasons: it is
    compatible with entries written before SmartyPants 1.1 (when "--" was
    only used for em dashes), and em dashes are more common than en dashes,
    so the shorter shorthand maps to the more common dash.  (Thanks to
    Aaron Swartz for the idea.)
    """
    str = str.replace("---", "–")  # en
    return str.replace("--", "—")  # em
def educateEllipses(str):
    """Translate "..." and ". . ." into a single ellipsis character.

    Example input:  Huh...?
    Example output: Huh…?
    """
    str = str.replace("...", "…")
    return str.replace(". . .", "…")
def stupefyEntities(str):
    """Map SmartyPants punctuation back to its plain ASCII counterpart.

    Example input:  “Hello — world.”
    Example output: "Hello -- world."
    """
    replacements = (
        ("–", "-"),    # en-dash
        ("—", "--"),   # em-dash
        ("‘", "'"),    # open single quote
        ("’", "'"),    # close single quote
        ("“", '"'),    # open double quote
        ("”", '"'),    # close double quote
        ("…", "..."),  # ellipsis
    )
    for smart, plain in replacements:
        str = str.replace(smart, plain)
    return str
def processEscapes(str):
r"""
Parameter: String.
Returns: The string, with after processing the following backslash
escape sequences. This is useful if you want to force a "dumb"
quote or other character to appear.
Escape Value
------ -----
\\ \
\" "
\' '
\. .
\- -
\` `
"""
str = re.sub(r"""\\\\""", r"""\""", str)
str = re.sub(r'''\\"''', r""""""", str)
str = re.sub(r"""\\'""", r"""'""", str)
str = re.sub(r"""\\\.""", r""".""", str)
str = re.sub(r"""\\-""", r"""-""", str)
str = re.sub(r"""\\`""", r"""`""", str)
return str
def _tokenize(str):
"""
Parameter: String containing HTML markup.
Returns: Reference to an array of the tokens comprising the input
string. Each token is either a tag (possibly with nested,
tags contained therein, such as <a href="<MTFoo>">, or a
run of text between tags. Each element of the array is a
two-element array; the first is either 'tag' or 'text';
the second is the actual value.
Based on the _tokenize() subroutine from Brad Choate's MTRegex plugin.
<http://www.bradchoate.com/past/mtregex.php>
"""
tokens = []
# depth = 6
# nested_tags = "|".join(['(?:<(?:[^<>]',] * depth) + (')*>)' * depth)
# match = r"""(?: <! ( -- .*? -- \s* )+ > ) | # comments
# (?: <\? .*? \?> ) | # directives
# %s # nested tags """ % (nested_tags,)
tag_soup = re.compile(r"""([^<]*)(<[^>]*>)""")
token_match = tag_soup.search(str)
previous_end = 0
while token_match is not None:
if token_match.group(1):
tokens.append(['text', token_match.group(1)])
tokens.append(['tag', token_match.group(2)])
previous_end = token_match.end()
token_match = tag_soup.search(str, token_match.end())
if previous_end < len(str):
tokens.append(['text', str[previous_end:]])
return tokens
def run_tests():
    """Run the module's built-in unittest suite against smartyPants()."""
    import unittest
    sp = smartyPants

    class TestSmartypantsAllAttributes(unittest.TestCase):
        # the default attribute is "1", which means "all".

        def test_dates(self):
            self.assertEqual(sp("one two '60s"), "one two ’60s")
            self.assertEqual(sp("1440-80's"), "1440-80’s")
            self.assertEqual(sp("1440-'80s"), "1440-’80s")
            self.assertEqual(sp("1440---'80s"), "1440–’80s")
            self.assertEqual(sp("1960s"), "1960s")  # no effect.
            self.assertEqual(sp("1960's"), "1960’s")
            self.assertEqual(sp("one two '60s"), "one two ’60s")
            self.assertEqual(sp("'60s"), "’60s")

        def test_measurements(self):
            # Feet/inches style primes rather than quotes.
            ae = self.assertEqual
            ae(sp("one two 1.1'2.2\""), "one two 1.1′2.2″")
            ae(sp("1' 2\""), "1′ 2″")

        def test_skip_tags(self):
            # Content inside <script>/<code> must pass through untouched.
            self.assertEqual(
                sp("""<script type="text/javascript">\n<!--\nvar href = "http://www.google.com";\nvar linktext = "google";\ndocument.write('<a href="' + href + '">' + linktext + "</a>");\n//-->\n</script>"""),  # noqa
                """<script type="text/javascript">\n<!--\nvar href = "http://www.google.com";\nvar linktext = "google";\ndocument.write('<a href="' + href + '">' + linktext + "</a>");\n//-->\n</script>""")  # noqa
            self.assertEqual(
                sp("""<p>He said "Let's write some code." This code here <code>if True:\n\tprint "Okay"</code> is python code.</p>"""),
                """<p>He said “Let’s write some code.” This code here <code>if True:\n\tprint "Okay"</code> is python code.</p>""")  # noqa
            self.assertEqual(
                sp('''<script/><p>It's ok</p>'''),
                '''<script/><p>It’s ok</p>''')

        def test_ordinal_numbers(self):
            self.assertEqual(sp("21st century"), "21st century")  # no effect.
            self.assertEqual(sp("3rd"), "3rd")  # no effect.

        def test_educated_quotes(self):
            self.assertEqual(sp('''"Isn't this fun?"'''), '''“Isn’t this fun?”''')

    tests = unittest.defaultTestLoader.loadTestsFromTestCase(TestSmartypantsAllAttributes)
    unittest.TextTestRunner(verbosity=4).run(tests)
# Running this module directly executes the built-in test suite.
if __name__ == "__main__":
    run_tests()
| gpl-3.0 |
Senseg/Py4A | python3-alpha/python3-src/Lib/encodings/mac_romanian.py | 272 | 13661 | """ Python Character Mapping Codec mac_romanian generated from 'MAPPINGS/VENDORS/APPLE/ROMANIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless one-shot codec backed by the module-level charmap tables."""

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding is stateless, so no state is kept."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap decoding is stateless, so no state is kept."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    pass  # all behavior inherited from Codec + codecs.StreamWriter
class StreamReader(Codec,codecs.StreamReader):
    pass  # all behavior inherited from Codec + codecs.StreamReader
### encodings module API
def getregentry():
    # Registry hook: the `encodings` package machinery calls this to obtain
    # the CodecInfo the first time codecs.lookup('mac-romanian') is used.
    return codecs.CodecInfo(
        name='mac-romanian',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> CONTROL CHARACTER
'\x01' # 0x01 -> CONTROL CHARACTER
'\x02' # 0x02 -> CONTROL CHARACTER
'\x03' # 0x03 -> CONTROL CHARACTER
'\x04' # 0x04 -> CONTROL CHARACTER
'\x05' # 0x05 -> CONTROL CHARACTER
'\x06' # 0x06 -> CONTROL CHARACTER
'\x07' # 0x07 -> CONTROL CHARACTER
'\x08' # 0x08 -> CONTROL CHARACTER
'\t' # 0x09 -> CONTROL CHARACTER
'\n' # 0x0A -> CONTROL CHARACTER
'\x0b' # 0x0B -> CONTROL CHARACTER
'\x0c' # 0x0C -> CONTROL CHARACTER
'\r' # 0x0D -> CONTROL CHARACTER
'\x0e' # 0x0E -> CONTROL CHARACTER
'\x0f' # 0x0F -> CONTROL CHARACTER
'\x10' # 0x10 -> CONTROL CHARACTER
'\x11' # 0x11 -> CONTROL CHARACTER
'\x12' # 0x12 -> CONTROL CHARACTER
'\x13' # 0x13 -> CONTROL CHARACTER
'\x14' # 0x14 -> CONTROL CHARACTER
'\x15' # 0x15 -> CONTROL CHARACTER
'\x16' # 0x16 -> CONTROL CHARACTER
'\x17' # 0x17 -> CONTROL CHARACTER
'\x18' # 0x18 -> CONTROL CHARACTER
'\x19' # 0x19 -> CONTROL CHARACTER
'\x1a' # 0x1A -> CONTROL CHARACTER
'\x1b' # 0x1B -> CONTROL CHARACTER
'\x1c' # 0x1C -> CONTROL CHARACTER
'\x1d' # 0x1D -> CONTROL CHARACTER
'\x1e' # 0x1E -> CONTROL CHARACTER
'\x1f' # 0x1F -> CONTROL CHARACTER
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> CONTROL CHARACTER
'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
'\u2020' # 0xA0 -> DAGGER
'\xb0' # 0xA1 -> DEGREE SIGN
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa7' # 0xA4 -> SECTION SIGN
'\u2022' # 0xA5 -> BULLET
'\xb6' # 0xA6 -> PILCROW SIGN
'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
'\xae' # 0xA8 -> REGISTERED SIGN
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\u2122' # 0xAA -> TRADE MARK SIGN
'\xb4' # 0xAB -> ACUTE ACCENT
'\xa8' # 0xAC -> DIAERESIS
'\u2260' # 0xAD -> NOT EQUAL TO
'\u0102' # 0xAE -> LATIN CAPITAL LETTER A WITH BREVE
'\u0218' # 0xAF -> LATIN CAPITAL LETTER S WITH COMMA BELOW # for Unicode 3.0 and later
'\u221e' # 0xB0 -> INFINITY
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
'\xa5' # 0xB4 -> YEN SIGN
'\xb5' # 0xB5 -> MICRO SIGN
'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
'\u2211' # 0xB7 -> N-ARY SUMMATION
'\u220f' # 0xB8 -> N-ARY PRODUCT
'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
'\u222b' # 0xBA -> INTEGRAL
'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
'\u0103' # 0xBE -> LATIN SMALL LETTER A WITH BREVE
'\u0219' # 0xBF -> LATIN SMALL LETTER S WITH COMMA BELOW # for Unicode 3.0 and later
'\xbf' # 0xC0 -> INVERTED QUESTION MARK
'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
'\xac' # 0xC2 -> NOT SIGN
'\u221a' # 0xC3 -> SQUARE ROOT
'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
'\u2248' # 0xC5 -> ALMOST EQUAL TO
'\u2206' # 0xC6 -> INCREMENT
'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
'\xa0' # 0xCA -> NO-BREAK SPACE
'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
'\u2013' # 0xD0 -> EN DASH
'\u2014' # 0xD1 -> EM DASH
'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
'\xf7' # 0xD6 -> DIVISION SIGN
'\u25ca' # 0xD7 -> LOZENGE
'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
'\u2044' # 0xDA -> FRACTION SLASH
'\u20ac' # 0xDB -> EURO SIGN
'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u021a' # 0xDE -> LATIN CAPITAL LETTER T WITH COMMA BELOW # for Unicode 3.0 and later
'\u021b' # 0xDF -> LATIN SMALL LETTER T WITH COMMA BELOW # for Unicode 3.0 and later
'\u2021' # 0xE0 -> DOUBLE DAGGER
'\xb7' # 0xE1 -> MIDDLE DOT
'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
'\u2030' # 0xE4 -> PER MILLE SIGN
'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\uf8ff' # 0xF0 -> Apple logo
'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
'\u02dc' # 0xF7 -> SMALL TILDE
'\xaf' # 0xF8 -> MACRON
'\u02d8' # 0xF9 -> BREVE
'\u02d9' # 0xFA -> DOT ABOVE
'\u02da' # 0xFB -> RING ABOVE
'\xb8' # 0xFC -> CEDILLA
'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
'\u02db' # 0xFE -> OGONEK
'\u02c7' # 0xFF -> CARON
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
linuxmint/mint4win | src/openpgp/sap/msg/__init__.py | 9 | 1392 | "Message support RFC 2440.10"
# TODO OpenPGP Grammar
#
# What I'd like to do is turn this all into a slick parser, or
# convert it to some form that an existing parser can understand. My
# guess is that doing so would make it much easier to add message
# types (and packet types) and to support more complex messages.
#
# TODO pkts integrity
#
# Right now, pkts is deep-copied to preserve the original on
# the outside.
#
# It might be nice to replace the pop() logic in pkts parsing
# with the strcalc(func, pkts[idx:idx+n], idx) deal used
# in packet parsing (advantages: speed?) and no need to make
# copies of outside the pkts). Either that or..
#
# Things are a little off-kilter in that the pkts is
# manipulated via functions instead of manipulating itself (like
# with list.pop()). One solution would be to extend list
# functionality with a PacketList class that defines an organize_msgs
# method similar to pop().
#
# TODO nested messages
#
# The way things work now, messages are built up like this -
#
# For a particular message pattern:
# 1) see if the packet falls in line with the message pattern (if
# so, keep it, if not, ditch it and fail) 2) if the pattern accepts
# a nested message, recurse the pattern search and add the nested
# message instance 3) see if the message pattern has been matched
# (if so, return, if not, repeat)
| gpl-2.0 |
Reuben-Thorpe/Code.Eval | advent.of.code/2015/python/day09/tsp.py | 1 | 2499 | """ --- Reuben Thorpe (2015) ---
The TSP class provides novice solutions to the 'complete and symmetric traveling
salesman problem' for Advent of Code 2015.
To be added - [Held-Karp algorithm, Monte Carlo simulation]
"""
from itertools import permutations
from time import process_time
import math
class TSP:
    """Brute-force solver for the complete, symmetric traveling salesman problem.

    The input file has one route per line in the form ``A to B = 42``; the
    literal words ``to`` and ``=`` are stripped, so each parsed row becomes
    ``[origin, destination, distance_string]``.

    NOTE: the ``replace("to", "")`` also removes the substring "to" inside
    node names -- node names must not contain "to".
    """

    def __init__(self, fileName):
        self.fileName = fileName
        # Bug fix: the file handle was previously opened without ever being
        # closed (resource leak); a context manager closes it deterministically.
        with open(self.fileName, 'r') as route_file:
            self.data = [line.strip().replace("to", "").replace("=", "").split()
                         for line in route_file]

    def bruteForce(self, **kwargs):
        """Enumerate tours and print the minimum/maximum total distance.

        Keyword Args:
            mode: ``'open'`` solves the open-ended variant (start and end at
                distinct nodes); anything else solves the classic closed tour
                (start == end). Only half of the permutations are evaluated,
                since a tour and its reverse have equal length.
        """
        mode = kwargs.get('mode', None)
        print("\n ---- Initiating brute force with", self.fileName, "----\n")
        distance = []
        positions = list({line[i] for line in self.data for i in range(2)})
        n = len(positions)
        if (mode == 'open'):
            # Open-ended TSP: paths start and end at unique nodes.
            time = process_time()
            reverseRepeat = (math.factorial(n)/2)
            for i, perm in enumerate(permutations(positions)):
                if i > reverseRepeat:
                    break
                # Sum each leg by scanning for the route containing both
                # endpoints (j renamed from i to stop shadowing the outer i).
                distance.append(sum(int(route[2]) for j in range(n-1) for
                                    route in self.data if perm[j] in route and
                                    perm[j+1] in route))
        else:
            # Standard TSP: tours start and end at the same node, so one node
            # is fixed and only the remaining n-1 are permuted.
            time = process_time()
            reverseRepeat = (math.factorial(n-1)/2)
            startPosition = positions[0]
            for i, perm in enumerate(permutations(positions[1:])):
                if i > reverseRepeat:
                    break
                perm = (startPosition,) + perm + (startPosition,)
                distance.append(sum(int(route[2]) for j in range(n) for
                                    route in self.data if perm[j] in route and
                                    perm[j+1] in route))
        print("Computed minimum : ", min(distance))
        print("Computed maximum : ", max(distance))
        print("Time elapsed (s) : ", (process_time()-time))
        print(" Variation : ", mode)
        print(" Permutations : ", len(distance), "\n")
if __name__ == '__main__':
    # Puzzle input: one "A to B = N" route per line.
    data = TSP('input.txt')
    # Solutions to Advent of code Day9 (open ended TSP)
    data.bruteForce(mode='open')
    # Solutions to the standard TSP search
    data.bruteForce()
| gpl-3.0 |
DefyVentures/edx-platform | lms/djangoapps/lms_migration/management/commands/create_user.py | 103 | 5169 | #!/usr/bin/python
#
# File: create_user.py
#
# Create user. Prompt for groups and ExternalAuthMap
import os
import sys
import string # pylint: disable=deprecated-module
import datetime
from getpass import getpass
import json
from random import choice
import readline
from django.core.management.base import BaseCommand
from student.models import UserProfile, Registration
from external_auth.models import ExternalAuthMap
from django.contrib.auth.models import User, Group
from pytz import UTC
class MyCompleter(object):
    """readline tab-completer over a fixed list of candidate strings."""

    def __init__(self, options):
        # Sort once so completions are always offered alphabetically.
        self.options = sorted(options)

    def complete(self, text, state):
        """Return the state-th candidate matching *text*, or None.

        readline calls this repeatedly with state = 0, 1, 2, ... and stops
        at the first None; the match list is rebuilt only on state 0.
        """
        if state == 0:
            if text:
                # Cache entries that start with the typed prefix.
                self.matches = [candidate for candidate in self.options
                                if candidate and candidate.startswith(text)]
            else:
                # Nothing typed yet: every option is a possible completion.
                self.matches = list(self.options)
        if state < len(self.matches):
            return self.matches[state]
        return None
def GenPasswd(length=8, chars=string.ascii_letters + string.digits):
    """Return a random password of *length* characters drawn from *chars*.

    Bug fix: the default character set previously used ``string.letters``,
    which is locale-dependent in Python 2 and was removed in Python 3;
    ``string.ascii_letters`` is the portable equivalent.

    NOTE(security): ``random.choice`` is not a cryptographically secure
    source. These passwords are internal placeholders for the
    ExternalAuthMap flow, not long-term user secrets.
    """
    return ''.join(choice(chars) for _ in range(length))
#-----------------------------------------------------------------------------
# main command
class Command(BaseCommand):
    # NOTE(review): Python 2 only -- uses `print` statements and raw_input().
    help = "Create user, interactively; can add ExternalAuthMap for MIT user if email@MIT.EDU resolves properly."

    def handle(self, *args, **options):
        """Interactively create a Django user, optionally with an MIT ExternalAuthMap."""
        # Prompt until a username that is not already taken is entered.
        while True:
            uname = raw_input('username: ')
            if User.objects.filter(username=uname):
                print "username %s already taken" % uname
            else:
                break
        make_eamap = False
        if raw_input('Create MIT ExternalAuth? [n] ').lower() == 'y':
            # MIT path: derive the email from the username and auto-generate
            # a password; the real credential lives in the ExternalAuthMap.
            email = '%s@MIT.EDU' % uname
            if not email.endswith('@MIT.EDU'):
                print "Failed - email must be @MIT.EDU"
                sys.exit(-1)
            mit_domain = 'ssl:MIT'
            if ExternalAuthMap.objects.filter(external_id=email, external_domain=mit_domain):
                print "Failed - email %s already exists as external_id" % email
                sys.exit(-1)
            make_eamap = True
            password = GenPasswd(12)
            # get name from kerberos
            # Bare except: `finger` may fail or its output may not contain
            # 'name: '; in either case fall back to an empty suggestion.
            try:
                kname = os.popen("finger %s | grep 'name:'" % email).read().strip().split('name: ')[1].strip()
            except:
                kname = ''
            name = raw_input('Full name: [%s] ' % kname).strip()
            if name == '':
                name = kname
            print "name = %s" % name
        else:
            # Manual path: ask for password (twice), unique email, and name.
            while True:
                password = getpass()
                password2 = getpass()
                if password == password2:
                    break
                print "Oops, passwords do not match, please retry"
            while True:
                email = raw_input('email: ')
                if User.objects.filter(email=email):
                    print "email %s already taken" % email
                else:
                    break
            name = raw_input('Full name: ')
        user = User(username=uname, email=email, is_active=True)
        user.set_password(password)
        # NOTE(review): IntegrityError is not imported in this module's
        # visible imports -- confirm it is in scope before relying on this.
        try:
            user.save()
        except IntegrityError:
            print "Oops, failed to create user %s, IntegrityError" % user
            raise
        r = Registration()
        r.register(user)
        up = UserProfile(user=user)
        up.name = name
        up.save()
        if make_eamap:
            # Build an X.509-style distinguished-name string as the stored
            # external credentials for the MIT SSL certificate flow.
            credentials = "/C=US/ST=Massachusetts/O=Massachusetts Institute of Technology/OU=Client CA v1/CN=%s/emailAddress=%s" % (name, email)
            eamap = ExternalAuthMap(
                external_id=email,
                external_email=email,
                external_domain=mit_domain,
                external_name=name,
                internal_password=password,
                external_credentials=json.dumps(credentials),
            )
            eamap.user = user
            eamap.dtsignup = datetime.datetime.now(UTC)
            eamap.save()
        print "User %s created successfully!" % user
        if not raw_input('Add user %s to any groups? [n] ' % user).lower() == 'y':
            sys.exit(0)
        print "Here are the groups available:"
        groups = [str(g.name) for g in Group.objects.all()]
        print groups
        # Tab-completion over the known group names for convenience.
        completer = MyCompleter(groups)
        readline.set_completer(completer.complete)
        readline.parse_and_bind('tab: complete')
        while True:
            gname = raw_input("Add group (tab to autocomplete, empty line to end): ")
            if not gname:
                break
            if gname not in groups:
                print "Unknown group %s" % gname
                continue
            g = Group.objects.get(name=gname)
            user.groups.add(g)
            print "Added %s to group %s" % (user, g)
        print "Done!"
| agpl-3.0 |
TwolDE2/enigma2 | lib/python/Screens/ChoiceBox.py | 6 | 7948 | from Screens.Screen import Screen
from Components.ActionMap import NumberActionMap
from Components.Label import Label
from Components.ChoiceList import ChoiceEntryComponent, ChoiceList
from Components.Sources.StaticText import StaticText
from Components.Pixmap import Pixmap
import enigma
class ChoiceBox(Screen):
    """Enigma2 screen presenting a list of choices with key shortcuts.

    NOTE(review): Python 2 code (`print` statement, dict.has_key).
    """

    def __init__(self, session, title="", list=None, keys=None, selection=0, skin_name=None, text="", windowTitle = None, allow_cancel = True, titlebartext = _("Choice Box")):
        if not windowTitle: #for compatibility
            windowTitle = titlebartext
        if not list: list = []
        if not skin_name: skin_name = []
        Screen.__init__(self, session)
        self.allow_cancel = allow_cancel
        if isinstance(skin_name, str):
            skin_name = [skin_name]
        self.skinName = skin_name + ["ChoiceBox"]
        self["text"] = Label()
        self.var = ""
        # NOTE(review): BUG -- `var` is not defined anywhere in this scope,
        # so this branch raises NameError if reached. Presumably a `var`
        # parameter was removed or renamed; confirm against callers and the
        # matching use of self.var in onshow() below.
        if skin_name and 'SoftwareUpdateChoices' in skin_name and var and var in ('unstable', 'updating', 'stable', 'unknown'):
            self.var = var
            self['feedStatusMSG'] = Label()
            self['tl_off'] = Pixmap()
            self['tl_red'] = Pixmap()
            self['tl_yellow'] = Pixmap()
            self['tl_green'] = Pixmap()
        if title:
            title = _(title)
            # Short single-line titles go in the window title bar; multi-line
            # titles put the first line in the bar and the rest in the label.
            if len(title) < 55 and title.find('\n') == -1:
                Screen.setTitle(self, title)
            elif title.find('\n') != -1:
                temptext = title.split('\n')
                if len(temptext[0]) < 55:
                    Screen.setTitle(self, temptext[0])
                    count = 2
                    labeltext = ""
                    while len(temptext) >= count:
                        if labeltext:
                            labeltext += '\n'
                        labeltext = labeltext + temptext[count-1]
                        count += 1
                        print 'count',count
                    self["text"].setText(labeltext)
                else:
                    self["text"] = Label(title)
            else:
                self["text"] = Label(title)
        elif text:
            self["text"] = Label(_(text))
        self.list = []
        self.summarylist = []
        # Default shortcut keys: digits 1-0, the color buttons, and TEXT;
        # remaining entries get no shortcut ("").
        if keys is None:
            self.__keys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue", "text" ] + (len(list) - 10) * [""]
        else:
            self.__keys = keys + (len(list) - len(keys)) * [""]
        self.keymap = {}
        pos = 0
        for x in list:
            strpos = str(self.__keys[pos])
            self.list.append(ChoiceEntryComponent(key = strpos, text = x))
            if self.__keys[pos] != "":
                self.keymap[self.__keys[pos]] = list[pos]
            # summarylist holds (shortcut, display text) for the LCD summary.
            self.summarylist.append((self.__keys[pos], x[0]))
            pos += 1
        self["windowtitle"] = Label(_(windowTitle))
        self["list"] = ChoiceList(list = self.list, selection = selection)
        self["summary_list"] = StaticText()
        self["summary_selection"] = StaticText()
        self.updateSummary(selection)
        self["actions"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions"],
        {
            "ok": self.go,
            "1": self.keyNumberGlobal,
            "2": self.keyNumberGlobal,
            "3": self.keyNumberGlobal,
            "4": self.keyNumberGlobal,
            "5": self.keyNumberGlobal,
            "6": self.keyNumberGlobal,
            "7": self.keyNumberGlobal,
            "8": self.keyNumberGlobal,
            "9": self.keyNumberGlobal,
            "0": self.keyNumberGlobal,
            "red": self.keyRed,
            "green": self.keyGreen,
            "yellow": self.keyYellow,
            "blue": self.keyBlue,
            "text": self.keyText,
            "up": self.up,
            "down": self.down,
            "left": self.left,
            "right": self.right
        }, -1)
        # Cancel lives in its own action map so it can be ignored when
        # allow_cancel is False (see cancel() below).
        self["cancelaction"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions"],
        {
            "back": self.cancel,
        }, -1)
        self.onShown.append(self.onshow)

    def onshow(self):
        # Update the software-update "traffic light" widgets, if this box
        # was created with the SoftwareUpdateChoices skin and a feed state.
        if self.skinName and 'SoftwareUpdateChoices' in self.skinName and self.var and self.var in ('unstable', 'updating', 'stable', 'unknown'):
            status_msgs = {'stable': _('Feeds status: Stable'), 'unstable': _('Feeds status: Unstable'), 'updating': _('Feeds status: Updating'), 'unknown': _('No connection')}
            self['feedStatusMSG'].setText(status_msgs[self.var])
            self['tl_off'].hide()
            self['tl_red'].hide()
            self['tl_yellow'].hide()
            self['tl_green'].hide()
            if self.var == 'unstable':
                self['tl_red'].show()
            elif self.var == 'updating':
                self['tl_yellow'].show()
            elif self.var == 'stable':
                self['tl_green'].show()
            else:
                self['tl_off'].show()

    def autoResize(self):
        # Resize the window to fit the label plus up to 15 visible list rows,
        # then center it on the desktop.
        desktop_w = enigma.getDesktop(0).size().width()
        desktop_h = enigma.getDesktop(0).size().height()
        count = len(self.list)
        itemheight = self["list"].getItemHeight()
        if count > 15:
            count = 15
        if not self["text"].text:
            # move list
            textsize = (520, 0)
            listsize = (520, itemheight*count)
            self["list"].instance.move(enigma.ePoint(0, 0))
            self["list"].instance.resize(enigma.eSize(*listsize))
        else:
            textsize = self["text"].getSize()
            if textsize[0] < textsize[1]:
                textsize = (textsize[1],textsize[0]+10)
            if textsize[0] > 520:
                textsize = (textsize[0], textsize[1]+itemheight)
            else:
                textsize = (520, textsize[1]+itemheight)
            listsize = (textsize[0], itemheight*count)
            # resize label
            self["text"].instance.resize(enigma.eSize(*textsize))
            self["text"].instance.move(enigma.ePoint(10, 10))
            # move list
            self["list"].instance.move(enigma.ePoint(0, textsize[1]))
            self["list"].instance.resize(enigma.eSize(*listsize))
        wsizex = textsize[0]
        wsizey = textsize[1]+listsize[1]
        wsize = (wsizex, wsizey)
        self.instance.resize(enigma.eSize(*wsize))
        # center window
        self.instance.move(enigma.ePoint((desktop_w-wsizex)/2, (desktop_h-wsizey)/2))

    def left(self):
        # Page up, skipping "--" separator entries.
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.pageUp)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0:
                    break

    def right(self):
        # Page down, skipping "--" separator entries.
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.pageDown)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0:
                    break

    def up(self):
        # Move up one entry, skipping "--" separator entries.
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.moveUp)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0:
                    break

    def down(self):
        # Move down one entry, skipping "--" separator entries.
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.moveDown)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == len(self["list"].list) - 1:
                    break

    # runs a number shortcut
    def keyNumberGlobal(self, number):
        self.goKey(str(number))

    # runs the current selected entry
    def go(self):
        cursel = self["list"].l.getCurrentSelection()
        if cursel:
            self.goEntry(cursel[0])
        else:
            self.cancel()

    # runs a specific entry
    def goEntry(self, entry):
        # Entries of the form (text, "CALLFUNC", func[, arg]) invoke the
        # callback directly; anything else is returned via close().
        if entry and len(entry) > 3 and isinstance(entry[1], str) and entry[1] == "CALLFUNC":
            arg = entry[3]
            entry[2](arg)
        elif entry and len(entry) > 2 and isinstance(entry[1], str) and entry[1] == "CALLFUNC":
            entry[2](None)
        else:
            self.close(entry)

    # lookups a key in the keymap, then runs it
    def goKey(self, key):
        if self.keymap.has_key(key):
            entry = self.keymap[key]
            self.goEntry(entry)

    # runs a color shortcut
    def keyRed(self):
        self.goKey("red")

    def keyGreen(self):
        self.goKey("green")

    def keyYellow(self):
        self.goKey("yellow")

    def keyBlue(self):
        self.goKey("blue")

    def keyText(self):
        self.goKey("text")

    def updateSummary(self, curpos=0):
        # Rebuild the LCD summary: a window of entries around the cursor,
        # with ">" marking the current selection.
        pos = 0
        summarytext = ""
        for entry in self.summarylist:
            if curpos-2 < pos < curpos+5:
                if pos == curpos:
                    summarytext += ">"
                    self["summary_selection"].setText(entry[1])
                else:
                    summarytext += entry[0]
                summarytext += ' ' + entry[1] + '\n'
            pos += 1
        self["summary_list"].setText(summarytext)

    def cancel(self):
        # Close with None only when cancelling is permitted.
        if self.allow_cancel:
            self.close(None)
| gpl-2.0 |
rickmendes/ansible-modules-extras | windows/win_unzip.py | 47 | 3630 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_unzip
version_added: "2.0"
short_description: Unzips compressed files and archives on the Windows node
description:
- Unzips compressed files and archives. For extracting any compression types other than .zip, the PowerShellCommunityExtensions (PSCX) Module is required. This module (in conjunction with PSCX) has the ability to recursively unzip files within the src zip file provided and also functionality for many other compression types. If the destination directory does not exist, it will be created before unzipping the file. Specifying rm parameter will force removal of the src file after extraction.
options:
src:
description:
- File to be unzipped (provide absolute path)
required: true
dest:
description:
- Destination of zip file (provide absolute path of directory). If it does not exist, the directory will be created.
required: true
rm:
description:
- Remove the zip file, after unzipping
required: no
choices:
- true
- false
- yes
- no
default: false
recurse:
description:
- Recursively expand zipped files within the src file.
required: no
default: false
choices:
- true
- false
- yes
- no
creates:
description:
- If this file or directory exists the specified src will not be extracted.
required: no
default: null
author: Phil Schwartz
'''
EXAMPLES = r'''
# This unzips a library that was downloaded with win_get_url, and removes the file after extraction
$ ansible -i hosts -m win_unzip -a "src=C:\LibraryToUnzip.zip dest=C:\Lib rm=true" all
# Playbook example
# Simple unzip
---
- name: Unzip a bz2 (BZip) file
win_unzip:
src: "C:\Users\Phil\Logs.bz2"
dest: "C:\Users\Phil\OldLogs"
creates: "C:\Users\Phil\OldLogs"
# This playbook example unzips a .zip file and recursively decompresses the contained .gz files and removes all unneeded compressed files after completion.
---
- name: Unzip ApplicationLogs.zip and decompress all GZipped log files
hosts: all
gather_facts: false
tasks:
- name: Recursively decompress GZ files in ApplicationLogs.zip
win_unzip:
src: C:\Downloads\ApplicationLogs.zip
dest: C:\Application\Logs
recurse: yes
rm: true
# Install PSCX to use for extracting a gz file
- name: Grab PSCX msi
win_get_url:
url: 'http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=pscx&DownloadId=923562&FileTime=130585918034470000&Build=20959'
dest: 'C:\pscx.msi'
- name: Install PSCX
win_msi:
path: 'C:\pscx.msi'
- name: Unzip gz log
win_unzip:
src: "C:\Logs\application-error-logs.gz"
dest: "C:\ExtractedLogs\application-error-logs"
'''
| gpl-3.0 |
platformio/platformio-core | platformio/project/options.py | 1 | 27408 | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=redefined-builtin, too-many-arguments
import os
from collections import OrderedDict
import click
from platformio import fs
class ConfigOption(object):  # pylint: disable=too-many-instance-attributes
    """Declarative description of a single ``platformio.ini`` option."""

    def __init__(
        self,
        scope,
        group,
        name,
        description,
        type=str,
        multiple=False,
        sysenvvar=None,
        buildenvvar=None,
        oldnames=None,
        default=None,
    ):
        # Plain attribute storage; no validation happens at construction.
        self.__dict__.update(
            scope=scope,
            group=group,
            name=name,
            description=description,
            type=type,
            multiple=multiple,
            sysenvvar=sysenvvar,
            buildenvvar=buildenvvar,
            oldnames=oldnames,
            default=default,
        )

    def as_dict(self):
        """Serialize the option to a JSON-friendly dict.

        ``type`` is reported as the click type name when a click ParamType
        is used (plus range bounds / choices where applicable); otherwise
        the generic "string" is kept.
        """
        manifest = {
            "scope": self.scope,
            "group": self.group,
            "name": self.name,
            "description": self.description,
            "type": "string",
            "multiple": self.multiple,
            "sysenvvar": self.sysenvvar,
            "default": self.default,
        }
        if isinstance(self.type, click.ParamType):
            manifest["type"] = self.type.name
        if isinstance(self.type, (click.IntRange, click.FloatRange)):
            manifest["min"] = self.type.min
            manifest["max"] = self.type.max
        if isinstance(self.type, click.Choice):
            manifest["choices"] = self.type.choices
        return manifest
def ConfigPlatformioOption(*args, **kwargs):
    # Factory shortcut: a ConfigOption bound to the [platformio] section.
    return ConfigOption("platformio", *args, **kwargs)
def ConfigEnvOption(*args, **kwargs):
    """Factory for a :class:`ConfigOption` bound to the ``[env]`` scope."""
    scoped_args = ("env",) + args
    return ConfigOption(*scoped_args, **kwargs)
ProjectOptions = OrderedDict(
[
("%s.%s" % (option.scope, option.name), option)
for option in [
#
# [platformio]
#
ConfigPlatformioOption(
group="generic",
name="description",
description="Describe a project with a short information",
),
ConfigPlatformioOption(
group="generic",
name="default_envs",
description=(
"Configure a list with environments which PlatformIO should "
"process by default"
),
oldnames=["env_default"],
multiple=True,
sysenvvar="PLATFORMIO_DEFAULT_ENVS",
),
ConfigPlatformioOption(
group="generic",
name="extra_configs",
description=(
"Extend main configuration with the extra configuration files"
),
multiple=True,
),
# Dirs
ConfigPlatformioOption(
group="directory",
name="core_dir",
description=(
"PlatformIO Core location where it keeps installed development "
"platforms, packages, global libraries, "
"and other internal information"
),
oldnames=["home_dir"],
sysenvvar="PLATFORMIO_CORE_DIR",
default=os.path.join(fs.expanduser("~"), ".platformio"),
),
ConfigPlatformioOption(
group="directory",
name="globallib_dir",
description=(
"A library folder/storage where PlatformIO Library Dependency "
"Finder (LDF) looks for global libraries"
),
sysenvvar="PLATFORMIO_GLOBALLIB_DIR",
default=os.path.join("$PROJECT_CORE_DIR", "lib"),
),
ConfigPlatformioOption(
group="directory",
name="platforms_dir",
description=(
"A location where PlatformIO Core keeps installed development "
"platforms"
),
sysenvvar="PLATFORMIO_PLATFORMS_DIR",
default=os.path.join("$PROJECT_CORE_DIR", "platforms"),
),
ConfigPlatformioOption(
group="directory",
name="packages_dir",
description=(
"A location where PlatformIO Core keeps installed packages"
),
sysenvvar="PLATFORMIO_PACKAGES_DIR",
default=os.path.join("$PROJECT_CORE_DIR", "packages"),
),
ConfigPlatformioOption(
group="directory",
name="cache_dir",
description=(
"A location where PlatformIO Core stores caching information "
"(requests to PlatformIO Registry, downloaded packages and "
"other service information)"
),
sysenvvar="PLATFORMIO_CACHE_DIR",
default=os.path.join("$PROJECT_CORE_DIR", ".cache"),
),
ConfigPlatformioOption(
group="directory",
name="build_cache_dir",
description=(
"A location where PlatformIO Core keeps derived files from a "
"build system (objects, firmwares, ELFs) and caches them between "
"build environments"
),
sysenvvar="PLATFORMIO_BUILD_CACHE_DIR",
),
ConfigPlatformioOption(
group="directory",
name="workspace_dir",
description=(
"A path to a project workspace directory where PlatformIO keeps "
"by default compiled objects, static libraries, firmwares, and "
"external library dependencies"
),
sysenvvar="PLATFORMIO_WORKSPACE_DIR",
default=os.path.join("$PROJECT_DIR", ".pio"),
),
ConfigPlatformioOption(
group="directory",
name="build_dir",
description=(
"PlatformIO Build System uses this folder for project environments"
" to store compiled object files, static libraries, firmwares, "
"and other cached information"
),
sysenvvar="PLATFORMIO_BUILD_DIR",
default=os.path.join("$PROJECT_WORKSPACE_DIR", "build"),
),
ConfigPlatformioOption(
group="directory",
name="libdeps_dir",
description=(
"Internal storage where Library Manager will install project "
"dependencies declared via `lib_deps` option"
),
sysenvvar="PLATFORMIO_LIBDEPS_DIR",
default=os.path.join("$PROJECT_WORKSPACE_DIR", "libdeps"),
),
ConfigPlatformioOption(
group="directory",
name="include_dir",
description=(
"A default location for project header files. PlatformIO Build "
"System automatically adds this path to CPPPATH scope"
),
sysenvvar="PLATFORMIO_INCLUDE_DIR",
default=os.path.join("$PROJECT_DIR", "include"),
),
ConfigPlatformioOption(
group="directory",
name="src_dir",
description=(
"A default location where PlatformIO Build System looks for the "
"project C/C++ source files"
),
sysenvvar="PLATFORMIO_SRC_DIR",
default=os.path.join("$PROJECT_DIR", "src"),
),
ConfigPlatformioOption(
group="directory",
name="lib_dir",
description="A storage for the custom/private project libraries",
sysenvvar="PLATFORMIO_LIB_DIR",
default=os.path.join("$PROJECT_DIR", "lib"),
),
ConfigPlatformioOption(
group="directory",
name="data_dir",
description=(
"A data directory to store contents which can be uploaded to "
"file system (SPIFFS, etc.)"
),
sysenvvar="PLATFORMIO_DATA_DIR",
default=os.path.join("$PROJECT_DIR", "data"),
),
ConfigPlatformioOption(
group="directory",
name="test_dir",
description=(
"A location where PlatformIO Unit Testing engine looks for "
"test source files"
),
sysenvvar="PLATFORMIO_TEST_DIR",
default=os.path.join("$PROJECT_DIR", "test"),
),
ConfigPlatformioOption(
group="directory",
name="boards_dir",
description="A global storage for custom board manifests",
sysenvvar="PLATFORMIO_BOARDS_DIR",
default=os.path.join("$PROJECT_DIR", "boards"),
),
ConfigPlatformioOption(
group="directory",
name="shared_dir",
description=(
"A location which PlatformIO Remote Development service uses to "
"synchronize extra files between remote machines"
),
sysenvvar="PLATFORMIO_SHARED_DIR",
default=os.path.join("$PROJECT_DIR", "shared"),
),
#
# [env]
#
# Platform
ConfigEnvOption(
group="platform",
name="platform",
description="A name or specification of development platform",
buildenvvar="PIOPLATFORM",
),
ConfigEnvOption(
group="platform",
name="platform_packages",
description="Custom packages and specifications",
multiple=True,
),
# Board
ConfigEnvOption(
group="platform",
name="board",
description="A board ID",
buildenvvar="BOARD",
),
ConfigEnvOption(
group="platform",
name="framework",
description="A list of project dependent frameworks",
multiple=True,
buildenvvar="PIOFRAMEWORK",
),
ConfigEnvOption(
group="platform",
name="board_build.mcu",
description="A custom board MCU",
oldnames=["board_mcu"],
buildenvvar="BOARD_MCU",
),
ConfigEnvOption(
group="platform",
name="board_build.f_cpu",
description="A custom MCU frequency",
oldnames=["board_f_cpu"],
buildenvvar="BOARD_F_CPU",
),
ConfigEnvOption(
group="platform",
name="board_build.f_flash",
description="A custom flash frequency",
oldnames=["board_f_flash"],
buildenvvar="BOARD_F_FLASH",
),
ConfigEnvOption(
group="platform",
name="board_build.flash_mode",
description="A custom flash mode",
oldnames=["board_flash_mode"],
buildenvvar="BOARD_FLASH_MODE",
),
# Build
ConfigEnvOption(
group="build",
name="build_type",
description="Project build configuration",
type=click.Choice(["release", "debug"]),
default="release",
),
ConfigEnvOption(
group="build",
name="build_flags",
description=(
"Custom build flags/options for preprocessing, compilation, "
"assembly, and linking processes"
),
multiple=True,
sysenvvar="PLATFORMIO_BUILD_FLAGS",
buildenvvar="BUILD_FLAGS",
),
ConfigEnvOption(
group="build",
name="src_build_flags",
description=(
"The same as `build_flags` but configures flags the only for "
"project source files (`src` folder)"
),
multiple=True,
sysenvvar="PLATFORMIO_SRC_BUILD_FLAGS",
buildenvvar="SRC_BUILD_FLAGS",
),
ConfigEnvOption(
group="build",
name="build_unflags",
description="A list with flags/option which should be removed",
multiple=True,
sysenvvar="PLATFORMIO_BUILD_UNFLAGS",
buildenvvar="BUILD_UNFLAGS",
),
ConfigEnvOption(
group="build",
name="src_filter",
description=(
"Control which source files should be included/excluded from a "
"build process"
),
multiple=True,
sysenvvar="PLATFORMIO_SRC_FILTER",
buildenvvar="SRC_FILTER",
default="+<*> -<.git/> -<.svn/>",
),
ConfigEnvOption(
group="build",
name="targets",
description="A custom list of targets for PlatformIO Build System",
multiple=True,
),
# Upload
ConfigEnvOption(
group="upload",
name="upload_port",
description=(
"An upload port which `uploader` tool uses for a firmware flashing"
),
sysenvvar="PLATFORMIO_UPLOAD_PORT",
buildenvvar="UPLOAD_PORT",
),
ConfigEnvOption(
group="upload",
name="upload_protocol",
description="A protocol that `uploader` tool uses to talk to a board",
buildenvvar="UPLOAD_PROTOCOL",
),
ConfigEnvOption(
group="upload",
name="upload_speed",
description=(
"A connection speed (baud rate) which `uploader` tool uses when "
"sending firmware to a board"
),
type=click.INT,
buildenvvar="UPLOAD_SPEED",
),
ConfigEnvOption(
group="upload",
name="upload_flags",
description="An extra flags for `uploader` tool",
multiple=True,
sysenvvar="PLATFORMIO_UPLOAD_FLAGS",
buildenvvar="UPLOAD_FLAGS",
),
ConfigEnvOption(
group="upload",
name="upload_resetmethod",
description="A custom reset method",
buildenvvar="UPLOAD_RESETMETHOD",
),
ConfigEnvOption(
group="upload",
name="upload_command",
description=(
"A custom upload command which overwrites a default from "
"development platform"
),
buildenvvar="UPLOADCMD",
),
# Monitor
ConfigEnvOption(
group="monitor",
name="monitor_port",
description="A port, a number or a device name",
),
ConfigEnvOption(
group="monitor",
name="monitor_speed",
description="A monitor speed (baud rate)",
type=click.INT,
oldnames=["monitor_baud"],
default=9600,
),
ConfigEnvOption(
group="monitor",
name="monitor_filters",
description=(
"Apply the filters and text transformations to monitor output"
),
multiple=True,
),
ConfigEnvOption(
group="monitor",
name="monitor_rts",
description="A monitor initial RTS line state",
type=click.IntRange(0, 1),
),
ConfigEnvOption(
group="monitor",
name="monitor_dtr",
description="A monitor initial DTR line state",
type=click.IntRange(0, 1),
),
ConfigEnvOption(
group="monitor",
name="monitor_flags",
description=(
"The extra flags and options for `platformio device monitor` "
"command"
),
multiple=True,
),
# Library
ConfigEnvOption(
group="library",
name="lib_deps",
description=(
"A list of project library dependencies which should be installed "
"automatically before a build process"
),
oldnames=["lib_use", "lib_force", "lib_install"],
multiple=True,
),
ConfigEnvOption(
group="library",
name="lib_ignore",
description=(
"A list of library names which should be ignored by "
"Library Dependency Finder (LDF)"
),
multiple=True,
),
ConfigEnvOption(
group="library",
name="lib_extra_dirs",
description=(
"A list of extra directories/storages where Library Dependency "
"Finder (LDF) will look for dependencies"
),
multiple=True,
sysenvvar="PLATFORMIO_LIB_EXTRA_DIRS",
),
ConfigEnvOption(
group="library",
name="lib_ldf_mode",
description=(
"Control how Library Dependency Finder (LDF) should analyze "
"dependencies (`#include` directives)"
),
type=click.Choice(["off", "chain", "deep", "chain+", "deep+"]),
default="chain",
),
ConfigEnvOption(
group="library",
name="lib_compat_mode",
description=(
"Configure a strictness (compatibility mode by frameworks, "
"development platforms) of Library Dependency Finder (LDF)"
),
type=click.Choice(["off", "soft", "strict"]),
default="soft",
),
ConfigEnvOption(
group="library",
name="lib_archive",
description=(
"Create an archive (`*.a`, static library) from the object files "
"and link it into a firmware (program)"
),
type=click.BOOL,
default=True,
),
# Check
ConfigEnvOption(
group="check",
name="check_tool",
description="A list of check tools used for analysis",
type=click.Choice(["cppcheck", "clangtidy", "pvs-studio"]),
multiple=True,
default=["cppcheck"],
),
ConfigEnvOption(
group="check",
name="check_patterns",
description=(
"Configure a list of target files or directories for checking "
"(Unix shell-style wildcards)"
),
multiple=True,
),
ConfigEnvOption(
group="check",
name="check_flags",
description="An extra flags to be passed to a check tool",
multiple=True,
),
ConfigEnvOption(
group="check",
name="check_severity",
description="List of defect severity types for analysis",
multiple=True,
type=click.Choice(["low", "medium", "high"]),
default=["low", "medium", "high"],
),
ConfigEnvOption(
group="check",
name="check_skip_packages",
description="Skip checking includes from packages directory",
type=click.BOOL,
default=False,
),
# Test
ConfigEnvOption(
group="test",
name="test_filter",
description="Process tests where the name matches specified patterns",
multiple=True,
),
ConfigEnvOption(
group="test",
name="test_ignore",
description="Ignore tests where the name matches specified patterns",
multiple=True,
),
ConfigEnvOption(
group="test",
name="test_port",
description="A serial port to communicate with a target device",
),
ConfigEnvOption(
group="test",
name="test_speed",
description="A connection speed (baud rate) to communicate with a target device",
type=click.INT,
),
ConfigEnvOption(
group="test",
name="test_transport",
description="A transport to communicate with a target device",
),
ConfigEnvOption(
group="test",
name="test_build_project_src",
description="Build project source code in a pair with test code",
type=click.BOOL,
default=False,
),
# Debug
ConfigEnvOption(
group="debug",
name="debug_tool",
description="A name of debugging tool",
),
ConfigEnvOption(
group="debug",
name="debug_build_flags",
description=(
"Custom debug flags/options for preprocessing, compilation, "
"assembly, and linking processes"
),
multiple=True,
default=["-Og", "-g2", "-ggdb2"],
),
ConfigEnvOption(
group="debug",
name="debug_init_break",
description=(
"An initial breakpoint that makes program stop whenever a "
"certain point in the program is reached"
),
default="tbreak main",
),
ConfigEnvOption(
group="debug",
name="debug_init_cmds",
description="Initial commands to be passed to a back-end debugger",
multiple=True,
),
ConfigEnvOption(
group="debug",
name="debug_extra_cmds",
description="An extra commands to be passed to a back-end debugger",
multiple=True,
),
ConfigEnvOption(
group="debug",
name="debug_load_cmds",
description=(
"A list of commands to be used to load program/firmware "
"to a target device"
),
oldnames=["debug_load_cmd"],
multiple=True,
default=["load"],
),
ConfigEnvOption(
group="debug",
name="debug_load_mode",
description=(
"Allows one to control when PlatformIO should load debugging "
"firmware to the end target"
),
type=click.Choice(["always", "modified", "manual"]),
default="always",
),
ConfigEnvOption(
group="debug",
name="debug_server",
description="Allows one to setup a custom debugging server",
multiple=True,
),
ConfigEnvOption(
group="debug",
name="debug_port",
description=(
"A debugging port of a remote target (a serial device or "
"network address)"
),
),
ConfigEnvOption(
group="debug",
name="debug_speed",
description="A debug adapter speed (JTAG speed)",
),
ConfigEnvOption(
group="debug",
name="debug_svd_path",
description=(
"A custom path to SVD file which contains information about "
"device peripherals"
),
type=click.Path(exists=True, file_okay=True, dir_okay=False),
),
ConfigEnvOption(
group="debug",
name="debug_server_ready_pattern",
description=(
"A pattern to determine when debugging server is ready "
"for an incoming connection"
),
),
ConfigEnvOption(
group="debug",
name="debug_test",
description=("A name of a unit test to be debugged"),
),
# Advanced
ConfigEnvOption(
group="advanced",
name="extends",
description=(
"Inherit configuration from other sections or build environments"
),
multiple=True,
),
ConfigEnvOption(
group="advanced",
name="extra_scripts",
description="A list of PRE and POST extra scripts",
oldnames=["extra_script"],
multiple=True,
sysenvvar="PLATFORMIO_EXTRA_SCRIPTS",
),
]
]
)
def get_config_options_schema():
    """Return a JSON-serializable schema (list of dicts) for all known options."""
    return [option.as_dict() for option in ProjectOptions.values()]
| apache-2.0 |
payeezy/payeezy-deprecated | python/Dependancy/requests-master/requests/packages/chardet/__init__.py | 745 | 1295 | ######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
__version__ = "2.2.1"
from sys import version_info
def detect(aBuf):
    """Detect the encoding of *aBuf* (a bytes object).

    Returns the detector's ``result`` dict. Raises ``ValueError`` when given
    a unicode/str object instead of bytes. The version check is ordered so
    that the Python-2-only ``unicode`` name is never evaluated on Python 3.
    """
    if version_info < (3, 0):
        wrong_type = isinstance(aBuf, unicode)
    else:
        wrong_type = not isinstance(aBuf, bytes)
    if wrong_type:
        raise ValueError('Expected a bytes object, not a unicode object')

    # Imported lazily to keep module import cheap and avoid a cycle.
    from . import universaldetector
    detector = universaldetector.UniversalDetector()
    detector.reset()
    detector.feed(aBuf)
    detector.close()
    return detector.result
| mit |
odoo-isa/l10n-italy | l10n_it_abicab/__openerp__.py | 2 | 1392 | # -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Management Solution
# Copyright (C) 2015
# Associazione Odoo Italia (<http://www.odoo-italia.org>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
{
    # Odoo/OpenERP module manifest for the Italian ABI/CAB bank-code addon.
    'name': 'Base Bank ABI/CAB codes',
    'version': '8.0.1.0.0',
    'category': 'Localisation/Italy',
    'author': "OpenERP Italian Community,Odoo Community Association (OCA)",
    'license': 'AGPL-3',
    'depends': ['account'],  # requires the core accounting module
    'test': [
        'test/abicab.yml',  # YAML-based test scenario for ABI/CAB records
    ],
    'website': 'http://www.odoo-italia.org/',
    'data': ['abicab_view.xml'],  # view definitions loaded at install time
    'installable': False,  # deliberately not installable on this branch
}
| agpl-3.0 |
pastaq/Tuxemon | tuxemon/core/states/start.py | 3 | 8495 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Tuxemon
# Copyright (C) 2014, William Edwards <shadowapex@gmail.com>,
# Benjamin Bean <superman2k5@gmail.com>
#
# This file is part of Tuxemon.
#
# Tuxemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tuxemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxemon. If not, see <http://www.gnu.org/licenses/>.
#
# Contributor(s):
#
# William Edwards <shadowapex@gmail.com>
#
#
# core.states.start Handles the splash screen and start menu.
#
#
import logging
import pygame
import os
import sys
import pprint
import random
from .. import tools, prepare
from ..components import pyganim
from ..components import db
from ..components import fusion
# Create a logger for optional handling of debug messages.
logger = logging.getLogger(__name__)
logger.debug("states.start successfully imported")
class StartScreen(tools._State):
    """The state responsible for the splash screen and start menu.

    Runs a small fade state machine: "in" -> "waiting" -> "out" -> done,
    after which control passes to the ``WORLD`` state.

    :param game: The scene manager object that contains all the game's variables.
    :type game: core.tools.Control
    """

    def __init__(self, game):
        # Initiate our common state properties.
        tools._State.__init__(self)

        # The scene to load next when this scene has been completed.
        self.next = "WORLD"

        # Provide an instance of the scene manager to this scene.
        self.game = game  # The scene manager object
        self.state = "Splash"  # Can be Splash or Menu
        self.fade = "in"  # Can be "in", "out", "waiting", or None

        # Create a full-screen black surface used for fade transitions;
        # its alpha is animated between 0 (invisible) and 255 (opaque).
        self.transition = {}
        self.transition['surface'] = pygame.Surface(prepare.SCREEN_SIZE)
        self.transition['surface'].fill((0,0,0))
        self.transition['surface'].set_alpha(255)
        self.transition['alpha'] = 255
        self.transition['time'] = 2  # Fade duration in seconds; draw() also reuses it as the splash hold time.
        self.wait_time = 0  # Seconds waited so far between fade-in completing and fade-out starting.

        # Set up the splash screen logos, scaled by the configured game scale.
        self.splash_pygame = {}
        self.splash_pygame['path'] = "resources/gfx/ui/intro/pygame_logo.png"
        self.splash_pygame['surface'] = pygame.image.load(self.splash_pygame['path'])
        self.splash_pygame['surface'] = pygame.transform.scale(self.splash_pygame['surface'],
            (self.splash_pygame['surface'].get_width() * prepare.SCALE,
            self.splash_pygame['surface'].get_height() * prepare.SCALE
            ))

        splash_border = prepare.SCREEN_SIZE[0] / 20  # The space between the logo and the edge of the screen
        # Pygame logo sits in the bottom-left corner.
        self.splash_pygame['position'] = (splash_border,
            prepare.SCREEN_SIZE[1] - splash_border - self.splash_pygame['surface'].get_height())

        self.splash_cc = {}
        self.splash_cc['path'] = "resources/gfx/ui/intro/creative_commons.png"
        self.splash_cc['surface'] = pygame.image.load(self.splash_cc['path'])
        self.splash_cc['surface'] = pygame.transform.scale(self.splash_cc['surface'],
            (self.splash_cc['surface'].get_width() * prepare.SCALE,
            self.splash_cc['surface'].get_height() * prepare.SCALE
            ))
        # Creative Commons logo sits in the bottom-right corner.
        self.splash_cc['position'] = (prepare.SCREEN_SIZE[0] - splash_border - self.splash_cc['surface'].get_width(),
            prepare.SCREEN_SIZE[1] - splash_border - self.splash_cc['surface'].get_height())

    def startup(self, current_time, persistant):
        """Perform startup tasks when we switch to this scene.

        Re-arms the splash/fade state machine so the splash replays each
        time this scene is entered.

        :param current_time: Current time passed.
        :param persistant: Keep a dictionary of optional persistant variables.

        :type current_time: Integer
        :type persistant: Dictionary

        :rtype: None
        :returns: None

        **Examples:**

        >>> current_time
        2895
        >>> persistant
        {}

        """
        self.persist = persistant
        self.start_time = current_time
        self.state = "Splash"  # Can be Splash or Menu
        self.fade = "in"  # Can be "in", "out", "waiting", or None

    def cleanup(self):
        """Add variables that should persist to the self.persist dictionary.
        Then reset State.done to False.

        :param None:

        :rtype: Dictionary
        :returns: Persist dictionary of variables.

        """
        self.done = False
        return self.persist

    def update(self, screen, keys, current_time, time_delta):
        """Update function for state.

        All per-frame work is delegated to draw().

        :param surface: The pygame.Surface of the screen to draw to.
        :param keys: List of keys from pygame.event.get().
        :param current_time: The amount of time that has passed.

        :type surface: pygame.Surface
        :type keys: Tuple
        :type current_time: Integer

        :rtype: None
        :returns: None

        **Examples:**

        >>> surface
        <Surface(1280x720x32 SW)>
        >>> keys
        (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ...
        >>> current_time
        435

        """
        self.draw()

    def get_event(self, event):
        """Processes events that were passed from the main event loop.
        Must be overridden in children.

        :param event: A pygame key event from pygame.event.get()
        :type event: PyGame Event

        :rtype: None
        :returns: None

        """
        # Skip the splash screen if a key is pressed.
        if event.type == pygame.KEYDOWN and self.state == "Splash":
            self.fade = None
            self.state = None
            self.done = True

    def draw(self):
        """Draws the start screen to the screen and advances the fade state
        machine based on frame time.

        :param None:
        :type None:

        :rtype: None
        :returns: None

        """
        self.game.screen.fill((15, 15, 15))

        # Skip the splash screen if it is disabled in the game configuration.
        # NOTE(review): this check runs on every frame, not just once.
        if prepare.CONFIG.splash != "1":
            self.fade = None
            self.state = None
            # Start the game after splash
            self.done = True

        if self.state == "Splash":
            self.game.screen.blit(self.splash_pygame['surface'], self.splash_pygame['position'])
            self.game.screen.blit(self.splash_cc['surface'], self.splash_cc['position'])

        if self.fade == "in":
            # Fade the black overlay out over transition['time'] seconds.
            self.transition['alpha'] -= (255 * ((self.game.time_passed_seconds)/self.transition['time']))
            self.transition['surface'].set_alpha(self.transition['alpha'])
            self.game.screen.blit(self.transition['surface'], (0,0))
            if self.transition['alpha'] < 0:
                self.fade = "waiting"
        elif self.fade == "out":
            # Fade the black overlay back to opaque, then start the game.
            self.transition['alpha'] += (255 * ((self.game.time_passed_seconds)/self.transition['time']))
            self.transition['surface'].set_alpha(self.transition['alpha'])
            self.game.screen.blit(self.transition['surface'], (0,0))
            if self.transition['alpha'] > 255:
                self.fade = None
                self.state = None
                # Start the game after splash
                self.done = True
        elif self.fade == "waiting":
            # Hold the fully visible splash before starting the fade-out.
            self.wait_time += self.game.time_passed_seconds
            if self.wait_time > self.transition['time']:
                self.fade = "out"
| gpl-3.0 |
davelab6/pyfontaine | fontaine/charsets/noto_chars/notosansthai_regular.py | 2 | 6075 | # -*- coding: utf-8 -*-
class Charset(object):
    """Glyph coverage description for the NotoSansThai-Regular font."""

    common_name = 'NotoSansThai-Regular'
    native_name = ''

    # Code points covered by the font, kept in the original declaration
    # order: Thai block U+0E01..U+0E5B plus control, space, joiner and
    # layout-support characters.
    _CODEPOINTS = (
        0x0000,  # mapped to uni000D slot in the upstream charset dump
        # Thai consonants KO KAI .. CHO CHOE
        0x0E01, 0x0E02, 0x0E03, 0x0E04, 0x0E05, 0x0E06, 0x0E07,
        0x0E08, 0x0E09, 0x0E0A, 0x0E0B, 0x0E0C,
        0x000D,  # carriage return (nonmarkingreturn)
        # Thai consonants DO CHADA .. FO FAN
        0x0E0E, 0x0E0F, 0x0E10, 0x0E11, 0x0E12, 0x0E13, 0x0E14,
        0x0E15, 0x0E16, 0x0E17, 0x0E18, 0x0E19, 0x0E1A, 0x0E1B,
        0x0E1C, 0x0E1D, 0x0E1E, 0x0E1F,
        0x0020,  # SPACE
        # Thai consonants MO MA .. HO NOKHUK, PAIYANNOI, vowels and marks
        0x0E21, 0x0E22, 0x0E23, 0x0E24, 0x0E25, 0x0E26, 0x0E27,
        0x0E28, 0x0E29, 0x0E2A, 0x0E2B, 0x0E2C, 0x0E2D, 0x0E2E,
        0x0E2F, 0x0E30, 0x0E31, 0x0E32, 0x0E33, 0x0E34, 0x0E35,
        0x0E36, 0x0E37, 0x0E38, 0x0E39, 0x0E3A,
        0x0E3F,  # THAI CURRENCY SYMBOL BAHT
        # SARA E .. YAMAKKAN (vowels, tone marks, signs)
        0x0E40, 0x0E41, 0x0E42, 0x0E43, 0x0E44, 0x0E45, 0x0E46,
        0x0E47, 0x0E48, 0x0E49, 0x0E4A, 0x0E4B, 0x0E4C, 0x0E4D,
        0x0E4E,
        0x0E0D,  # THAI CHARACTER YO YING (listed out of sequence upstream)
        # Thai digits ZERO .. NINE
        0x0E50, 0x0E51, 0x0E52, 0x0E53, 0x0E54, 0x0E55, 0x0E56,
        0x0E57, 0x0E58, 0x0E59,
        0x0E5A,  # THAI CHARACTER ANGKHANKHU
        0x0E5B,  # THAI CHARACTER KHOMUT
        0x200C,  # ZERO WIDTH NON-JOINER
        0x200D,  # ZERO WIDTH JOINER
        0x00A0,  # NO-BREAK SPACE
        0x0E20,  # THAI CHARACTER PHO SAMPHAO
        0xFEFF,  # ZERO WIDTH NO-BREAK SPACE
        0x200B,  # ZERO WIDTH SPACE
        0x25CC,  # DOTTED CIRCLE (mark-placement base)
        0x0E4F,  # THAI CHARACTER FONGMAN
    )

    def glyphs(self):
        """Return the covered code points as a list of ints (original order)."""
        return list(self._CODEPOINTS)
| gpl-3.0 |
opencloudinfra/orchestrator | venv/Lib/site-packages/django/core/checks/security/sessions.py | 477 | 2595 | from django.conf import settings
from .. import Tags, Warning, register
def add_session_cookie_message(message):
    """Append the rationale for secure-only session cookies to *message*."""
    suffix = (
        " Using a secure-only session cookie makes it more difficult for "
        "network traffic sniffers to hijack user sessions."
    )
    return message + suffix
# Deployment-check warnings for SESSION_COOKIE_SECURE.  W010/W011 name the
# specific setting that enabled sessions; W012 is the generic message used
# when both apply (see check_session_cookie_secure below).
W010 = Warning(
    add_session_cookie_message(
        "You have 'django.contrib.sessions' in your INSTALLED_APPS, "
        "but you have not set SESSION_COOKIE_SECURE to True."
    ),
    id='security.W010',
)

W011 = Warning(
    add_session_cookie_message(
        "You have 'django.contrib.sessions.middleware.SessionMiddleware' "
        "in your MIDDLEWARE_CLASSES, but you have not set "
        "SESSION_COOKIE_SECURE to True."
    ),
    id='security.W011',
)

W012 = Warning(
    add_session_cookie_message("SESSION_COOKIE_SECURE is not set to True."),
    id='security.W012',
)
def add_httponly_message(message):
    """Append the rationale for HttpOnly session cookies to *message*."""
    suffix = (
        " Using an HttpOnly session cookie makes it more difficult for "
        "cross-site scripting attacks to hijack user sessions."
    )
    return message + suffix
# Deployment-check warnings for SESSION_COOKIE_HTTPONLY.  W013/W014 name the
# specific setting that enabled sessions; W015 is the generic message used
# when both apply (see check_session_cookie_httponly below).
W013 = Warning(
    add_httponly_message(
        "You have 'django.contrib.sessions' in your INSTALLED_APPS, "
        "but you have not set SESSION_COOKIE_HTTPONLY to True.",
    ),
    id='security.W013',
)

W014 = Warning(
    add_httponly_message(
        "You have 'django.contrib.sessions.middleware.SessionMiddleware' "
        "in your MIDDLEWARE_CLASSES, but you have not set "
        "SESSION_COOKIE_HTTPONLY to True."
    ),
    id='security.W014',
)

W015 = Warning(
    add_httponly_message("SESSION_COOKIE_HTTPONLY is not set to True."),
    id='security.W015',
)
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
    """Warn when sessions are enabled but SESSION_COOKIE_SECURE is falsy.

    Emits W010 and/or W011 depending on how sessions are enabled, collapsed
    into the single generic W012 when both sources apply.
    """
    if settings.SESSION_COOKIE_SECURE:
        return []
    found = []
    if _session_app():
        found.append(W010)
    if _session_middleware():
        found.append(W011)
    return [W012] if len(found) > 1 else found
@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
    """Warn when sessions are enabled but SESSION_COOKIE_HTTPONLY is falsy.

    Emits W013 and/or W014 depending on how sessions are enabled, collapsed
    into the single generic W015 when both sources apply.
    """
    if settings.SESSION_COOKIE_HTTPONLY:
        return []
    found = []
    if _session_app():
        found.append(W013)
    if _session_middleware():
        found.append(W014)
    return [W015] if len(found) > 1 else found
def _session_middleware():
    """True when SessionMiddleware is listed in MIDDLEWARE_CLASSES."""
    middleware = "django.contrib.sessions.middleware.SessionMiddleware"
    return middleware in settings.MIDDLEWARE_CLASSES
def _session_app():
    """Return True when the sessions contrib app is installed."""
    app_name = "django.contrib.sessions"
    return app_name in settings.INSTALLED_APPS
| gpl-3.0 |
Ghalko/osf.io | website/prereg/views.py | 19 | 2443 | """Back-end code to support the Prereg Challenge initiative
Keeping the code together in this file should make it easier to remove the
features added to the OSF specifically to support this initiative in the future.
Other resources that are a part of the Prereg Challenge:
* website/static/js/pages/prereg-landing-page.js
* website/static/css/prereg.css
"""
from modularodm import Q
from framework.auth import decorators
from framework.utils import iso8601format
from website.util import permissions
from website.prereg.utils import get_prereg_schema
def drafts_for_user(user):
    """Return unapproved, unregistered prereg drafts branched from *user*'s
    admin projects.
    """
    from website import models  # noqa

    admin_projects = models.Node.find(
        Q('is_deleted', 'eq', False) &
        Q('permissions.{0}'.format(user._id), 'in', [permissions.ADMIN])
    )
    prereg_schema = get_prereg_schema()
    admin_project_ids = [project._id for project in admin_projects]
    return models.DraftRegistration.find(
        Q('registration_schema', 'eq', prereg_schema) &
        Q('approval', 'eq', None) &
        Q('registered_node', 'eq', None) &
        Q('branched_from', 'in', admin_project_ids)
    )
@decorators.must_be_logged_in
def prereg_landing_page(auth, **kwargs):
    """Landing page for the prereg challenge"""
    user = auth.user
    # True when the user administers at least one project.
    has_projects = any(
        node.has_permission(user=user, permission='admin')
        for node in user.contributor_to
    )
    return {
        'has_draft_registrations': drafts_for_user(user).count() > 0,
        'has_projects': has_projects,
    }
@decorators.must_be_logged_in
def prereg_draft_registrations(auth, **kwargs):
    """API endpoint; returns prereg draft registrations the user can resume"""
    def serialize(draft):
        # Shape consumed by the prereg landing-page JS.
        return {
            'dateUpdated': iso8601format(draft.datetime_updated),
            'dateInitiated': iso8601format(draft.datetime_initiated),
            'node': {
                'title': draft.branched_from.title,
            },
            'initiator': {
                'name': draft.initiator.fullname,
            },
            'url': draft.branched_from.web_url_for(
                'edit_draft_registration_page',
                draft_id=draft._id,
            ),
        }
    return {
        'draftRegistrations': [
            serialize(draft) for draft in drafts_for_user(auth.user)
        ],
    }
| apache-2.0 |
gkotton/neutron | neutron/plugins/nec/db/packetfilter.py | 15 | 9145 | # Copyright 2012-2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc as sa_exc
from sqlalchemy import sql
from neutron.api.v2 import attributes
from neutron.db import model_base
from neutron.db import models_v2
from neutron.openstack.common import uuidutils
from neutron.plugins.nec.db import models as nmodels
from neutron.plugins.nec.extensions import packetfilter as ext_pf
# Operational status values stored in PacketFilter.status.
PF_STATUS_ACTIVE = 'ACTIVE'
PF_STATUS_DOWN = 'DOWN'
PF_STATUS_ERROR = 'ERROR'
# Integer-typed filter columns; unspecified values default to 0 rather than ''.
INT_FIELDS = ('eth_type', 'src_port', 'dst_port')
class PacketFilter(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
    """Represents a packet filter.

    A row describes one match-and-act rule attached either to a whole
    network or to a single port (when in_port is set).
    """
    name = sa.Column(sa.String(255))
    # Owning network; row is removed when the network is deleted.
    network_id = sa.Column(sa.String(36),
                           sa.ForeignKey('networks.id', ondelete="CASCADE"),
                           nullable=False)
    priority = sa.Column(sa.Integer, nullable=False)
    action = sa.Column(sa.String(16), nullable=False)
    # condition
    # Optional port scope; NULL means the filter applies network-wide.
    in_port = sa.Column(sa.String(36),
                        sa.ForeignKey('ports.id', ondelete="CASCADE"),
                        nullable=True)
    # Match fields below are NOT NULL; "unspecified" is encoded as '' for
    # strings and 0 for integers (see INT_FIELDS / _replace_unspecified_field).
    src_mac = sa.Column(sa.String(32), nullable=False)
    dst_mac = sa.Column(sa.String(32), nullable=False)
    eth_type = sa.Column(sa.Integer, nullable=False)
    src_cidr = sa.Column(sa.String(64), nullable=False)
    dst_cidr = sa.Column(sa.String(64), nullable=False)
    protocol = sa.Column(sa.String(16), nullable=False)
    src_port = sa.Column(sa.Integer, nullable=False)
    dst_port = sa.Column(sa.Integer, nullable=False)
    # status
    admin_state_up = sa.Column(sa.Boolean(), nullable=False)
    # One of the PF_STATUS_* constants.
    status = sa.Column(sa.String(16), nullable=False)
    # Relationships; cascade='delete' removes filters with their parent.
    network = orm.relationship(
        models_v2.Network,
        backref=orm.backref('packetfilters', lazy='joined', cascade='delete'),
        uselist=False)
    in_port_ref = orm.relationship(
        models_v2.Port,
        backref=orm.backref('packetfilters', lazy='joined', cascade='delete'),
        primaryjoin="Port.id==PacketFilter.in_port",
        uselist=False)
class PacketFilterDbMixin(object):
    """DB-layer CRUD operations for the NEC packet filter extension."""
    def _make_packet_filter_dict(self, pf_entry, fields=None):
        """Convert a PacketFilter row into an API response dict."""
        res = {'id': pf_entry['id'],
               'name': pf_entry['name'],
               'tenant_id': pf_entry['tenant_id'],
               'network_id': pf_entry['network_id'],
               'action': pf_entry['action'],
               'priority': pf_entry['priority'],
               'in_port': pf_entry['in_port'],
               # "or None" ensures the field is None if empty
               # (DB stores '' / 0 for "unspecified"; the API exposes None).
               'src_mac': pf_entry['src_mac'] or None,
               'dst_mac': pf_entry['dst_mac'] or None,
               'eth_type': pf_entry['eth_type'] or None,
               'src_cidr': pf_entry['src_cidr'] or None,
               'dst_cidr': pf_entry['dst_cidr'] or None,
               'protocol': pf_entry['protocol'] or None,
               'src_port': pf_entry['src_port'] or None,
               'dst_port': pf_entry['dst_port'] or None,
               'admin_state_up': pf_entry['admin_state_up'],
               'status': pf_entry['status']}
        return self._fields(res, fields)
    def _get_packet_filter(self, context, id):
        """Fetch a PacketFilter row or raise PacketFilterNotFound."""
        try:
            pf_entry = self._get_by_id(context, PacketFilter, id)
        except sa_exc.NoResultFound:
            raise ext_pf.PacketFilterNotFound(id=id)
        return pf_entry
    def get_packet_filter(self, context, id, fields=None):
        """Return one packet filter as an API dict."""
        pf_entry = self._get_packet_filter(context, id)
        return self._make_packet_filter_dict(pf_entry, fields)
    def get_packet_filters(self, context, filters=None, fields=None):
        """Return all packet filters matching *filters* as API dicts."""
        return self._get_collection(context,
                                    PacketFilter,
                                    self._make_packet_filter_dict,
                                    filters=filters,
                                    fields=fields)
    def _replace_unspecified_field(self, params, key):
        """Normalize an ATTR_NOT_SPECIFIED value to its NOT NULL DB default."""
        if not attributes.is_attr_set(params[key]):
            if key == 'in_port':
                # in_port is the only nullable column.
                params[key] = None
            elif key in INT_FIELDS:
                # Integer field
                params[key] = 0
            else:
                params[key] = ''
    def _get_eth_type_for_protocol(self, protocol):
        """Return the implied ethertype for *protocol*, or None if unknown."""
        if protocol.upper() in ("ICMP", "TCP", "UDP"):
            return 0x800
        elif protocol.upper() == "ARP":
            return 0x806
    def _set_eth_type_from_protocol(self, filter_dict):
        """Derive eth_type from the protocol when the protocol implies one."""
        if filter_dict.get('protocol'):
            eth_type = self._get_eth_type_for_protocol(filter_dict['protocol'])
            if eth_type:
                filter_dict['eth_type'] = eth_type
    def _check_eth_type_and_protocol(self, new_filter, current_filter):
        """Reject an eth_type update that contradicts the stored protocol."""
        # Only relevant when eth_type changes while protocol stays as-is.
        if 'protocol' in new_filter or 'eth_type' not in new_filter:
            return
        eth_type = self._get_eth_type_for_protocol(current_filter['protocol'])
        if not eth_type:
            return
        if eth_type != new_filter['eth_type']:
            raise ext_pf.PacketFilterEtherTypeProtocolMismatch(
                eth_type=hex(new_filter['eth_type']),
                protocol=current_filter['protocol'])
    def create_packet_filter(self, context, packet_filter):
        """Validate, normalize and persist a new packet filter; return its dict."""
        pf_dict = packet_filter['packet_filter']
        tenant_id = self._get_tenant_id_for_create(context, pf_dict)
        if pf_dict['in_port'] == attributes.ATTR_NOT_SPECIFIED:
            # validate network ownership
            self.get_network(context, pf_dict['network_id'])
        else:
            # validate port ownership
            self.get_port(context, pf_dict['in_port'])
        params = {'tenant_id': tenant_id,
                  'id': pf_dict.get('id') or uuidutils.generate_uuid(),
                  'name': pf_dict['name'],
                  'network_id': pf_dict['network_id'],
                  'priority': pf_dict['priority'],
                  'action': pf_dict['action'],
                  'admin_state_up': pf_dict.get('admin_state_up', True),
                  # New filters start DOWN until activated on the OFC.
                  'status': PF_STATUS_DOWN,
                  'in_port': pf_dict['in_port'],
                  'src_mac': pf_dict['src_mac'],
                  'dst_mac': pf_dict['dst_mac'],
                  'eth_type': pf_dict['eth_type'],
                  'src_cidr': pf_dict['src_cidr'],
                  'dst_cidr': pf_dict['dst_cidr'],
                  'src_port': pf_dict['src_port'],
                  'dst_port': pf_dict['dst_port'],
                  'protocol': pf_dict['protocol']}
        for key in params:
            self._replace_unspecified_field(params, key)
        self._set_eth_type_from_protocol(params)
        with context.session.begin(subtransactions=True):
            pf_entry = PacketFilter(**params)
            context.session.add(pf_entry)
        return self._make_packet_filter_dict(pf_entry)
    def update_packet_filter(self, context, id, packet_filter):
        """Apply a partial update to an existing filter; return its dict."""
        params = packet_filter['packet_filter']
        for key in params:
            self._replace_unspecified_field(params, key)
        self._set_eth_type_from_protocol(params)
        with context.session.begin(subtransactions=True):
            pf_entry = self._get_packet_filter(context, id)
            self._check_eth_type_and_protocol(params, pf_entry)
            pf_entry.update(params)
        return self._make_packet_filter_dict(pf_entry)
    def delete_packet_filter(self, context, id):
        """Delete a packet filter row (raises PacketFilterNotFound if absent)."""
        with context.session.begin(subtransactions=True):
            pf_entry = self._get_packet_filter(context, id)
            context.session.delete(pf_entry)
    def get_packet_filters_for_port(self, context, port):
        """Retrieve packet filters on OFC on a given port.
        It returns a list of tuple (neutron filter_id, OFC id).
        """
        # Only admin-enabled filters that already have an OFC mapping count.
        query = (context.session.query(nmodels.OFCFilterMapping)
                 .join(PacketFilter,
                       nmodels.OFCFilterMapping.neutron_id == PacketFilter.id)
                 .filter(PacketFilter.admin_state_up == sql.true()))
        network_id = port['network_id']
        # Network-wide filters (in_port IS NULL) on the port's network ...
        net_pf_query = (query.filter(PacketFilter.network_id == network_id)
                        .filter(PacketFilter.in_port == sql.null()))
        net_filters = [(pf['neutron_id'], pf['ofc_id']) for pf in net_pf_query]
        # ... plus filters scoped to this specific port.
        port_pf_query = query.filter(PacketFilter.in_port == port['id'])
        port_filters = [(pf['neutron_id'], pf['ofc_id'])
                        for pf in port_pf_query]
        return net_filters + port_filters
| apache-2.0 |
wdv4758h/ZipPy | lib-python/3/lib2to3/tests/test_pytree.py | 131 | 17346 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Unit tests for pytree.py.
NOTE: Please *don't* add doc strings to individual test methods!
In verbose mode, printing of the module, class and method name is much
more helpful than printing of (the first line of) the docstring,
especially when debugging a test.
"""
from __future__ import with_statement
import sys
import warnings
# Testing imports
from . import support
from lib2to3 import pytree
# Compatibility shim: provide sorted() on ancient interpreters (< 2.4)
# that lack the builtin. On modern Pythons the try succeeds and the
# fallback definition is never created.
try:
    sorted
except NameError:
    def sorted(lst):
        l = list(lst)
        l.sort()
        return l
class TestNodes(support.TestCase):

    """Unit tests for nodes (Base, Leaf, Node)."""

    if sys.version_info >= (2,6):
        # warnings.catch_warnings is new in 2.6.
        def test_deprecated_prefix_methods(self):
            l = pytree.Leaf(100, "foo")
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always", DeprecationWarning)
                self.assertEqual(l.get_prefix(), "")
                l.set_prefix("hi")
            self.assertEqual(l.prefix, "hi")
            self.assertEqual(len(w), 2)
            for warning in w:
                self.assertTrue(warning.category is DeprecationWarning)
            self.assertEqual(str(w[0].message), "get_prefix() is deprecated; " \
                                 "use the prefix property")
            self.assertEqual(str(w[1].message), "set_prefix() is deprecated; " \
                                 "use the prefix property")

    def test_instantiate_base(self):
        if __debug__:
            # Test that instantiating Base() raises an AssertionError
            self.assertRaises(AssertionError, pytree.Base)

    def test_leaf(self):
        l1 = pytree.Leaf(100, "foo")
        self.assertEqual(l1.type, 100)
        self.assertEqual(l1.value, "foo")

    def test_leaf_repr(self):
        l1 = pytree.Leaf(100, "foo")
        self.assertEqual(repr(l1), "Leaf(100, 'foo')")

    def test_leaf_str(self):
        l1 = pytree.Leaf(100, "foo")
        self.assertEqual(str(l1), "foo")
        l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1)))
        self.assertEqual(str(l2), " foo")

    def test_leaf_str_numeric_value(self):
        # Make sure that the Leaf's value is stringified. Failing to
        #  do this can cause a TypeError in certain situations.
        l1 = pytree.Leaf(2, 5)
        l1.prefix = "foo_"
        self.assertEqual(str(l1), "foo_5")

    def test_leaf_equality(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "foo", context=(" ", (1, 0)))
        self.assertEqual(l1, l2)
        l3 = pytree.Leaf(101, "foo")
        l4 = pytree.Leaf(100, "bar")
        self.assertNotEqual(l1, l3)
        self.assertNotEqual(l1, l4)

    def test_leaf_prefix(self):
        l1 = pytree.Leaf(100, "foo")
        self.assertEqual(l1.prefix, "")
        self.assertFalse(l1.was_changed)
        l1.prefix = "  ##\n\n"
        self.assertEqual(l1.prefix, "  ##\n\n")
        self.assertTrue(l1.was_changed)

    def test_node(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(200, "bar")
        n1 = pytree.Node(1000, [l1, l2])
        self.assertEqual(n1.type, 1000)
        self.assertEqual(n1.children, [l1, l2])

    def test_node_repr(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
        n1 = pytree.Node(1000, [l1, l2])
        self.assertEqual(repr(n1),
                         "Node(1000, [%s, %s])" % (repr(l1), repr(l2)))

    def test_node_str(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar", context=(" ", (1, 0)))
        n1 = pytree.Node(1000, [l1, l2])
        self.assertEqual(str(n1), "foo bar")

    def test_node_prefix(self):
        # Setting a Node's prefix delegates to its first leaf.
        l1 = pytree.Leaf(100, "foo")
        self.assertEqual(l1.prefix, "")
        n1 = pytree.Node(1000, [l1])
        self.assertEqual(n1.prefix, "")
        n1.prefix = " "
        self.assertEqual(n1.prefix, " ")
        self.assertEqual(l1.prefix, " ")

    def test_get_suffix(self):
        l1 = pytree.Leaf(100, "foo", prefix="a")
        l2 = pytree.Leaf(100, "bar", prefix="b")
        n1 = pytree.Node(1000, [l1, l2])
        self.assertEqual(l1.get_suffix(), l2.prefix)
        self.assertEqual(l2.get_suffix(), "")
        self.assertEqual(n1.get_suffix(), "")
        l3 = pytree.Leaf(100, "bar", prefix="c")
        n2 = pytree.Node(1000, [n1, l3])
        self.assertEqual(n1.get_suffix(), l3.prefix)
        self.assertEqual(l3.get_suffix(), "")
        self.assertEqual(n2.get_suffix(), "")

    def test_node_equality(self):
        n1 = pytree.Node(1000, ())
        n2 = pytree.Node(1000, [], context=(" ", (1, 0)))
        self.assertEqual(n1, n2)
        n3 = pytree.Node(1001, ())
        self.assertNotEqual(n1, n3)

    def test_node_recursive_equality(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1])
        n2 = pytree.Node(1000, [l2])
        self.assertEqual(n1, n2)
        l3 = pytree.Leaf(100, "bar")
        n3 = pytree.Node(1000, [l3])
        self.assertNotEqual(n1, n3)

    def test_replace(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "+")
        l3 = pytree.Leaf(100, "bar")
        n1 = pytree.Node(1000, [l1, l2, l3])
        self.assertEqual(n1.children, [l1, l2, l3])
        self.assertTrue(isinstance(n1.children, list))
        self.assertFalse(n1.was_changed)
        l2new = pytree.Leaf(100, "-")
        l2.replace(l2new)
        self.assertEqual(n1.children, [l1, l2new, l3])
        self.assertTrue(isinstance(n1.children, list))
        self.assertTrue(n1.was_changed)

    def test_replace_with_list(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "+")
        l3 = pytree.Leaf(100, "bar")
        n1 = pytree.Node(1000, [l1, l2, l3])
        l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")])
        self.assertEqual(str(n1), "foo**bar")
        self.assertTrue(isinstance(n1.children, list))

    def test_leaves(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "fooey")
        n2 = pytree.Node(1000, [l1, l2])
        n3 = pytree.Node(1000, [l3])
        n1 = pytree.Node(1000, [n2, n3])
        self.assertEqual(list(n1.leaves()), [l1, l2, l3])

    def test_depth(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        n2 = pytree.Node(1000, [l1, l2])
        n3 = pytree.Node(1000, [])
        n1 = pytree.Node(1000, [n2, n3])
        self.assertEqual(l1.depth(), 2)
        self.assertEqual(n3.depth(), 1)
        self.assertEqual(n1.depth(), 0)

    def test_post_order(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "fooey")
        c1 = pytree.Node(1000, [l1, l2])
        n1 = pytree.Node(1000, [c1, l3])
        self.assertEqual(list(n1.post_order()), [l1, l2, c1, l3, n1])

    def test_pre_order(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "fooey")
        c1 = pytree.Node(1000, [l1, l2])
        n1 = pytree.Node(1000, [c1, l3])
        self.assertEqual(list(n1.pre_order()), [n1, c1, l1, l2, l3])

    def test_changed(self):
        l1 = pytree.Leaf(100, "f")
        self.assertFalse(l1.was_changed)
        l1.changed()
        self.assertTrue(l1.was_changed)

        l1 = pytree.Leaf(100, "f")
        n1 = pytree.Node(1000, [l1])
        self.assertFalse(n1.was_changed)
        n1.changed()
        self.assertTrue(n1.was_changed)

        # changed() propagates upward, never downward.
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "+")
        l3 = pytree.Leaf(100, "bar")
        n1 = pytree.Node(1000, [l1, l2, l3])
        n2 = pytree.Node(1000, [n1])
        self.assertFalse(l1.was_changed)
        self.assertFalse(n1.was_changed)
        self.assertFalse(n2.was_changed)
        n1.changed()
        self.assertTrue(n1.was_changed)
        self.assertTrue(n2.was_changed)
        self.assertFalse(l1.was_changed)

    def test_leaf_constructor_prefix(self):
        for prefix in ("xyz_", ""):
            l1 = pytree.Leaf(100, "self", prefix=prefix)
            # Bug fix: this used assertTrue(a, b), where b is only a failure
            # *message* -- the equality was never actually checked.
            self.assertEqual(str(l1), prefix + "self")
            self.assertEqual(l1.prefix, prefix)

    def test_node_constructor_prefix(self):
        for prefix in ("xyz_", ""):
            l1 = pytree.Leaf(100, "self")
            l2 = pytree.Leaf(100, "foo", prefix="_")
            n1 = pytree.Node(1000, [l1, l2], prefix=prefix)
            # Bug fix: was assertTrue(str(n1), ...), which always passed.
            self.assertEqual(str(n1), prefix + "self_foo")
            self.assertEqual(n1.prefix, prefix)
            self.assertEqual(l1.prefix, prefix)
            self.assertEqual(l2.prefix, "_")

    def test_remove(self):
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [n1])

        self.assertEqual(n1.remove(), 0)
        self.assertEqual(n2.children, [])
        self.assertEqual(l1.parent, n1)
        self.assertEqual(n1.parent, None)
        self.assertEqual(n2.parent, None)
        self.assertFalse(n1.was_changed)
        self.assertTrue(n2.was_changed)

        self.assertEqual(l2.remove(), 1)
        self.assertEqual(l1.remove(), 0)
        self.assertEqual(n1.children, [])
        self.assertEqual(l1.parent, None)
        self.assertEqual(n1.parent, None)
        self.assertEqual(n2.parent, None)
        self.assertTrue(n1.was_changed)
        self.assertTrue(n2.was_changed)

    def test_remove_parentless(self):
        n1 = pytree.Node(1000, [])
        n1.remove()
        self.assertEqual(n1.parent, None)

        l1 = pytree.Leaf(100, "foo")
        l1.remove()
        self.assertEqual(l1.parent, None)

    def test_node_set_child(self):
        l1 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1])

        l2 = pytree.Leaf(100, "bar")
        n1.set_child(0, l2)
        self.assertEqual(l1.parent, None)
        self.assertEqual(l2.parent, n1)
        self.assertEqual(n1.children, [l2])

        n2 = pytree.Node(1000, [l1])
        n2.set_child(0, n1)
        self.assertEqual(l1.parent, None)
        self.assertEqual(n1.parent, n2)
        self.assertEqual(n2.parent, None)
        self.assertEqual(n2.children, [n1])

        self.assertRaises(IndexError, n1.set_child, 4, l2)
        # I don't care what it raises, so long as it's an exception
        self.assertRaises(Exception, n1.set_child, 0, list)

    def test_node_insert_child(self):
        l1 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1])

        l2 = pytree.Leaf(100, "bar")
        n1.insert_child(0, l2)
        self.assertEqual(l2.parent, n1)
        self.assertEqual(n1.children, [l2, l1])

        l3 = pytree.Leaf(100, "abc")
        n1.insert_child(2, l3)
        self.assertEqual(n1.children, [l2, l1, l3])

        # I don't care what it raises, so long as it's an exception
        self.assertRaises(Exception, n1.insert_child, 0, list)

    def test_node_append_child(self):
        n1 = pytree.Node(1000, [])

        l1 = pytree.Leaf(100, "foo")
        n1.append_child(l1)
        self.assertEqual(l1.parent, n1)
        self.assertEqual(n1.children, [l1])

        l2 = pytree.Leaf(100, "bar")
        n1.append_child(l2)
        self.assertEqual(l2.parent, n1)
        self.assertEqual(n1.children, [l1, l2])

        # I don't care what it raises, so long as it's an exception
        self.assertRaises(Exception, n1.append_child, list)

    def test_node_next_sibling(self):
        n1 = pytree.Node(1000, [])
        n2 = pytree.Node(1000, [])
        p1 = pytree.Node(1000, [n1, n2])

        self.assertTrue(n1.next_sibling is n2)
        self.assertEqual(n2.next_sibling, None)
        self.assertEqual(p1.next_sibling, None)

    def test_leaf_next_sibling(self):
        l1 = pytree.Leaf(100, "a")
        l2 = pytree.Leaf(100, "b")
        p1 = pytree.Node(1000, [l1, l2])

        self.assertTrue(l1.next_sibling is l2)
        self.assertEqual(l2.next_sibling, None)
        self.assertEqual(p1.next_sibling, None)

    def test_node_prev_sibling(self):
        n1 = pytree.Node(1000, [])
        n2 = pytree.Node(1000, [])
        p1 = pytree.Node(1000, [n1, n2])

        self.assertTrue(n2.prev_sibling is n1)
        self.assertEqual(n1.prev_sibling, None)
        self.assertEqual(p1.prev_sibling, None)

    def test_leaf_prev_sibling(self):
        l1 = pytree.Leaf(100, "a")
        l2 = pytree.Leaf(100, "b")
        p1 = pytree.Node(1000, [l1, l2])

        self.assertTrue(l2.prev_sibling is l1)
        self.assertEqual(l1.prev_sibling, None)
        self.assertEqual(p1.prev_sibling, None)
class TestPatterns(support.TestCase):

    """Unit tests for tree matching patterns."""

    def test_basic_patterns(self):
        # Build a tree
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [l3])
        root = pytree.Node(1000, [n1, n2])
        # Build a pattern matching a leaf
        pl = pytree.LeafPattern(100, "foo", name="pl")
        r = {}
        # A leaf pattern must not match nodes, and a failed match must
        # leave the results dict untouched.
        self.assertFalse(pl.match(root, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pl.match(n1, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pl.match(n2, results=r))
        self.assertEqual(r, {})
        self.assertTrue(pl.match(l1, results=r))
        self.assertEqual(r, {"pl": l1})
        r = {}
        self.assertFalse(pl.match(l2, results=r))
        self.assertEqual(r, {})
        # Build a pattern matching a node
        pn = pytree.NodePattern(1000, [pl], name="pn")
        self.assertFalse(pn.match(root, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pn.match(n1, results=r))
        self.assertEqual(r, {})
        # A successful node match records the nested leaf capture too.
        self.assertTrue(pn.match(n2, results=r))
        self.assertEqual(r, {"pn": n2, "pl": l3})
        r = {}
        self.assertFalse(pn.match(l1, results=r))
        self.assertEqual(r, {})
        self.assertFalse(pn.match(l2, results=r))
        self.assertEqual(r, {})

    def test_wildcard(self):
        # Build a tree for testing
        l1 = pytree.Leaf(100, "foo")
        l2 = pytree.Leaf(100, "bar")
        l3 = pytree.Leaf(100, "foo")
        n1 = pytree.Node(1000, [l1, l2])
        n2 = pytree.Node(1000, [l3])
        root = pytree.Node(1000, [n1, n2])
        # Build a pattern
        pl = pytree.LeafPattern(100, "foo", name="pl")
        pn = pytree.NodePattern(1000, [pl], name="pn")
        # Matches either one "pn" node or two "pl" leaves in sequence.
        pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw")
        r = {}
        self.assertFalse(pw.match_seq([root], r))
        self.assertEqual(r, {})
        self.assertFalse(pw.match_seq([n1], r))
        self.assertEqual(r, {})
        self.assertTrue(pw.match_seq([n2], r))
        # These are easier to debug
        self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"])
        self.assertEqual(r["pl"], l1)
        self.assertEqual(r["pn"], n2)
        self.assertEqual(r["pw"], [n2])
        # But this is equivalent
        self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]})
        r = {}
        self.assertTrue(pw.match_seq([l1, l3], r))
        self.assertEqual(r, {"pl": l3, "pw": [l1, l3]})
        # The *last* matched leaf wins the "pl" capture.
        self.assertTrue(r["pl"] is l3)
        # NOTE(review): leftover reset; no assertions follow it.
        r = {}

    def test_generate_matches(self):
        la = pytree.Leaf(1, "a")
        lb = pytree.Leaf(1, "b")
        lc = pytree.Leaf(1, "c")
        ld = pytree.Leaf(1, "d")
        le = pytree.Leaf(1, "e")
        lf = pytree.Leaf(1, "f")
        leaves = [la, lb, lc, ld, le, lf]
        root = pytree.Node(1000, leaves)
        pa = pytree.LeafPattern(1, "a", "pa")
        pb = pytree.LeafPattern(1, "b", "pb")
        pc = pytree.LeafPattern(1, "c", "pc")
        pd = pytree.LeafPattern(1, "d", "pd")
        pe = pytree.LeafPattern(1, "e", "pe")
        pf = pytree.LeafPattern(1, "f", "pf")
        pw = pytree.WildcardPattern([[pa, pb, pc], [pd, pe],
                                     [pa, pb], [pc, pd], [pe, pf]],
                                    min=1, max=4, name="pw")
        # Each alternative consumes a different number of leaves.
        self.assertEqual([x[0] for x in pw.generate_matches(leaves)],
                         [3, 5, 2, 4, 6])
        pr = pytree.NodePattern(type=1000, content=[pw], name="pr")
        matches = list(pytree.generate_matches([pr], [root]))
        self.assertEqual(len(matches), 1)
        c, r = matches[0]
        self.assertEqual(c, 1)
        self.assertEqual(str(r["pr"]), "abcdef")
        self.assertEqual(r["pw"], [la, lb, lc, ld, le, lf])
        for c in "abcdef":
            self.assertEqual(r["p" + c], pytree.Leaf(1, c))

    def test_has_key_example(self):
        # Pattern shaped like "( <anything> )".
        pattern = pytree.NodePattern(331,
                                     (pytree.LeafPattern(7),
                                      pytree.WildcardPattern(name="args"),
                                      pytree.LeafPattern(8)))
        l1 = pytree.Leaf(7, "(")
        l2 = pytree.Leaf(3, "x")
        l3 = pytree.Leaf(8, ")")
        node = pytree.Node(331, [l1, l2, l3])
        r = {}
        self.assertTrue(pattern.match(node, r))
        self.assertEqual(r["args"], [l2])
| bsd-3-clause |
eRestin/Mezz | mezzanine/blog/migrations/0004_auto__del_field_blogpost_category.py | 12 | 7386 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: drop the single-category FK from BlogPost.

    The old one-to-many ``category`` column was superseded by the
    many-to-many ``categories`` relation, which already exists in the
    frozen model definition below.
    """

    def forwards(self, orm):
        # Deleting field 'BlogPost.category'
        db.delete_column('blog_blogpost', 'category_id')

    def backwards(self, orm):
        # Adding field 'BlogPost.category'
        # Recreated as nullable so the reverse migration succeeds even
        # though the original category values are unrecoverable.
        db.add_column('blog_blogpost', 'category', self.gf('django.db.models.fields.related.ForeignKey')(related_name='blogposts', null=True, to=orm['blog.BlogCategory'], blank=True), keep_default=False)

    # Frozen snapshot of the models at the time of this migration; South
    # uses it to build the fake ORM passed to forwards()/backwards().
    # Do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'blog.blogcategory': {
            'Meta': {'object_name': 'BlogCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'blog.blogpost': {
            'Meta': {'object_name': 'BlogPost'},
            '_keywords': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'blogposts'", 'blank': 'True', 'to': "orm['blog.BlogCategory']"}),
            'content': ('mezzanine.core.fields.HtmlField', [], {}),
            'description': ('mezzanine.core.fields.HtmlField', [], {'blank': 'True'}),
            'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'keywords': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.Keyword']", 'symmetrical': 'False', 'blank': 'True'}),
            'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'blogposts'", 'to': "orm['auth.User']"})
        },
        'blog.comment': {
            'Meta': {'object_name': 'Comment'},
            'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'blog_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['blog.BlogPost']"}),
            'body': ('django.db.models.fields.TextField', [], {}),
            'by_author': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            'email_hash': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'replied_to': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['blog.Comment']"}),
            'time_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'core.keyword': {
            'Meta': {'object_name': 'Keyword'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['blog']
| bsd-2-clause |
TheTypoMaster/chromium-crosswalk | third_party/jinja2/optimizer.py | 1401 | 2302 | # -*- coding: utf-8 -*-
"""
jinja2.optimizer
~~~~~~~~~~~~~~~~
The jinja optimizer is currently trying to constant fold a few expressions
and modify the AST in place so that it should be easier to evaluate it.
Because the AST does not contain all the scoping information and the
compiler has to find that out, we cannot do all the optimizations we
want. For example loop unrolling doesn't work because unrolled loops would
have a different scoping.
The solution would be a second syntax tree that has the scoping rules stored.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2 import nodes
from jinja2.visitor import NodeTransformer
def optimize(node, environment):
    """Constant-fold and simplify *node*, modifying the AST in place,
    and return the resulting node.

    Only static optimizations are performed here; no template context
    is available at this stage.
    """
    optimizer = Optimizer(environment)
    return optimizer.visit(node)
class Optimizer(NodeTransformer):
    """AST transformer performing dead-code elimination and constant folding."""

    def __init__(self, environment):
        # Needed so folded constants carry the environment's settings.
        self.environment = environment

    def visit_If(self, node):
        """Eliminate dead code."""
        # do not optimize ifs that have a block inside so that it doesn't
        # break super().
        if node.find(nodes.Block) is not None:
            return self.generic_visit(node)
        try:
            val = self.visit(node.test).as_const()
        except nodes.Impossible:
            # Test is not statically evaluable; leave the if in place.
            return self.generic_visit(node)
        if val:
            body = node.body
        else:
            body = node.else_
        # Splice the surviving branch's (recursively optimized) statements
        # directly into the parent, dropping the If node entirely.
        result = []
        for node in body:
            result.extend(self.visit_list(node))
        return result

    def fold(self, node):
        """Do constant folding."""
        node = self.generic_visit(node)
        try:
            return nodes.Const.from_untrusted(node.as_const(),
                                              lineno=node.lineno,
                                              environment=self.environment)
        except nodes.Impossible:
            # Not a compile-time constant; keep the original node.
            return node
    # Register fold() as the visitor for every foldable expression type,
    # then remove the temporary name from the class namespace.
    visit_Add = visit_Sub = visit_Mul = visit_Div = visit_FloorDiv = \
        visit_Pow = visit_Mod = visit_And = visit_Or = visit_Pos = visit_Neg = \
        visit_Not = visit_Compare = visit_Getitem = visit_Getattr = visit_Call = \
        visit_Filter = visit_Test = visit_CondExpr = fold
    del fold
| bsd-3-clause |
daineseh/kodi-plugin.video.ted-talks-chinese | youtube_dl/extractor/kickstarter.py | 16 | 2747 | # encoding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import smuggle_url
class KickStarterIE(InfoExtractor):
    """Extractor for videos on Kickstarter project pages.

    Handles Kickstarter's native video service directly and falls back
    to the generic extractor for embedded third-party players
    (e.g. Vimeo).
    """
    _VALID_URL = r'https?://www\.kickstarter\.com/projects/(?P<id>[^/]*)/.*'
    _TESTS = [{
        'url': 'https://www.kickstarter.com/projects/1404461844/intersection-the-story-of-josh-grant/description',
        'md5': 'c81addca81327ffa66c642b5d8b08cab',
        'info_dict': {
            'id': '1404461844',
            'ext': 'mp4',
            'title': 'Intersection: The Story of Josh Grant by Kyle Cowling',
            'description': (
                'A unique motocross documentary that examines the '
                'life and mind of one of sports most elite athletes: Josh Grant.'
            ),
        },
    }, {
        'note': 'Embedded video (not using the native kickstarter video service)',
        'url': 'https://www.kickstarter.com/projects/597507018/pebble-e-paper-watch-for-iphone-and-android/posts/659178',
        'info_dict': {
            'id': '78704821',
            'ext': 'mp4',
            'uploader_id': 'pebble',
            'uploader': 'Pebble Technology',
            'title': 'Pebble iOS Notifications',
        },
        'add_ie': ['Vimeo'],
    }, {
        'url': 'https://www.kickstarter.com/projects/1420158244/power-drive-2000/widget/video.html',
        'info_dict': {
            'id': '1420158244',
            'ext': 'mp4',
            'title': 'Power Drive 2000',
        },
        'expected_warnings': ['OpenGraph description'],
    }]

    def _real_extract(self, url):
        """Return an info dict for *url*, delegating to the generic
        extractor when no native video URL is present on the page."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Strip the trailing em-dash "Kickstarter" suffix from the page title.
        title = self._html_search_regex(
            r'<title>\s*(.*?)(?:\s*—\s*Kickstarter)?\s*</title>',
            webpage, 'title')
        video_url = self._search_regex(
            r'data-video-url="(.*?)"',
            webpage, 'video URL', default=None)
        if video_url is None:  # No native kickstarter, look for embedded videos
            return {
                '_type': 'url_transparent',
                'ie_key': 'Generic',
                'url': smuggle_url(url, {'to_generic': True}),
                'title': title,
            }
        thumbnail = self._og_search_thumbnail(webpage, default=None)
        if thumbnail is None:
            # Fallback: scrape the poster <img> when OpenGraph data is absent.
            thumbnail = self._html_search_regex(
                r'<img[^>]+class="[^"]+\s*poster\s*[^"]+"[^>]+src="([^"]+)"',
                webpage, 'thumbnail image', fatal=False)
        return {
            'id': video_id,
            'url': video_url,
            'title': title,
            'description': self._og_search_description(webpage),
            'thumbnail': thumbnail,
        }
| gpl-2.0 |
ssvsergeyev/ZenPacks.zenoss.AWS | src/boto/boto/opsworks/__init__.py | 135 | 1657 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
    """
    Get all available regions for the Amazon OpsWorks service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported inside the function — presumably to avoid a circular
    # import with boto.opsworks.layer1 at module load time (verify).
    from boto.opsworks.layer1 import OpsWorksConnection
    return get_regions('opsworks', connection_cls=OpsWorksConnection)
def connect_to_region(region_name, **kw_params):
    """Open a connection to the OpsWorks endpoint named *region_name*.

    Returns the connection object, or None when no region matches.
    """
    matching = (r for r in regions() if r.name == region_name)
    region = next(matching, None)
    if region is None:
        return None
    return region.connect(**kw_params)
| gpl-2.0 |
piquadrat/django | tests/utils_tests/test_crypto.py | 4 | 4775 | import binascii
import hashlib
import unittest
from django.utils.crypto import constant_time_compare, pbkdf2
class TestUtilsCryptoMisc(unittest.TestCase):
    def test_constant_time_compare(self):
        # It's hard to test for constant time, just test the result —
        # for both bytes and text operands.
        cases = [
            (b'spam', b'spam', True),
            (b'spam', b'eggs', False),
            ('spam', 'spam', True),
            ('spam', 'eggs', False),
        ]
        for left, right, expected in cases:
            if expected:
                self.assertTrue(constant_time_compare(left, right))
            else:
                self.assertFalse(constant_time_compare(left, right))
class TestUtilsCryptoPBKDF2(unittest.TestCase):
    """Checks django.utils.crypto.pbkdf2 against published RFC test
    vectors plus Django-specific regression vectors."""

    # http://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06
    rfc_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 2,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 4096,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "4b007901b765489abead49d926f721d065a429c1",
        },
        # # this takes way too long :(
        # {
        #     "args": {
        #         "password": "password",
        #         "salt": "salt",
        #         "iterations": 16777216,
        #         "dklen": 20,
        #         "digest": hashlib.sha1,
        #     },
        #     "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984",
        # },
        {
            "args": {
                "password": "passwordPASSWORDpassword",
                "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt",
                "iterations": 4096,
                "dklen": 25,
                "digest": hashlib.sha1,
            },
            "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
        },
        {
            # Embedded NUL bytes in both password and salt.
            "args": {
                "password": "pass\0word",
                "salt": "sa\0lt",
                "iterations": 4096,
                "dklen": 16,
                "digest": hashlib.sha1,
            },
            "result": "56fa6aa75548099dcc37d7f03425e0c3",
        },
    ]
    # Django-specific vectors: non-SHA1 digests, dklen=0, and leading zeros.
    regression_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha256,
            },
            "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha512,
            },
            "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6",
        },
        {
            # dklen=0 means "use the digest's full output length".
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1000,
                "dklen": 0,
                "digest": hashlib.sha512,
            },
            "result": ("afe6c5530785b6cc6b1c6453384731bd5ee432ee"
                       "549fd42fb6695779ad8a1c5bf59de69c48f774ef"
                       "c4007d5298f9033c0241d5ab69305e7b64eceeb8d"
                       "834cfec"),
        },
        # Check leading zeros are not stripped (#17481)
        {
            "args": {
                "password": b'\xba',
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": '0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b',
        },
    ]

    def test_public_vectors(self):
        # RFC draft vectors must match the published hex digests exactly.
        for vector in self.rfc_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])

    def test_regression_vectors(self):
        for vector in self.regression_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])

    def test_default_hmac_alg(self):
        # With no explicit digest, pbkdf2 must match the stdlib's
        # pbkdf2_hmac using SHA-256.
        kwargs = {'password': b'password', 'salt': b'salt', 'iterations': 1, 'dklen': 20}
        self.assertEqual(pbkdf2(**kwargs), hashlib.pbkdf2_hmac(hash_name=hashlib.sha256().name, **kwargs))
| bsd-3-clause |
Knio/miru | examples/mesh04.py | 1 | 1668 | from pyglet import options
options['debug_gl'] = False
options['debug_gl_trace'] = True
options['debug_gl_trace_args'] = True
options['vsync'] = False
from pyglet.gl import *
import miru.ui
from miru.context import context
import miru.graphics
import miru.input
from miru import core
import pyglet.graphics
import pyglet.image
from pyglet import clock
import os, sys, random
window = miru.ui.TestWindow(600, 400)
context.window = window
context.camera.pos.z = 20
context.control = miru.input.SimpleMouseControl()
context.osd.add_object(clock.ClockDisplay())
# Create mesh object and add to batch
mesh_id = miru.graphics.load_mesh(
(sys.argv[1:] and sys.argv[1]
or os.path.join('docs', 'demo', 'alien.obj')))
batch = pyglet.graphics.Batch()
img = pyglet.image.load(
os.path.join('docs', 'demo', 'orange.png'))
tex = miru.graphics.get_wrapped_texture(img)
tex_group = miru.graphics.TextureTransformGroup(texture=tex,
translation=[0,0,0], rotation=[0,0,0])
miru.graphics.batch_mesh(
mesh_id, batch, tex_group, False)
for i in range(35):
x = -7.5 + random.random() * 15
y = -7.5 + random.random() * 15
z = -7.5 + random.random() * 15
next_id = miru.graphics.mesh_transform(mesh_id, translation=(x,y,z))
miru.graphics.batch_mesh(next_id, batch, tex_group, False)
velocity = 0.1
def update(dt):
delta = dt * velocity
tex_group.translation[0] += delta
clock.schedule_interval(update, 1/60.)
obj = core.Object(batch)
context.add_object(obj)
while not window.has_exit:
clock.tick()
window.clear()
window.dispatch_events()
context.render()
window.flip()
window.close()
| mit |
mustafat/odoo-1 | addons/website_google_map/controllers/main.py | 36 | 2724 | # -*- coding: utf-8 -*-
import json
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.tools import html_escape as escape
class google_map(http.Controller):
    '''
    This class generates on-the-fly partner maps that can be reused in every
    website page. To do so, just use an ``<iframe ...>`` whose ``src``
    attribute points to ``/google_map`` (this controller generates a complete
    HTML5 page).

    URL query parameters:
    - ``partner_ids``: a comma-separated list of ids (partners to be shown)
    - ``partner_url``: the base-url to display the partner
      (eg: if ``partner_url`` is ``/partners/``, when the user will click on
      a partner on the map, it will be redirected to <myodoo>.com/partners/<id>)

    In order to resize the map, simply resize the ``iframe`` with CSS
    directives ``width`` and ``height``.
    '''

    @http.route(['/google_map'], type='http', auth="public", website=True)
    def google_map(self, *arg, **post):
        """Render the map page for the published company partners whose
        ids were passed in ``partner_ids``."""
        cr, uid, context = request.cr, request.uid, request.context
        partner_obj = request.registry['res.partner']
        # filter real ints from query parameters and build a domain
        clean_ids = []
        for s in post.get('partner_ids', "").split(","):
            try:
                i = int(s)
                clean_ids.append(i)
            except ValueError:
                # silently drop anything that is not an integer id
                pass
        # search for partners that can be displayed on a map
        # (only published companies; search runs as SUPERUSER on purpose)
        domain = [("id", "in", clean_ids), ('website_published', '=', True), ('is_company', '=', True)]
        partners_ids = partner_obj.search(cr, SUPERUSER_ID, domain, context=context)
        # browse and format data
        partner_data = {
            "counter": len(partners_ids),
            "partners": []
        }
        request.context.update({'show_address': True})
        for partner in partner_obj.browse(cr, SUPERUSER_ID, partners_ids, context=context):
            partner_data["partners"].append({
                'id': partner.id,
                'name': escape(partner.name),
                # name_get() returns "name\naddress lines"; keep only the
                # address lines (everything after the first newline)
                'address': escape('\n'.join(partner.name_get()[0][1].split('\n')[1:])),
                'latitude': escape(str(partner.partner_latitude)),
                'longitude': escape(str(partner.partner_longitude)),
            })
        # choose the click-through base URL from the requested one
        if 'customers' in post.get('partner_url', ''):
            partner_url = '/customers/'
        else:
            partner_url = '/partners/'
        # generate the map
        values = {
            'partner_url': partner_url,
            'partner_data': json.dumps(partner_data)
        }
        return request.website.render("website_google_map.google_map", values)
| agpl-3.0 |
stevens-yang/aware-dataplan | dpdk-2.1.0/tools/dpdk_nic_bind.py | 39 | 20227 | #! /usr/bin/python
#
# BSD LICENSE
#
# Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys, os, getopt, subprocess
from os.path import exists, abspath, dirname, basename
# The PCI device class for ETHERNET devices
ETHERNET_CLASS = "0200"

# global dict ethernet devices present. Dictionary indexed by PCI address.
# Each device within this is itself a dictionary of device properties
devices = {}

# list of supported DPDK drivers
dpdk_drivers = [ "igb_uio", "vfio-pci", "uio_pci_generic" ]

# command-line arg flags (populated by parse_args())
b_flag = None        # driver name to bind, "none" to unbind, or None for no action
status_flag = False  # True when --status was requested
force_flag = False   # True when --force was requested
args = []            # positional device arguments left over after option parsing
def usage():
    '''Print usage information for the program'''
    argv0 = basename(sys.argv[0])
    # Python 2 print statement; the triple-quoted usage template is
    # filled in from local variables via "% locals()" below.
    print """
Usage:
------

     %(argv0)s [options] DEVICE1 DEVICE2 ....

where DEVICE1, DEVICE2 etc, are specified via PCI "domain:bus:slot.func" syntax
or "bus:slot.func" syntax. For devices bound to Linux kernel drivers, they may
also be referred to by Linux interface name e.g. eth0, eth1, em0, em1, etc.

Options:
    --help, --usage:
        Display usage information and quit

    --status:
        Print the current status of all known network interfaces.
        For each device, it displays the PCI domain, bus, slot and function,
        along with a text description of the device. Depending upon whether the
        device is being used by a kernel driver, the igb_uio driver, or no
        driver, other relevant information will be displayed:
        * the Linux interface name e.g. if=eth0
        * the driver being used e.g. drv=igb_uio
        * any suitable drivers not currently using that device
            e.g. unused=igb_uio
        NOTE: if this flag is passed along with a bind/unbind option, the status
        display will always occur after the other operations have taken place.

    -b driver, --bind=driver:
        Select the driver to use or \"none\" to unbind the device

    -u, --unbind:
        Unbind a device (Equivalent to \"-b none\")

    --force:
        By default, devices which are used by Linux - as indicated by having
        routes in the routing table - cannot be modified. Using the --force
        flag overrides this behavior, allowing active links to be forcibly
        unbound.
        WARNING: This can lead to loss of network connection and should be used
        with caution.

Examples:
---------

To display current device status:
        %(argv0)s --status

To bind eth1 from the current driver and move to use igb_uio
        %(argv0)s --bind=igb_uio eth1

To unbind 0000:01:00.0 from using any driver
        %(argv0)s -u 0000:01:00.0

To bind 0000:02:00.0 and 0000:02:00.1 to the ixgbe kernel driver
        %(argv0)s -b ixgbe 02:00.0 02:00.1

""" % locals() # replace items from local variables
# This is roughly compatible with check_output function in subprocess module
# which is only available in python 2.7.
def check_output(args, stderr=None):
    '''Run a command and capture its output'''
    # Returns stdout only; the child's exit status is ignored.
    return subprocess.Popen(args, stdout=subprocess.PIPE,
                            stderr=stderr).communicate()[0]
def find_module(mod):
    '''find the .ko file for kernel module named mod.
    Searches the $RTE_SDK/$RTE_TARGET directory, the kernel
    modules directory and finally under the parent directory of
    the script. Returns the path, or None if nothing matched.'''
    # check $RTE_SDK/$RTE_TARGET directory
    if 'RTE_SDK' in os.environ and 'RTE_TARGET' in os.environ:
        path = "%s/%s/kmod/%s.ko" % (os.environ['RTE_SDK'],\
            os.environ['RTE_TARGET'], mod)
        if exists(path):
            return path

    # check using depmod
    try:
        depmod_out = check_output(["modinfo", "-n", mod], \
            stderr=subprocess.STDOUT).lower()
        if "error" not in depmod_out:
            path = depmod_out.strip()
            if exists(path):
                return path
    except: # if modinfo can't find module, it fails, so continue
        pass

    # check for a copy based off current path
    tools_dir = dirname(abspath(sys.argv[0]))
    if (tools_dir.endswith("tools")):
        base_dir = dirname(tools_dir)
        find_out = check_output(["find", base_dir, "-name", mod + ".ko"])
        if len(find_out) > 0: #something matched
            # take the first match found under the base directory
            path = find_out.splitlines()[0]
            if exists(path):
                return path
def check_modules():
    '''Checks that igb_uio is loaded. Also prunes the global dpdk_drivers
    list down to just the modules that are actually loaded.'''
    global dpdk_drivers

    # Read the kernel's list of loaded modules (py2 file() builtin).
    fd = file("/proc/modules")
    loaded_mods = fd.readlines()
    fd.close()

    # list of supported modules
    mods = [{"Name" : driver, "Found" : False} for driver in dpdk_drivers]

    # first check if module is loaded
    for line in loaded_mods:
        for mod in mods:
            if line.startswith(mod["Name"]):
                mod["Found"] = True
            # special case for vfio_pci (module is named vfio-pci,
            # but its .ko is named vfio_pci)
            elif line.replace("_", "-").startswith(mod["Name"]):
                mod["Found"] = True

    # check if we have at least one loaded module
    if True not in [mod["Found"] for mod in mods] and b_flag is not None:
        # fatal only when the user asked to bind to a DPDK driver
        if b_flag in dpdk_drivers:
            print "Error - no supported modules(DPDK driver) are loaded"
            sys.exit(1)
        else:
            print "Warning - no supported modules(DPDK driver) are loaded"

    # change DPDK driver list to only contain drivers that are loaded
    dpdk_drivers = [mod["Name"] for mod in mods if mod["Found"]]
def has_driver(dev_id):
    """Return True when the device identified by *dev_id* is currently
    bound to a driver (i.e. lspci reported a Driver field for it)."""
    device = devices[dev_id]
    return "Driver_str" in device
def get_pci_device_details(dev_id):
    '''This function gets additional details for a PCI device'''
    device = {}
    # verbose, machine-readable, kernel-driver info for a single slot
    extra_info = check_output(["lspci", "-vmmks", dev_id]).splitlines()

    # parse lspci details: each line is "Field:\tvalue"; the parsed key
    # is stored with a "_str" suffix (e.g. "Driver_str")
    for line in extra_info:
        if len(line) == 0:
            continue
        name, value = line.split("\t", 1)
        name = name.strip(":") + "_str"
        device[name] = value
    # check for a unix interface name
    sys_path = "/sys/bus/pci/devices/%s/net/" % dev_id
    if exists(sys_path):
        device["Interface"] = ",".join(os.listdir(sys_path))
    else:
        device["Interface"] = ""
    # check if a port is used for ssh connection
    # (defaults only; get_nic_details() overwrites these for active links)
    device["Ssh_if"] = False
    device["Active"] = ""

    return device
def get_nic_details():
    '''This function populates the "devices" dictionary. The keys used are
    the pci addresses (domain:bus:slot.func). The values are themselves
    dictionaries - one for each NIC.'''
    global devices
    global dpdk_drivers

    # clear any old data
    devices = {}
    # first loop through and read details for all devices
    # request machine readable format, with numeric IDs
    dev = {};
    dev_lines = check_output(["lspci", "-Dvmmn"]).splitlines()
    for dev_line in dev_lines:
        # a blank line terminates one device record
        if (len(dev_line) == 0):
            if dev["Class"] == ETHERNET_CLASS:
                #convert device and vendor ids to numbers, then add to global
                dev["Vendor"] = int(dev["Vendor"],16)
                dev["Device"] = int(dev["Device"],16)
                devices[dev["Slot"]] = dict(dev) # use dict to make copy of dev
        else:
            name, value = dev_line.split("\t", 1)
            dev[name.rstrip(":")] = value

    # check what is the interface if any for an ssh connection if
    # any to this host, so we can mark it later.
    ssh_if = []
    route = check_output(["ip", "-o", "route"])
    # filter out all lines for 169.254 routes (link-local)
    route = "\n".join(filter(lambda ln: not ln.startswith("169.254"),
                             route.splitlines()))
    rt_info = route.split()
    for i in xrange(len(rt_info) - 1):
        if rt_info[i] == "dev":
            ssh_if.append(rt_info[i+1])

    # based on the basic info, get extended text details
    for d in devices.keys():
        # get additional info and add it to existing data
        # (py2 idiom: dict.items() returns lists, so "+" merges them)
        devices[d] = dict(devices[d].items() +
                          get_pci_device_details(d).items())

        # mark any interface that carries a route as active / in use by ssh
        for _if in ssh_if:
            if _if in devices[d]["Interface"].split(","):
                devices[d]["Ssh_if"] = True
                devices[d]["Active"] = "*Active*"
                break;

        # add igb_uio to list of supporting modules if needed
        if "Module_str" in devices[d]:
            for driver in dpdk_drivers:
                if driver not in devices[d]["Module_str"]:
                    devices[d]["Module_str"] = devices[d]["Module_str"] + ",%s" % driver
        else:
            devices[d]["Module_str"] = ",".join(dpdk_drivers)

        # make sure the driver and module strings do not have any duplicates
        if has_driver(d):
            modules = devices[d]["Module_str"].split(",")
            if devices[d]["Driver_str"] in modules:
                modules.remove(devices[d]["Driver_str"])
                devices[d]["Module_str"] = ",".join(modules)
def dev_id_from_dev_name(dev_name):
    '''Take a device "name" - a string passed in by user to identify a NIC
    device, and determine the device id - i.e. the domain:bus:slot.func - for
    it, which can then be used to index into the devices array.
    Exits the program if the name cannot be resolved.'''
    dev = None  # NOTE(review): never used below; candidate for removal
    # check if it's already a suitable index
    if dev_name in devices:
        return dev_name
    # check if it's an index just missing the domain part
    elif "0000:" + dev_name in devices:
        return "0000:" + dev_name
    else:
        # check if it's an interface name, e.g. eth1
        for d in devices.keys():
            if dev_name in devices[d]["Interface"].split(","):
                return devices[d]["Slot"]
    # if nothing else matches - error
    print "Unknown device: %s. " \
        "Please specify device in \"bus:slot.func\" format" % dev_name
    sys.exit(1)
def unbind_one(dev_id, force):
'''Unbind the device identified by "dev_id" from its current driver'''
dev = devices[dev_id]
if not has_driver(dev_id):
print "%s %s %s is not currently managed by any driver\n" % \
(dev["Slot"], dev["Device_str"], dev["Interface"])
return
# prevent us disconnecting ourselves
if dev["Ssh_if"] and not force:
print "Routing table indicates that interface %s is active" \
". Skipping unbind" % (dev_id)
return
# write to /sys to unbind
filename = "/sys/bus/pci/drivers/%s/unbind" % dev["Driver_str"]
try:
f = open(filename, "a")
except:
print "Error: unbind failed for %s - Cannot open %s" % (dev_id, filename)
sys/exit(1)
f.write(dev_id)
f.close()
def bind_one(dev_id, driver, force):
    '''Bind the device given by "dev_id" to the driver "driver". If the device
    is already bound to a different driver, it will be unbound first'''
    dev = devices[dev_id]
    saved_driver = None # used to rollback any unbind in case of failure

    # prevent disconnection of our ssh session
    if dev["Ssh_if"] and not force:
        print "Routing table indicates that interface %s is active" \
            ". Not modifying" % (dev_id)
        return

    # unbind any existing drivers we don't want
    if has_driver(dev_id):
        if dev["Driver_str"] == driver:
            print "%s already bound to driver %s, skipping\n" % (dev_id, driver)
            return
        else:
            saved_driver = dev["Driver_str"]
            unbind_one(dev_id, force)
            dev["Driver_str"] = "" # clear driver string

    # if we are binding to one of DPDK drivers, add PCI id's to that driver
    # (uio-style drivers have no PCI ID table, so /sys "new_id" is needed)
    if driver in dpdk_drivers:
        filename = "/sys/bus/pci/drivers/%s/new_id" % driver
        try:
            f = open(filename, "w")
        except:
            print "Error: bind failed for %s - Cannot open %s" % (dev_id, filename)
            return
        try:
            f.write("%04x %04x" % (dev["Vendor"], dev["Device"]))
            f.close()
        except:
            print "Error: bind failed for %s - Cannot write new PCI ID to " \
                "driver %s" % (dev_id, driver)
            return

    # do the bind by writing to /sys
    filename = "/sys/bus/pci/drivers/%s/bind" % driver
    try:
        f = open(filename, "a")
    except:
        print "Error: bind failed for %s - Cannot open %s" % (dev_id, filename)
        if saved_driver is not None: # restore any previous driver
            bind_one(dev_id, saved_driver, force)
        return
    try:
        f.write(dev_id)
        f.close()
    except:
        # for some reason, closing dev_id after adding a new PCI ID to new_id
        # results in IOError. however, if the device was successfully bound,
        # we don't care for any errors and can safely ignore IOError
        tmp = get_pci_device_details(dev_id)
        if "Driver_str" in tmp and tmp["Driver_str"] == driver:
            return
        print "Error: bind failed for %s - Cannot bind to driver %s" % (dev_id, driver)
        if saved_driver is not None: # restore any previous driver
            bind_one(dev_id, saved_driver, force)
        return
def unbind_all(dev_list, force=False):
    """Unbind method, takes a list of device locations.

    Each entry may be a PCI address or an interface name; it is resolved
    to a device id before unbinding.
    """
    for dev_name in map(dev_id_from_dev_name, dev_list):
        unbind_one(dev_name, force)
def bind_all(dev_list, driver, force=False):
    """Bind method, takes a list of device locations"""
    global devices

    # resolve interface names / short PCI addresses to full device ids
    dev_list = map(dev_id_from_dev_name, dev_list)

    for d in dev_list:
        bind_one(d, driver, force)

    # when binding devices to a generic driver (i.e. one that doesn't have a
    # PCI ID table), some devices that are not bound to any other driver could
    # be bound even if no one has asked them to. hence, we check the list of
    # drivers again, and see if some of the previously-unbound devices were
    # erroneously bound.
    for d in devices.keys():
        # skip devices that were already bound or that we know should be bound
        if "Driver_str" in devices[d] or d in dev_list:
            continue

        # update information about this device
        # (py2 idiom: dict.items() returns lists, so "+" merges them)
        devices[d] = dict(devices[d].items() +
                          get_pci_device_details(d).items())

        # check if updated information indicates that the device was bound
        if "Driver_str" in devices[d]:
            unbind_one(d, force)
def display_devices(title, dev_list, extra_params = None):
    '''Displays to the user the details of a list of devices given in "dev_list"
    The "extra_params" parameter, if given, should contain a string with
    %()s fields in it for replacement by the named fields in each device's
    dictionary.'''
    strings = [] # this holds the strings to print. We sort before printing
    # underlined section title
    print "\n%s" % title
    print "="*len(title)
    if len(dev_list) == 0:
        strings.append("<none>")
    else:
        for dev in dev_list:
            if extra_params is not None:
                strings.append("%s '%s' %s" % (dev["Slot"], \
                    dev["Device_str"], extra_params % dev))
            else:
                strings.append("%s '%s'" % (dev["Slot"], dev["Device_str"]))
    # sort before printing, so that the entries appear in PCI order
    strings.sort()
    print "\n".join(strings) # print one per line
def show_status():
    '''Function called when the script is passed the "--status" option. Displays
    to the user what devices are bound to the igb_uio driver, the kernel driver
    or to no driver'''
    global dpdk_drivers
    kernel_drv = []
    dpdk_drv = []
    no_drv = []

    # split our list of devices into the three categories above
    for d in devices.keys():
        if not has_driver(d):
            no_drv.append(devices[d])
            continue
        if devices[d]["Driver_str"] in dpdk_drivers:
            dpdk_drv.append(devices[d])
        else:
            kernel_drv.append(devices[d])

    # print each category separately, so we can clearly see what's used by DPDK
    display_devices("Network devices using DPDK-compatible driver", dpdk_drv, \
                    "drv=%(Driver_str)s unused=%(Module_str)s")
    display_devices("Network devices using kernel driver", kernel_drv,
                    "if=%(Interface)s drv=%(Driver_str)s unused=%(Module_str)s %(Active)s")
    display_devices("Other network devices", no_drv,\
                    "unused=%(Module_str)s")
def parse_args():
    '''Parses the command-line arguments given by the user and takes the
    appropriate action for each. Results are stored in the module-level
    flag globals (b_flag, status_flag, force_flag, args).'''
    global b_flag
    global status_flag
    global force_flag
    global args
    # no arguments at all: show help and exit successfully
    if len(sys.argv) <= 1:
        usage()
        sys.exit(0)

    try:
        opts, args = getopt.getopt(sys.argv[1:], "b:u",
                                   ["help", "usage", "status", "force",
                                    "bind=", "unbind"])
    except getopt.GetoptError, error:  # py2 except syntax
        print str(error)
        print "Run '%s --usage' for further information" % sys.argv[0]
        sys.exit(1)

    for opt, arg in opts:
        if opt == "--help" or opt == "--usage":
            usage()
            sys.exit(0)
        if opt == "--status":
            status_flag = True
        if opt == "--force":
            force_flag = True
        if opt == "-b" or opt == "-u" or opt == "--bind" or opt == "--unbind":
            # bind and unbind are mutually exclusive, single-use options
            if b_flag is not None:
                print "Error - Only one bind or unbind may be specified\n"
                sys.exit(1)
            if opt == "-u" or opt == "--unbind":
                b_flag = "none"
            else:
                b_flag = arg
def do_arg_actions():
    '''do the actual action requested by the user, based on the flag
    globals previously set up by parse_args()'''
    global b_flag
    global status_flag
    global force_flag
    global args

    if b_flag is None and not status_flag:
        print "Error: No action specified for devices. Please give a -b or -u option"
        print "Run '%s --usage' for further information" % sys.argv[0]
        sys.exit(1)

    if b_flag is not None and len(args) == 0:
        print "Error: No devices specified."
        print "Run '%s --usage' for further information" % sys.argv[0]
        sys.exit(1)

    # "none" (any case) means unbind; any other driver name means bind
    if b_flag == "none" or b_flag == "None":
        unbind_all(args, force_flag)
    elif b_flag is not None:
        bind_all(args, b_flag, force_flag)
    if status_flag:
        if b_flag is not None:
            get_nic_details() # refresh if we have changed anything
        show_status()
def main():
    '''program main function'''
    parse_args()       # populate the module-level flag globals
    check_modules()    # prune dpdk_drivers down to loaded modules
    get_nic_details()  # populate the global "devices" dict
    do_arg_actions()   # perform the requested bind/unbind/status actions

if __name__ == "__main__":
    main()
| gpl-3.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Source/Python/Lib/python27/distutils/command/build_clib.py | 176 | 8131 | """distutils.command.build_clib
Implements the Distutils 'build_clib' command, to build a C/C++ library
that is included in the module distribution and needed by an extension
module."""
__revision__ = "$Id$"
# XXX this module has *lots* of code ripped-off quite transparently from
# build_ext.py -- not surprisingly really, as the work required to build
# a static library from a collection of C source files is not really all
# that different from what's required to build a shared object file from
# a collection of C source files. Nevertheless, I haven't done the
# necessary refactoring to account for the overlap in code between the
# two modules, mainly because a number of subtle details changed in the
# cut 'n paste. Sigh.
import os
from distutils.core import Command
from distutils.errors import DistutilsSetupError
from distutils.sysconfig import customize_compiler
from distutils import log
def show_compilers():
    """List the available compiler types (--help-compiler output)."""
    # Imported locally so the module-level name 'show_compilers'
    # refers to this wrapper, not the ccompiler function it delegates to.
    from distutils.ccompiler import show_compilers
    show_compilers()
class build_clib(Command):
description = "build C/C++ libraries used by Python extensions"
user_options = [
('build-clib=', 'b',
"directory to build C/C++ libraries to"),
('build-temp=', 't',
"directory to put temporary build by-products"),
('debug', 'g',
"compile with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
('compiler=', 'c',
"specify the compiler type"),
]
boolean_options = ['debug', 'force']
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = 0
self.compiler = None
def finalize_options(self):
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options('build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'))
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self):
if not self.libraries:
return
# Yech -- this is cut 'n pasted from build_ext.py!
from distutils.ccompiler import new_compiler
self.compiler = new_compiler(compiler=self.compiler,
dry_run=self.dry_run,
force=self.force)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for (name,value) in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries):
"""Ensure that the list of libraries is valid.
`library` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise DistutilsSetupError, \
"'libraries' option must be a list of tuples"
for lib in libraries:
if not isinstance(lib, tuple) and len(lib) != 2:
raise DistutilsSetupError, \
"each element of 'libraries' must a 2-tuple"
name, build_info = lib
if not isinstance(name, str):
raise DistutilsSetupError, \
"first element of each tuple in 'libraries' " + \
"must be a string (the library name)"
if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError, \
("bad library name '%s': " +
"may not contain directory separators") % \
lib[0]
if not isinstance(build_info, dict):
raise DistutilsSetupError, \
"second element of each tuple in 'libraries' " + \
"must be a dictionary (build info)"
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for (lib_name, build_info) in self.libraries:
lib_names.append(lib_name)
return lib_names
    def get_source_files(self):
        """Return the flat list of every source filename, for sdist."""
        # Re-validate: this can be called without finalize_options() running.
        self.check_library_list(self.libraries)
        filenames = []
        for (lib_name, build_info) in self.libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError, \
                      ("in 'libraries' option (library '%s'), "
                       "'sources' must be present and must be "
                       "a list of source filenames") % lib_name

            filenames.extend(sources)
        return filenames
    def build_libraries(self, libraries):
        """Compile each library's sources and archive them as a static lib."""
        for (lib_name, build_info) in libraries:
            sources = build_info.get('sources')
            if sources is None or not isinstance(sources, (list, tuple)):
                raise DistutilsSetupError, \
                      ("in 'libraries' option (library '%s'), " +
                       "'sources' must be present and must be " +
                       "a list of source filenames") % lib_name
            # Copy so a tuple (or the caller's list) is never mutated here.
            sources = list(sources)

            log.info("building '%s' library", lib_name)

            # First, compile the source code to object files in the library
            # directory.  (This should probably change to putting object
            # files in a temporary build directory.)
            macros = build_info.get('macros')
            include_dirs = build_info.get('include_dirs')
            objects = self.compiler.compile(sources,
                                            output_dir=self.build_temp,
                                            macros=macros,
                                            include_dirs=include_dirs,
                                            debug=self.debug)

            # Now "link" the object files together into a static library.
            # (On Unix at least, this isn't really linking -- it just
            # builds an archive.  Whatever.)
            self.compiler.create_static_lib(objects, lib_name,
                                            output_dir=self.build_clib,
                                            debug=self.debug)
| mit |
mancoast/CPythonPyc_test | cpython/221_test_compare.py | 10 | 1314 | import sys
from test_support import *
class Empty:
    """Featureless object used to exercise default (identity) comparison."""

    def __repr__(self):
        return '<Empty>'
class Coerce:
    """Wrapper whose __coerce__ unwraps to the stored value for comparison."""

    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Coerce %s>' % self.arg

    def __coerce__(self, other):
        # When the partner is also a Coerce, unwrap both sides.
        if isinstance(other, Coerce):
            return self.arg, other.arg
        return self.arg, other
class Cmp:
    """Wrapper delegating three-way comparison to the stored value."""

    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Cmp %s>' % self.arg

    def __cmp__(self, other):
        # 'cmp' is the Python 2 builtin three-way comparison.
        return cmp(self.arg, other)
# Mixed bag of comparable objects: numbers of several types, containers,
# None, and the helper classes above; every pair gets compared.
candidates = [2, 2.0, 2L, 2+0j, [1], (3,), None, Empty(), Coerce(2), Cmp(2.0)]
def test():
    """Compare every candidate against every other and print the outcome;
    then verify that default comparison orders objects by id()."""
    for a in candidates:
        for b in candidates:
            try:
                x = a == b
            except:
                # Record which exception type the comparison raised.
                print 'cmp(%s, %s) => %s' % (a, b, sys.exc_info()[0])
            else:
                if x:
                    print "%s == %s" % (a, b)
                else:
                    print "%s != %s" % (a, b)
    # Ensure default comparison compares id() of args
    L = []
    for i in range(10):
        # Insert in the middle so list order differs from creation order.
        L.insert(len(L)//2, Empty())
    for a in L:
        for b in L:
            if cmp(a, b) != cmp(id(a), id(b)):
                print "ERROR:", cmp(a, b), cmp(id(a), id(b)), id(a), id(b)
test()
| gpl-3.0 |
virneo/nupic | examples/opf/experiments/multistep/hotgym/permutations_sp.py | 35 | 4546 | #! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by ExpGenerator to generate the actual
permutations.py file by replacing $XXXXXXXX tokens with desired values.
This permutations.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.pyc'
"""
from nupic.swarming.permutationhelpers import *
# The name of the field being predicted.  Any allowed permutation MUST contain
# the prediction field.
# (generated from PREDICTION_FIELD)
predictedField = 'consumption'

# Common bit width given to every permuted encoder below.
ENC_WIDTH = 21

permutations = {
  'inferenceType': 'NontemporalMultiStep',
  'tpEnable': False,
  # Encoder permutation choices
  # Example:
  #
  #   '__gym_encoder' : PermuteEncoder('gym', 'SDRCategoryEncoder', w=7,
  #         n=100),
  #
  #   '__address_encoder' : PermuteEncoder('address', 'SDRCategoryEncoder',
  #         w=7, n=100),
  #
  #   '__timestamp_timeOfDay_encoder' : PermuteEncoder('timestamp',
  #         'DateEncoder.timeOfDay', w=7, radius=PermuteChoices([1, 8])),
  #
  #   '__timestamp_dayOfWeek_encoder' : PermuteEncoder('timestamp',
  #         'DateEncoder.dayOfWeek', w=7, radius=PermuteChoices([1, 3])),
  #
  #   '__consumption_encoder' : PermuteEncoder('consumption', 'ScalarEncoder',
  #         w=7, n=PermuteInt(13, 500, 20), minval=0,
  #         maxval=PermuteInt(100, 300, 25)),
  #
  # (generated from PERM_ENCODER_CHOICES)
  '__timestamp_timeOfDay_encoder' : PermuteEncoder(fieldName='timestamp',
        encoderClass='DateEncoder.timeOfDay',
        w=ENC_WIDTH, radius=PermuteFloat(0.5, 12)),
  '__timestamp_dayOfWeek_encoder' : PermuteEncoder(fieldName='timestamp',
        encoderClass='DateEncoder.dayOfWeek', w=ENC_WIDTH,
        radius=PermuteFloat(1, 6)),
  '__timestamp_weekend_encoder' : PermuteEncoder(fieldName='timestamp',
        encoderClass='DateEncoder.weekend', w=ENC_WIDTH,
        radius=PermuteChoices([1])),
  '__consumption_encoder' : PermuteEncoder(fieldName='consumption',
        encoderClass='AdaptiveScalarEncoder', w=ENC_WIDTH,
        n=PermuteInt(28, 521), clipInput=True),
  # Temporal-pooler thresholds: fixed values, not permuted in this swarm.
  'tpSegmentActivationThreshold': 14,
  'tpMinSegmentMatchSynapseThreshold': 12,
}

# Fields selected for final hypersearch report;
# NOTE: These values are used as regular expressions by RunPermutations.py's
#       report generator
# (fieldname values generated from PERM_PREDICTED_FIELD_NAME)
report = [
          '.*consumption.*',
         ]

# Permutation optimization setting: either minimize or maximize metric
# used by RunPermutations.
# NOTE: The value is used as a regular expressions by RunPermutations.py's
#       report generator
# (generated from minimize = 'prediction:aae:window=1000:field=consumption')
minimize = "multiStepBestPredictions:multiStep:errorMetric='aae':steps=1:window=1000:field=consumption"
def permutationFilter(perm):
    """Accept or reject one candidate permutation.

    RunPermutations calls this for every possible combination of values
    drawn from the 'permutations' dict; returning False vetoes that
    combination, returning True keeps it.

    Parameters:
    ---------------------------------------------------------
    perm: dict of one possible combination of name:value
          pairs chosen from permutations.
    """
    # Example of vetoing a combination:
    # if perm['__consumption_encoder']['maxval'] > 300:
    #     return False
    return True
| agpl-3.0 |
frankrousseau/weboob | modules/ameli/pages.py | 2 | 5994 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Christophe Lampin
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
import re
import urllib
from decimal import Decimal
from weboob.deprecated.browser import Page, BrokenPageError
from weboob.capabilities.bill import Subscription, Detail, Bill
# French month names as printed on the site, January first; indexing this
# list avoids depending on a French locale being installed on the host.
FRENCH_MONTHS = [u'janvier', u'février', u'mars', u'avril', u'mai', u'juin', u'juillet', u'août', u'septembre', u'octobre', u'novembre', u'décembre']
class AmeliBasePage(Page):
    def is_logged(self):
        """Return True when the logout link is on the page, i.e. a session is open."""
        try:
            self.parser.select(self.document.getroot(), 'a.logout', 1)
            logged = True
        except BrokenPageError:
            # No 'a.logout' element: we are not authenticated.
            logged = False
        self.logger.debug('logged: %s' % (logged,))
        return logged
class LoginPage(AmeliBasePage):
    def login(self, login, password):
        """Fill and submit the credentials form.

        `login` is the social-security number and `password` the 'code
        confidentiel'; both are utf-8 encoded for the form submission.
        """
        self.browser.select_form('connexionCompteForm')
        self.browser["connexioncompte_2numSecuriteSociale"] = login.encode('utf8')
        self.browser["connexioncompte_2codeConfidentiel"] = password.encode('utf8')
        self.browser.submit()
class HomePage(AmeliBasePage):
    # Landing page shown right after login; nothing needs to be parsed on it.
    pass
class AccountPage(AmeliBasePage):
    def iter_subscription_list(self):
        """Yield one Subscription per insured person listed on the account page.

        The main insured is identified by the digits of a social-security
        number; children have no number on the page and get synthetic ids
        'AFFILIE1', 'AFFILIE2', ... in page order.
        """
        idents = self.document.xpath('//div[contains(@class, "blocfond")]')
        enfants = 0
        for ident in idents:
            # Blocks without an <h3> are not insured-person blocks; skip them.
            if len(ident.xpath('.//h3')) == 0:
                continue
            name = self.parser.tocleanstring(ident.xpath('.//h3')[0])
            lis = ident.xpath('.//li')
            if len(lis) > 3:
                # 4th <li> holds the social-security number; keep digits only.
                number = re.sub('[^\d]+', '', ident.xpath('.//li')[3].text)
            else:
                enfants = enfants + 1
                number = "AFFILIE" + str(enfants)
            sub = Subscription(number)
            sub._id = number
            sub.label = unicode(name)
            sub.subscriber = unicode(name)
            yield sub
class LastPaymentsPage(AmeliBasePage):
    def iter_last_payments(self):
        """Yield the href of the first link of each row of the last-payments table."""
        tables = self.document.xpath('//table[@id="tabDerniersPaiements"]')
        if not tables:
            return
        for row in tables[0].xpath('.//tr'):
            links = row.xpath('.//a')
            if not links:
                # Header or separator row: no detail link to follow.
                continue
            yield links[0].attrib.get('href')
class PaymentDetailsPage(AmeliBasePage):
    def iter_payment_details(self, sub):
        """Yield one Detail per reimbursement line for subscription `sub`.

        The page carries one <h2> header and one <table class="tableau"> per
        insured person; `idx` selects the pair belonging to `sub` (0 for the
        main insured, whose id is the social-security number, N for the Nth
        child whose synthetic id is 'AFFILIEN' -- see AccountPage).
        """
        if sub._id.isdigit():
            idx = 0
        else:
            # BUGFIX: the 'AFFILIE<n>' suffix must be converted to int; it is
            # used below both as a list index and in length comparisons, and
            # a string there raises TypeError.
            idx = int(sub._id.replace('AFFILIE', ''))
        # BUGFIX: the second operand compared an xpath result *list* to a
        # number; len() was clearly intended (and is required on Python 3).
        if len(self.document.xpath('//div[@class="centrepage"]/h2')) > idx or len(self.document.xpath('//table[@id="DetailPaiement3"]')) > idx:
            id_str = self.document.xpath('//div[@class="centrepage"]/h2')[idx].text.strip()
            # Headers look like '... le <date> pour un montant de ...'.
            m = re.match('.*le (.*) pour un montant de.*', id_str)
            if m:
                id_str = m.group(1)
            id_date = datetime.strptime(id_str, '%d/%m/%Y').date()
            # Stable id prefix: <subscription id>.<YYYYMMDD of the payment>.
            base_id = sub._id + "." + datetime.strftime(id_date, "%Y%m%d")
            table = self.document.xpath('//table[@class="tableau"]')[idx].xpath('.//tr')
            line = 1
            last_date = None
            for tr in table:
                tds = tr.xpath('.//td')
                if len(tds) == 0:
                    continue
                date_str = tds[0].text
                det = Detail()
                det.id = base_id + "." + str(line)
                det.label = unicode(tds[1].text.strip())
                if date_str is None or date_str == '':
                    # Continuation row: reuse the date of the previous line.
                    det.infos = u''
                    det.datetime = last_date
                else:
                    det.infos = u'Payé ' + unicode(re.sub('[^\d,-]+', '', tds[2].text)) + u'€ / Base ' + unicode(re.sub('[^\d,-]+', '', tds[3].text)) + u'€ / Taux ' + unicode(re.sub('[^\d,-]+', '', tds[4].text)) + '%'
                    det.datetime = datetime.strptime(date_str, '%d/%m/%Y').date()
                    last_date = det.datetime
                det.price = Decimal(re.sub('[^\d,-]+', '', tds[5].text).replace(',', '.'))
                line = line + 1
                yield det
class BillsPage(AmeliBasePage):
    def iter_bills(self, sub):
        """Yield one Bill per row of the monthly statements table."""
        table = self.document.xpath('//table[@id="tableauDecompte"]')[0].xpath('.//tr')
        for tr in table:
            list_tds = tr.xpath('.//td')
            if len(list_tds) == 0:
                continue
            date_str = list_tds[0].text
            month_str = date_str.split()[0]
            # Dates come as '<french month> <year>'; translate the month name
            # to its number via FRENCH_MONTHS so no French locale is needed.
            date = datetime.strptime(re.sub(month_str, str(FRENCH_MONTHS.index(month_str) + 1), date_str), "%m %Y").date()
            amount = list_tds[1].text
            if amount is None:
                continue
            amount = re.sub(' euros', '', amount)
            bil = Bill()
            bil.id = sub._id + "." + date.strftime("%Y%m")
            bil.date = date
            bil.label = u''+amount.strip()
            bil.format = u'pdf'
            # The PDF endpoint wants the month as MMYYYY.
            filedate = date.strftime("%m%Y")
            bil._url = '/PortailAS/PDFServletReleveMensuel.dopdf'
            bil._args = {'PDF.moisRecherche': filedate}
            yield bil

    def get_bill(self, bill):
        """Download the PDF of `bill` by POSTing its saved arguments."""
        self.location(bill._url, urllib.urlencode(bill._args))
| agpl-3.0 |
ddico/odoo | addons/website/models/ir_ui_view.py | 1 | 25152 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import os
import uuid
import werkzeug
from odoo import api, fields, models
from odoo import tools
from odoo.addons import website
from odoo.addons.http_routing.models.ir_http import url_for
from odoo.exceptions import AccessError
from odoo.osv import expression
from odoo.http import request
_logger = logging.getLogger(__name__)
class View(models.Model):
    _name = "ir.ui.view"
    _inherit = ["ir.ui.view", "website.seo.metadata"]

    # Shown in the website "Customize" menu as an optional inherited view.
    customize_show = fields.Boolean("Show As Optional Inherit", default=False)
    # Set on COW'd copies: the website this specific view belongs to.
    website_id = fields.Many2one('website', ondelete='cascade', string="Website")
    page_ids = fields.One2many('website.page', 'view_id')
    first_page_id = fields.Many2one('website.page', string='Website Page', help='First page linked to this view', compute='_compute_first_page_id')
    track = fields.Boolean(string='Track', default=False, help="Allow to specify for one page of the website to be trackable or not")
    visibility = fields.Selection([('', 'All'), ('connected', 'Signed In'), ('restricted_group', 'Restricted Group'), ('password', 'With Password')], default='')
    # Hashed password; only the display field below is ever shown to users.
    visibility_password = fields.Char(groups='base.group_system', copy=False)
    visibility_password_display = fields.Char(compute='_get_pwd', inverse='_set_pwd', groups='website.group_website_designer')

    @api.depends('visibility_password')
    def _get_pwd(self):
        # Never expose the stored hash; show a placeholder when a password is set.
        for r in self:
            r.visibility_password_display = r.sudo().visibility_password and '********' or ''

    def _set_pwd(self):
        """Hash the password typed by the designer and store it."""
        crypt_context = self.env.user._crypt_context()
        for r in self:
            if r.type == 'qweb':
                r.sudo().visibility_password = r.visibility_password_display and crypt_context.encrypt(r.visibility_password_display) or ''
                r.visibility = r.visibility  # double check access

    def _compute_first_page_id(self):
        """Link each view to the first website.page that uses it (if any)."""
        for view in self:
            view.first_page_id = self.env['website.page'].search([('view_id', '=', view.id)], limit=1)
def name_get(self):
if (not self._context.get('display_website') and not self.env.user.has_group('website.group_multi_website')) or \
not self._context.get('display_website'):
return super(View, self).name_get()
res = []
for view in self:
view_name = view.name
if view.website_id:
view_name += ' [%s]' % view.website_id.name
res.append((view.id, view_name))
return res
    def write(self, vals):
        '''COW for ir.ui.view. This way editing websites does not impact other
        websites. Also this way newly created websites will only
        contain the default views.
        '''
        current_website_id = self.env.context.get('website_id')
        if not current_website_id or self.env.context.get('no_cow'):
            return super(View, self).write(vals)

        # We need to consider inactive views when handling multi-website cow
        # feature (to copy inactive children views, to search for specific
        # views, ...)
        for view in self.with_context(active_test=False):
            # Make sure views which are written in a website context receive
            # a value for their 'key' field
            if not view.key and not vals.get('key'):
                view.with_context(no_cow=True).key = 'website.key_%s' % str(uuid.uuid4())[:6]

            # No need of COW if the view is already specific
            if view.website_id:
                super(View, view).write(vals)
                continue

            # Ensure the cache of the pages stay consistent when doing COW.
            # This is necessary when writing view fields from a page record
            # because the generic page will put the given values on its cache
            # but in reality the values were only meant to go on the specific
            # page. Invalidate all fields and not only those in vals because
            # other fields could have been changed implicitly too.
            pages = view.page_ids
            pages.flush(records=pages)
            pages.invalidate_cache(ids=pages.ids)

            # If already a specific view for this generic view, write on it
            website_specific_view = view.search([
                ('key', '=', view.key),
                ('website_id', '=', current_website_id)
            ], limit=1)
            if website_specific_view:
                super(View, website_specific_view).write(vals)
                continue

            # Set key to avoid copy() to generate an unique key as we want the
            # specific view to have the same key
            copy_vals = {'website_id': current_website_id, 'key': view.key}
            # Copy with the 'inherit_id' field value that will be written to
            # ensure the copied view's validation works
            if vals.get('inherit_id'):
                copy_vals['inherit_id'] = vals['inherit_id']
            website_specific_view = view.copy(copy_vals)

            view._create_website_specific_pages_for_view(website_specific_view,
                                                         view.env['website'].browse(current_website_id))

            for inherit_child in view.inherit_children_ids.filter_duplicate().sorted(key=lambda v: (v.priority, v.id)):
                if inherit_child.website_id.id == current_website_id:
                    # In the case the child was already specific to the current
                    # website, we cannot just reattach it to the new specific
                    # parent: we have to copy it there and remove it from the
                    # original tree. Indeed, the order of children 'id' fields
                    # must remain the same so that the inheritance is applied
                    # in the same order in the copied tree.
                    child = inherit_child.copy({'inherit_id': website_specific_view.id, 'key': inherit_child.key})
                    inherit_child.inherit_children_ids.write({'inherit_id': child.id})
                    inherit_child.unlink()
                else:
                    # Trigger COW on inheriting views
                    inherit_child.write({'inherit_id': website_specific_view.id})

            # Finally apply the requested changes to the fresh specific copy.
            super(View, website_specific_view).write(vals)

        return True
    def _create_all_specific_views(self, processed_modules):
        """ When creating a generic child view, we should
            also create that view under specific view trees (COW'd).
            Top level view (no inherit_id) do not need that behavior as they
            will be shared between websites since there is no specific yet.
        """
        # Only for the modules being processed
        regex = '^(%s)[.]' % '|'.join(processed_modules)
        # Retrieve the views through a SQl query to avoid ORM queries inside of for loop
        # Retrieves all the views that are missing their specific counterpart with all the
        # specific view parent id and their website id in one query
        query = """
            SELECT generic.id, ARRAY[array_agg(spec_parent.id), array_agg(spec_parent.website_id)]
            FROM ir_ui_view generic
            INNER JOIN ir_ui_view generic_parent ON generic_parent.id = generic.inherit_id
            INNER JOIN ir_ui_view spec_parent ON spec_parent.key = generic_parent.key
            LEFT JOIN ir_ui_view specific ON specific.key = generic.key AND specific.website_id = spec_parent.website_id
            WHERE generic.type='qweb'
            AND generic.website_id IS NULL
            AND generic.key ~ %s
            AND spec_parent.website_id IS NOT NULL
            AND specific.id IS NULL
            GROUP BY generic.id
        """
        self.env.cr.execute(query, (regex, ))
        result = dict(self.env.cr.fetchall())

        for record in self.browse(result.keys()):
            specific_parent_view_ids, website_ids = result[record.id]
            for specific_parent_view_id, website_id in zip(specific_parent_view_ids, website_ids):
                # Re-parenting in a website context triggers the COW in
                # write(), which creates the missing specific copy.
                record.with_context(website_id=website_id).write({
                    'inherit_id': specific_parent_view_id,
                })

        super(View, self)._create_all_specific_views(processed_modules)
    def unlink(self):
        '''This implements COU (copy-on-unlink). When deleting a generic page
        website-specific pages will be created so only the current
        website is affected.
        '''
        current_website_id = self._context.get('website_id')

        if current_website_id and not self._context.get('no_cow'):
            for view in self.filtered(lambda view: not view.website_id):
                for w in self.env['website'].search([('id', '!=', current_website_id)]):
                    # reuse the COW mechanism to create
                    # website-specific copies, it will take
                    # care of creating pages and menus.
                    view.with_context(website_id=w.id).write({'name': view.name})

        specific_views = self.env['ir.ui.view']
        # During module (un)installation, also drop the specific copies of
        # the generic views being removed.
        if self and self.pool._init:
            for view in self.filtered(lambda view: not view.website_id):
                specific_views += view._get_specific_views()

        result = super(View, self + specific_views).unlink()
        self.clear_caches()
        return result

    def _create_website_specific_pages_for_view(self, new_view, website):
        """Duplicate this view's pages for `new_view` and relink the menus of
        `website` to the freshly created pages."""
        for page in self.page_ids:
            # create new pages for this view
            new_page = page.copy({
                'view_id': new_view.id,
                'is_published': page.is_published,
            })
            page.menu_ids.filtered(lambda m: m.website_id.id == website.id).page_id = new_page.id
    @api.model
    def get_related_views(self, key, bundles=False):
        '''Make this only return most specific views for website.'''
        # get_related_views can be called through website=False routes
        # (e.g. /web_editor/get_assets_editor_resources), so website
        # dispatch_parameters may not be added. Manually set
        # website_id. (It will then always fallback on a website, this
        # method should never be called in a generic context, even for
        # tests)
        self = self.with_context(website_id=self.env['website'].get_current_website().id)
        return super(View, self).get_related_views(key, bundles=bundles)

    def filter_duplicate(self):
        """ Filter current recordset only keeping the most suitable view per distinct key.
            Every non-accessible view will be removed from the set:
              * In non website context, every view with a website will be removed
              * In a website context, every view from another website
        """
        current_website_id = self._context.get('website_id')
        most_specific_views = self.env['ir.ui.view']
        if not current_website_id:
            return self.filtered(lambda view: not view.website_id)

        for view in self:
            # specific view: add it if it's for the current website and ignore
            # it if it's for another website
            if view.website_id and view.website_id.id == current_website_id:
                most_specific_views |= view
            # generic view: add it only if, for the current website, there is no
            # specific view for this view (based on the same `key` attribute)
            elif not view.website_id and not any(view.key == view2.key and view2.website_id and view2.website_id.id == current_website_id for view2 in self):
                most_specific_views |= view

        return most_specific_views

    @api.model
    def _view_get_inherited_children(self, view):
        """Restrict inherited children to those visible from the current website."""
        extensions = super(View, self)._view_get_inherited_children(view)
        return extensions.filter_duplicate()
    @api.model
    def _view_obj(self, view_id):
        ''' Given an xml_id or a view_id, return the corresponding view record.
            In case of website context, return the most specific one.
            :param view_id: either a string xml_id or an integer view_id
            :return: The view record or empty recordset
        '''
        if isinstance(view_id, str) or isinstance(view_id, int):
            return self.env['website'].viewref(view_id)
        else:
            # It can already be a view object when called by '_views_get()' that is calling '_view_obj'
            # for it's inherit_children_ids, passing them directly as object record. (Note that it might
            # be a view_id from another website but it will be filtered in 'get_related_views()')
            return view_id if view_id._name == 'ir.ui.view' else self.env['ir.ui.view']

    @api.model
    def _get_inheriting_views_arch_domain(self, model):
        """Extend the inheriting-views domain with the current website filter."""
        domain = super(View, self)._get_inheriting_views_arch_domain(model)
        current_website = self.env['website'].browse(self._context.get('website_id'))
        website_views_domain = current_website.website_domain()
        # when rendering for the website we have to include inactive views
        # we will prefer inactive website-specific views over active generic ones
        if current_website:
            domain = [leaf for leaf in domain if 'active' not in leaf]
        return expression.AND([website_views_domain, domain])

    @api.model
    def get_inheriting_views_arch(self, model):
        """Return inheriting views; a website-specific copy (even inactive)
        shadows its generic counterpart, then inactive results are dropped."""
        if not self._context.get('website_id'):
            return super(View, self).get_inheriting_views_arch(model)

        views = super(View, self.with_context(active_test=False)).get_inheriting_views_arch(model)
        # prefer inactive website-specific views over active generic ones
        return views.filter_duplicate().filtered('active')
    @api.model
    def _get_filter_xmlid_query(self):
        """This method add some specific view that do not have XML ID
        """
        if not self._context.get('website_id'):
            return super()._get_filter_xmlid_query()
        else:
            # Also match COW'd views: they carry no xmlid of their own but
            # share their 'key' with a generic view that does.
            return """SELECT res_id
                      FROM ir_model_data
                      WHERE res_id IN %(res_ids)s
                      AND model = 'ir.ui.view'
                      AND module IN %(modules)s
                      UNION
                      SELECT sview.id
                      FROM ir_ui_view sview
                      INNER JOIN ir_ui_view oview USING (key)
                      INNER JOIN ir_model_data d
                      ON oview.id = d.res_id
                      AND d.model = 'ir.ui.view'
                      AND d.module IN %(modules)s
                      WHERE sview.id IN %(res_ids)s
                      AND sview.website_id IS NOT NULL
                      AND oview.website_id IS NULL;
                   """

    @api.model
    @tools.ormcache_context('self.env.uid', 'self.env.su', 'xml_id', keys=('website_id',))
    def get_view_id(self, xml_id):
        """If a website_id is in the context and the given xml_id is not an int
        then try to get the id of the specific view for that website, but
        fallback to the id of the generic view if there is no specific.

        If no website_id is in the context, it might randomly return the generic
        or the specific view, so it's probably not recommanded to use this
        method. `viewref` is probably more suitable.

        Archived views are ignored (unless the active_test context is set, but
        then the ormcache_context will not work as expected).
        """
        if 'website_id' in self._context and not isinstance(xml_id, int):
            current_website = self.env['website'].browse(self._context.get('website_id'))
            domain = ['&', ('key', '=', xml_id)] + current_website.website_domain()

            # 'order=website_id' makes the specific view win over the generic.
            view = self.sudo().search(domain, order='website_id', limit=1)
            if not view:
                _logger.warning("Could not find view object with xml_id '%s'", xml_id)
                raise ValueError('View %r in website %r not found' % (xml_id, self._context['website_id']))
            return view.id
        return super(View, self.sudo()).get_view_id(xml_id)
    @api.model
    def read_template(self, xml_id):
        """Read a qweb template, escalating to sudo when the view defines a
        custom visibility and the current user passes its visibility check."""
        view = self._view_obj(self.get_view_id(xml_id))
        if view.visibility and view._handle_visibility(do_raise=False):
            self = self.sudo()
        return super(View, self).read_template(xml_id)

    def _get_original_view(self):
        """Given a view, retrieve the original view it was COW'd from.
        The given view might already be the original one. In that case it will
        (and should) return itself.
        """
        self.ensure_one()
        # Only the original carries an ir.model.data entry (model_data_id).
        domain = [('key', '=', self.key), ('model_data_id', '!=', None)]
        return self.with_context(active_test=False).search(domain, limit=1)  # Useless limit has multiple xmlid should not be possible

    def _handle_visibility(self, do_raise=True):
        """ Check the visibility set on the main view and raise 403 if you should not have access.
            Order is: Public, Connected, Has group, Password

            It only check the visibility on the main content, others views called stay available in rpc.
        """
        error = False

        self = self.sudo()

        if self.visibility and not request.env.user.has_group('website.group_website_designer'):
            if (self.visibility == 'connected' and request.website.is_public_user()):
                error = werkzeug.exceptions.Forbidden()
            elif self.visibility == 'password' and \
                    (request.website.is_public_user() or self.id not in request.session.get('views_unlock', [])):
                pwd = request.params.get('visibility_password')
                if pwd and self.env.user._crypt_context().verify(
                        pwd, self.sudo().visibility_password):
                    # Remember the unlocked view for the rest of the session.
                    request.session.setdefault('views_unlock', list()).append(self.id)
                else:
                    error = werkzeug.exceptions.Forbidden('website_visibility_password_required')

            # elif self.visibility == 'restricted_group' and self.groups_id: or if groups_id set from backend
            try:
                self._check_view_access()
            except AccessError:
                error = werkzeug.exceptions.Forbidden()

        if error:
            if do_raise:
                raise error
            else:
                return False
        return True
    def _render(self, values=None, engine='ir.qweb', minimal_qcontext=False):
        """ Render the template. If website is enabled on request, then extend rendering context with website values. """
        self._handle_visibility(do_raise=True)
        new_context = dict(self._context)
        if request and getattr(request, 'is_frontend', False):

            editable = request.website.is_publisher()
            translatable = editable and self._context.get('lang') != request.website.default_lang_id.code
            editable = not translatable and editable

            # in edit mode ir.ui.view will tag nodes
            if not translatable and not self.env.context.get('rendering_bundle'):
                if editable:
                    new_context = dict(self._context, inherit_branding=True)
                elif request.env.user.has_group('website.group_website_publisher'):
                    new_context = dict(self._context, inherit_branding_auto=True)
            if values and 'main_object' in values:
                if request.env.user.has_group('website.group_website_publisher'):
                    # Link to the backend menu of the rendered record, if the
                    # model exposes one; fall back on the website config menu.
                    func = getattr(values['main_object'], 'get_backend_menu_id', False)
                    values['backend_menu_id'] = func and func() or self.env.ref('website.menu_website_configuration').id

        # Only swap environments when the context actually changed.
        if self._context != new_context:
            self = self.with_context(new_context)
        return super(View, self)._render(values, engine=engine, minimal_qcontext=minimal_qcontext)

    @api.model
    def _prepare_qcontext(self):
        """ Returns the qcontext : rendering context with website specific value (required
            to render website layout template)
        """
        qcontext = super(View, self)._prepare_qcontext()

        if request and getattr(request, 'is_frontend', False):
            Website = self.env['website']
            editable = request.website.is_publisher()
            translatable = editable and self._context.get('lang') != request.env['ir.http']._get_default_lang().code
            editable = not translatable and editable

            cur = Website.get_current_website()
            if self.env.user.has_group('website.group_website_publisher') and self.env.user.has_group('website.group_multi_website'):
                # Data for the website/company switchers of the frontend menu.
                qcontext['multi_website_websites_current'] = {'website_id': cur.id, 'name': cur.name, 'domain': cur._get_http_domain()}
                qcontext['multi_website_websites'] = [
                    {'website_id': website.id, 'name': website.name, 'domain': website._get_http_domain()}
                    for website in Website.search([]) if website != cur
                ]

                cur_company = self.env.company
                qcontext['multi_website_companies_current'] = {'company_id': cur_company.id, 'name': cur_company.name}
                qcontext['multi_website_companies'] = [
                    {'company_id': comp.id, 'name': comp.name}
                    for comp in self.env.user.company_ids if comp != cur_company
                ]

            # Values available to every frontend template.
            qcontext.update(dict(
                self._context.copy(),
                main_object=self,
                website=request.website,
                is_view_active=request.website.is_view_active,
                url_for=url_for,
                res_company=request.website.company_id.sudo(),
                languages=request.env['res.lang'].get_available(),
                translatable=translatable,
                editable=editable,
            ))

        return qcontext
@api.model
def get_default_lang_code(self):
website_id = self.env.context.get('website_id')
if website_id:
lang_code = self.env['website'].browse(website_id).default_lang_id.code
return lang_code
else:
return super(View, self).get_default_lang_code()
def redirect_to_page_manager(self):
return {
'type': 'ir.actions.act_url',
'url': '/website/pages',
'target': 'self',
}
def _read_template_keys(self):
return super(View, self)._read_template_keys() + ['website_id']
@api.model
def _save_oe_structure_hook(self):
res = super(View, self)._save_oe_structure_hook()
res['website_id'] = self.env['website'].get_current_website().id
return res
@api.model
def _set_noupdate(self):
'''If website is installed, any call to `save` from the frontend will
actually write on the specific view (or create it if not exist yet).
In that case, we don't want to flag the generic view as noupdate.
'''
if not self._context.get('website_id'):
super(View, self)._set_noupdate()
def save(self, value, xpath=None):
    """Save frontend-edited view content, diverting the write to the
    website-specific copy of this view when one already exists."""
    self.ensure_one()
    current_website = self.env['website'].get_current_website()
    # xpath condition is important to be sure we are editing a view and not
    # a field as in that case `self` might not exist (check commit message)
    if xpath and self.key and current_website:
        # The first time a generic view is edited, if multiple editable parts
        # were edited at the same time, multiple call to this method will be
        # done but the first one may create a website specific view. So if there
        # already is a website specific view, we need to divert the super to it.
        website_specific_view = self.env['ir.ui.view'].search([
            ('key', '=', self.key),
            ('website_id', '=', current_website.id)
        ], limit=1)
        if website_specific_view:
            # Rebind `self` so the super() call below writes on the
            # website-specific view rather than on the generic one.
            self = website_specific_view
    super(View, self).save(value, xpath=xpath)
# --------------------------------------------------------------------------
# Snippet saving
# --------------------------------------------------------------------------
@api.model
def _get_default_snippet_thumbnail(self, snippet_class=None):
    """Return the URL of the thumbnail shipped by the website module for
    ``snippet_class`` when the image exists on disk; otherwise fall back
    to the parent implementation."""
    if snippet_class:
        relative_path = '/static/src/img/snippets_thumbs/%s.png' % snippet_class
        for module_path in website.__path__:
            if os.path.isfile(module_path + relative_path):
                return '/website' + relative_path
    return super()._get_default_snippet_thumbnail()
@api.model
def _snippet_save_view_values_hook(self):
    """Propagate the contextual ``website_id`` into the values used to
    create the view saved for a snippet."""
    values = super()._snippet_save_view_values_hook()
    website_id = self.env.context.get('website_id')
    if website_id:
        values['website_id'] = website_id
    return values
| agpl-3.0 |
cyberark-bizdev/ansible | lib/ansible/utils/module_docs_fragments/ipa.py | 27 | 2627 | # Copyright (c) 2017-18, Ansible Project
# Copyright (c) 2017-18, Abhijeet Kasurde (akasurde@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    """Shared documentation fragment for the FreeIPA/IPA modules.

    Ansible merges the ``DOCUMENTATION`` YAML below into every module that
    declares this fragment, so the common connection options (host, port,
    credentials, protocol, certificate validation) are documented once.
    """

    # Parameters for FreeIPA/IPA modules
    DOCUMENTATION = '''
options:
  ipa_port:
    description:
    - Port of FreeIPA / IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_PORT) will be used instead.
    - If both the environment variable C(IPA_PORT) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: 443
  ipa_host:
    description:
    - IP or hostname of IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_HOST) will be used instead.
    - If both the environment variable C(IPA_HOST) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: ipa.example.com
  ipa_user:
    description:
    - Administrative account used on IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_USER) will be used instead.
    - If both the environment variable C(IPA_USER) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: admin
  ipa_pass:
    description:
    - Password of administrative user.
    - If the value is not specified in the task, the value of environment variable C(IPA_PASS) will be used instead.
    - If both the environment variable C(IPA_PASS) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    required: true
  ipa_prot:
    description:
    - Protocol used by IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_PROT) will be used instead.
    - If both the environment variable C(IPA_PROT) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: https
    choices: ["http", "https"]
  validate_certs:
    description:
    - This only applies if C(ipa_prot) is I(https).
    - If set to C(no), the SSL certificates will not be validated.
    - This should only set to C(no) used on personally controlled sites using self-signed certificates.
    default: true
'''
| gpl-3.0 |
40223220/2015cc | static/Brython3.1.1-20150328-091302/Lib/datetime.py | 628 | 75044 | """Concrete date/time and related types.
See http://www.iana.org/time-zones/repository/tz-link.html for
time zone and DST data sources.
"""
import time as _time
import math as _math
def _cmp(x, y):
return 0 if x == y else 1 if x > y else -1
MINYEAR = 1
MAXYEAR = 9999
_MAXORDINAL = 3652059 # date.max.toordinal()
# Utility functions, adapted from Python's Demo/classes/Dates.py, which
# also assumes the current Gregorian calendar indefinitely extended in
# both directions. Difference: Dates.py calls January 1 of year 0 day
# number 1. The code here calls January 1 of year 1 day number 1. This is
# to match the definition of the "proleptic Gregorian" calendar in Dershowitz
# and Reingold's "Calendrical Calculations", where it's the base calendar
# for all computations. See the book for algorithms for converting between
# proleptic Gregorian ordinals and many other calendar systems.
_DAYS_IN_MONTH = [None, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
_DAYS_BEFORE_MONTH = [None]
dbm = 0
for dim in _DAYS_IN_MONTH[1:]:
_DAYS_BEFORE_MONTH.append(dbm)
dbm += dim
del dbm, dim
def _is_leap(year):
"year -> 1 if leap year, else 0."
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def _days_before_year(year):
"year -> number of days before January 1st of year."
y = year - 1
return y*365 + y//4 - y//100 + y//400
def _days_in_month(year, month):
    "year, month -> number of days in that month in that year."
    assert 1 <= month <= 12, month
    # Only February varies with the year.
    if month != 2 or not _is_leap(year):
        return _DAYS_IN_MONTH[month]
    return 29
def _days_before_month(year, month):
    "year, month -> number of days in year preceding first day of month."
    assert 1 <= month <= 12, 'month must be in 1..12'
    # After February, a leap year contributes one extra day.
    leap_adjust = month > 2 and _is_leap(year)
    return _DAYS_BEFORE_MONTH[month] + leap_adjust
def _ymd2ord(year, month, day):
    "year, month, day -> ordinal, considering 01-Jan-0001 as day 1."
    assert 1 <= month <= 12, 'month must be in 1..12'
    dim = _days_in_month(year, month)
    assert 1 <= day <= dim, ('day must be in 1..%d' % dim)
    ordinal = _days_before_year(year)
    ordinal += _days_before_month(year, month)
    return ordinal + day
_DI400Y = _days_before_year(401) # number of days in 400 years
_DI100Y = _days_before_year(101) # " " " " 100 "
_DI4Y = _days_before_year(5) # " " " " 4 "
# A 4-year cycle has an extra leap day over what we'd get from pasting
# together 4 single years.
assert _DI4Y == 4 * 365 + 1
# Similarly, a 400-year cycle has an extra leap day over what we'd get from
# pasting together 4 100-year cycles.
assert _DI400Y == 4 * _DI100Y + 1
# OTOH, a 100-year cycle has one fewer leap day than we'd get from
# pasting together 25 4-year cycles.
assert _DI100Y == 25 * _DI4Y - 1
def _ord2ymd(n):
    "ordinal -> (year, month, day), considering 01-Jan-0001 as day 1."

    # n is a 1-based index, starting at 1-Jan-1.  The pattern of leap years
    # repeats exactly every 400 years.  The basic strategy is to find the
    # closest 400-year boundary at or before n, then work with the offset
    # from that boundary to n.  Life is much clearer if we subtract 1 from
    # n first -- then the values of n at 400-year boundaries are exactly
    # those divisible by _DI400Y:
    #
    #     D  M   Y            n              n-1
    #     -- --- ----        ----------     ----------------
    #     31 Dec -400        -_DI400Y       -_DI400Y -1
    #      1 Jan -399         -_DI400Y +1   -_DI400Y      400-year boundary
    #     ...
    #     30 Dec  000        -1             -2
    #     31 Dec  000         0             -1
    #      1 Jan  001         1              0            400-year boundary
    #      2 Jan  001         2              1
    #      3 Jan  001         3              2
    #     ...
    #     31 Dec  400         _DI400Y        _DI400Y -1
    #      1 Jan  401         _DI400Y +1     _DI400Y      400-year boundary
    n -= 1
    n400, n = divmod(n, _DI400Y)
    year = n400 * 400 + 1   # ..., -399, 1, 401, ...

    # Now n is the (non-negative) offset, in days, from January 1 of year, to
    # the desired date.  Now compute how many 100-year cycles precede n.
    # Note that it's possible for n100 to equal 4!  In that case 4 full
    # 100-year cycles precede the desired day, which implies the desired
    # day is December 31 at the end of a 400-year cycle.
    n100, n = divmod(n, _DI100Y)

    # Now compute how many 4-year cycles precede it.
    n4, n = divmod(n, _DI4Y)

    # And now how many single years.  Again n1 can be 4, and again meaning
    # that the desired day is December 31 at the end of the 4-year cycle.
    n1, n = divmod(n, 365)

    year += n100 * 100 + n4 * 4 + n1
    if n1 == 4 or n100 == 4:
        # Dec 31 of a leap year at the end of a 4- or 400-year cycle.
        assert n == 0
        return year-1, 12, 31

    # Now the year is correct, and n is the offset from January 1.  We find
    # the month via an estimate that's either exact or one too large.
    leapyear = n1 == 3 and (n4 != 24 or n100 == 3)
    assert leapyear == _is_leap(year)
    month = (n + 50) >> 5
    preceding = _DAYS_BEFORE_MONTH[month] + (month > 2 and leapyear)
    if preceding > n:  # estimate is too large
        month -= 1
        preceding -= _DAYS_IN_MONTH[month] + (month == 2 and leapyear)
    n -= preceding
    assert 0 <= n < _days_in_month(year, month)

    # Now the year and month are correct, and n is the offset from the
    # start of that month:  we're done!
    return year, month, n+1
# Month and day names. For localized versions, see the calendar module.
_MONTHNAMES = [None, "Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
_DAYNAMES = [None, "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
def _build_struct_time(y, m, d, hh, mm, ss, dstflag):
    """Assemble a time.struct_time from broken-out date/time fields."""
    # Ordinal of 1-Jan-0001 is 1 and it was a Monday, hence the +6 shift
    # to make Monday == 0.
    weekday = (_ymd2ord(y, m, d) + 6) % 7
    yearday = _days_before_month(y, m) + d
    fields = (y, m, d, hh, mm, ss, weekday, yearday, dstflag)
    return _time.struct_time(fields)
def _format_time(hh, mm, ss, us):
# Skip trailing microseconds when us==0.
result = "%02d:%02d:%02d" % (hh, mm, ss)
if us:
result += ".%06d" % us
return result
# Correctly substitute for %z and %Z escapes in strftime formats.
def _wrap_strftime(object, format, timetuple):
    """Delegate to time.strftime after expanding the %f, %z and %Z escapes
    (which the time module does not handle) from `object`'s attributes."""
    # Don't call utcoffset() or tzname() unless actually needed.
    freplace = None  # the string to use for %f
    zreplace = None  # the string to use for %z
    Zreplace = None  # the string to use for %Z

    # Scan format for %z and %Z escapes, replacing as needed.
    newformat = []
    push = newformat.append
    i, n = 0, len(format)
    while i < n:
        ch = format[i]
        i += 1
        if ch == '%':
            if i < n:
                ch = format[i]
                i += 1
                if ch == 'f':
                    # %f: microseconds, zero-padded to 6 digits; objects
                    # without a microsecond attribute format as 000000.
                    if freplace is None:
                        freplace = '%06d' % getattr(object,
                                                    'microsecond', 0)
                    newformat.append(freplace)
                elif ch == 'z':
                    # %z: +HHMM/-HHMM UTC offset, empty for naive objects.
                    if zreplace is None:
                        zreplace = ""
                        if hasattr(object, "utcoffset"):
                            offset = object.utcoffset()
                            if offset is not None:
                                sign = '+'
                                if offset.days < 0:
                                    offset = -offset
                                    sign = '-'
                                h, m = divmod(offset, timedelta(hours=1))
                                assert not m % timedelta(minutes=1), "whole minute"
                                m //= timedelta(minutes=1)
                                zreplace = '%c%02d%02d' % (sign, h, m)
                    assert '%' not in zreplace
                    newformat.append(zreplace)
                elif ch == 'Z':
                    # %Z: time zone name, empty for naive objects.
                    if Zreplace is None:
                        Zreplace = ""
                        if hasattr(object, "tzname"):
                            s = object.tzname()
                            if s is not None:
                                # strftime is going to have at this: escape %
                                Zreplace = s.replace('%', '%%')
                    newformat.append(Zreplace)
                else:
                    push('%')
                    push(ch)
            else:
                push('%')
        else:
            push(ch)
    newformat = "".join(newformat)
    return _time.strftime(newformat, timetuple)
def _call_tzinfo_method(tzinfo, methname, tzinfoarg):
if tzinfo is None:
return None
return getattr(tzinfo, methname)(tzinfoarg)
# Just raise TypeError if the arg isn't None or a string.
def _check_tzname(name):
if name is not None and not isinstance(name, str):
raise TypeError("tzinfo.tzname() must return None or string, "
"not '%s'" % type(name))
# name is the offset-producing method, "utcoffset" or "dst".
# offset is what it returned.
# If offset isn't None or timedelta, raises TypeError.
# If offset is None, returns None.
# Else offset is checked for being in range, and a whole # of minutes.
# If it is, its integer value is returned. Else ValueError is raised.
def _check_utc_offset(name, offset):
assert name in ("utcoffset", "dst")
if offset is None:
return
if not isinstance(offset, timedelta):
raise TypeError("tzinfo.%s() must return None "
"or timedelta, not '%s'" % (name, type(offset)))
if offset % timedelta(minutes=1) or offset.microseconds:
raise ValueError("tzinfo.%s() must return a whole number "
"of minutes, got %s" % (name, offset))
if not -timedelta(1) < offset < timedelta(1):
raise ValueError("%s()=%s, must be must be strictly between"
" -timedelta(hours=24) and timedelta(hours=24)"
% (name, offset))
def _check_date_fields(year, month, day):
    """Validate year/month/day, raising TypeError or ValueError on bad input."""
    if not isinstance(year, int):
        raise TypeError('int expected')
    if year < MINYEAR or year > MAXYEAR:
        raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
    if month < 1 or month > 12:
        raise ValueError('month must be in 1..12', month)
    # The valid day range depends on both month and (for February) year.
    dim = _days_in_month(year, month)
    if day < 1 or day > dim:
        raise ValueError('day must be in 1..%d' % dim, day)
def _check_time_fields(hour, minute, second, microsecond):
if not isinstance(hour, int):
raise TypeError('int expected')
if not 0 <= hour <= 23:
raise ValueError('hour must be in 0..23', hour)
if not 0 <= minute <= 59:
raise ValueError('minute must be in 0..59', minute)
if not 0 <= second <= 59:
raise ValueError('second must be in 0..59', second)
if not 0 <= microsecond <= 999999:
raise ValueError('microsecond must be in 0..999999', microsecond)
def _check_tzinfo_arg(tz):
if tz is not None and not isinstance(tz, tzinfo):
raise TypeError("tzinfo argument must be None or of a tzinfo subclass")
def _cmperror(x, y):
raise TypeError("can't compare '%s' to '%s'" % (
type(x).__name__, type(y).__name__))
class timedelta:
    """Represent the difference between two datetime objects.

    Supported operators:

    - add, subtract timedelta
    - unary plus, minus, abs
    - compare to timedelta
    - multiply, divide by int

    In addition, datetime supports subtraction of two datetime objects
    returning a timedelta, and addition or subtraction of a datetime
    and a timedelta giving a datetime.

    Representation: (days, seconds, microseconds).  Why?  Because I
    felt like it.
    """
    # Normalized invariants (established by __new__):
    #   0 <= _seconds < 24*3600, 0 <= _microseconds < 1000000;
    # only _days may be negative.
    __slots__ = '_days', '_seconds', '_microseconds'

    def __new__(cls, days=0, seconds=0, microseconds=0,
                milliseconds=0, minutes=0, hours=0, weeks=0):
        # Doing this efficiently and accurately in C is going to be difficult
        # and error-prone, due to ubiquitous overflow possibilities, and that
        # C double doesn't have enough bits of precision to represent
        # microseconds over 10K years faithfully.  The code here tries to make
        # explicit where go-fast assumptions can be relied on, in order to
        # guide the C implementation; it's way more convoluted than speed-
        # ignoring auto-overflow-to-long idiomatic Python could be.

        # XXX Check that all inputs are ints or floats.

        # Final values, all integer.
        # s and us fit in 32-bit signed ints; d isn't bounded.
        d = s = us = 0

        # Normalize everything to days, seconds, microseconds.
        days += weeks*7
        seconds += minutes*60 + hours*3600
        microseconds += milliseconds*1000

        # Get rid of all fractions, and normalize s and us.
        # Take a deep breath <wink>.
        if isinstance(days, float):
            dayfrac, days = _math.modf(days)
            daysecondsfrac, daysecondswhole = _math.modf(dayfrac * (24.*3600.))
            assert daysecondswhole == int(daysecondswhole)  # can't overflow
            s = int(daysecondswhole)
            assert days == int(days)
            d = int(days)
        else:
            daysecondsfrac = 0.0
            d = days
        assert isinstance(daysecondsfrac, float)
        assert abs(daysecondsfrac) <= 1.0
        assert isinstance(d, int)
        assert abs(s) <= 24 * 3600
        # days isn't referenced again before redefinition

        if isinstance(seconds, float):
            secondsfrac, seconds = _math.modf(seconds)
            assert seconds == int(seconds)
            seconds = int(seconds)
            secondsfrac += daysecondsfrac
            assert abs(secondsfrac) <= 2.0
        else:
            secondsfrac = daysecondsfrac
        # daysecondsfrac isn't referenced again
        assert isinstance(secondsfrac, float)
        assert abs(secondsfrac) <= 2.0

        assert isinstance(seconds, int)
        days, seconds = divmod(seconds, 24*3600)
        d += days
        s += int(seconds)    # can't overflow
        assert isinstance(s, int)
        assert abs(s) <= 2 * 24 * 3600
        # seconds isn't referenced again before redefinition

        usdouble = secondsfrac * 1e6
        assert abs(usdouble) < 2.1e6    # exact value not critical
        # secondsfrac isn't referenced again

        if isinstance(microseconds, float):
            microseconds += usdouble
            microseconds = round(microseconds, 0)
            seconds, microseconds = divmod(microseconds, 1e6)
            assert microseconds == int(microseconds)
            assert seconds == int(seconds)
            days, seconds = divmod(seconds, 24.*3600.)
            assert days == int(days)
            assert seconds == int(seconds)
            d += int(days)
            s += int(seconds)   # can't overflow
            assert isinstance(s, int)
            assert abs(s) <= 3 * 24 * 3600
        else:
            seconds, microseconds = divmod(microseconds, 1000000)
            days, seconds = divmod(seconds, 24*3600)
            d += days
            s += int(seconds)    # can't overflow
            assert isinstance(s, int)
            assert abs(s) <= 3 * 24 * 3600
            microseconds = float(microseconds)
            microseconds += usdouble
            microseconds = round(microseconds, 0)
        assert abs(s) <= 3 * 24 * 3600
        assert abs(microseconds) < 3.1e6

        # Just a little bit of carrying possible for microseconds and seconds.
        assert isinstance(microseconds, float)
        assert int(microseconds) == microseconds
        us = int(microseconds)
        seconds, us = divmod(us, 1000000)
        s += seconds    # can't overflow
        assert isinstance(s, int)
        days, s = divmod(s, 24*3600)
        d += days

        assert isinstance(d, int)
        assert isinstance(s, int) and 0 <= s < 24*3600
        assert isinstance(us, int) and 0 <= us < 1000000

        self = object.__new__(cls)

        self._days = d
        self._seconds = s
        self._microseconds = us
        if abs(d) > 999999999:
            raise OverflowError("timedelta # of days is too large: %d" % d)

        return self

    def __repr__(self):
        # Omit trailing zero fields, mirroring the constructor's signature.
        if self._microseconds:
            return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
                                       self._days,
                                       self._seconds,
                                       self._microseconds)
        if self._seconds:
            return "%s(%d, %d)" % ('datetime.' + self.__class__.__name__,
                                   self._days,
                                   self._seconds)
        return "%s(%d)" % ('datetime.' + self.__class__.__name__, self._days)

    def __str__(self):
        # "[D day[s], ]H:MM:SS[.ffffff]"
        mm, ss = divmod(self._seconds, 60)
        hh, mm = divmod(mm, 60)
        s = "%d:%02d:%02d" % (hh, mm, ss)
        if self._days:
            def plural(n):
                return n, abs(n) != 1 and "s" or ""
            s = ("%d day%s, " % plural(self._days)) + s
        if self._microseconds:
            s = s + ".%06d" % self._microseconds
        return s

    def total_seconds(self):
        """Total seconds in the duration."""
        return ((self.days * 86400 + self.seconds)*10**6 +
                self.microseconds) / 10**6

    # Read-only field accessors
    @property
    def days(self):
        """days"""
        return self._days

    @property
    def seconds(self):
        """seconds"""
        return self._seconds

    @property
    def microseconds(self):
        """microseconds"""
        return self._microseconds

    def __add__(self, other):
        if isinstance(other, timedelta):
            # for CPython compatibility, we cannot use
            # our __class__ here, but need a real timedelta
            return timedelta(self._days + other._days,
                             self._seconds + other._seconds,
                             self._microseconds + other._microseconds)
        return NotImplemented

    __radd__ = __add__

    def __sub__(self, other):
        if isinstance(other, timedelta):
            # for CPython compatibility, we cannot use
            # our __class__ here, but need a real timedelta
            return timedelta(self._days - other._days,
                             self._seconds - other._seconds,
                             self._microseconds - other._microseconds)
        return NotImplemented

    def __rsub__(self, other):
        if isinstance(other, timedelta):
            return -self + other
        return NotImplemented

    def __neg__(self):
        # for CPython compatibility, we cannot use
        # our __class__ here, but need a real timedelta
        return timedelta(-self._days,
                         -self._seconds,
                         -self._microseconds)

    def __pos__(self):
        return self

    def __abs__(self):
        # Only _days can be negative thanks to the normalized invariants.
        if self._days < 0:
            return -self
        else:
            return self

    def __mul__(self, other):
        if isinstance(other, int):
            # for CPython compatibility, we cannot use
            # our __class__ here, but need a real timedelta
            return timedelta(self._days * other,
                             self._seconds * other,
                             self._microseconds * other)
        if isinstance(other, float):
            # Multiply by the exact rational a/b to avoid float error.
            a, b = other.as_integer_ratio()
            return self * a / b
        return NotImplemented

    __rmul__ = __mul__

    def _to_microseconds(self):
        # Collapse the (days, seconds, microseconds) triple to a single int.
        return ((self._days * (24*3600) + self._seconds) * 1000000 +
                self._microseconds)

    def __floordiv__(self, other):
        if not isinstance(other, (int, timedelta)):
            return NotImplemented
        usec = self._to_microseconds()
        if isinstance(other, timedelta):
            return usec // other._to_microseconds()
        if isinstance(other, int):
            return timedelta(0, 0, usec // other)

    def __truediv__(self, other):
        if not isinstance(other, (int, float, timedelta)):
            return NotImplemented
        usec = self._to_microseconds()
        if isinstance(other, timedelta):
            return usec / other._to_microseconds()
        if isinstance(other, int):
            return timedelta(0, 0, usec / other)
        if isinstance(other, float):
            a, b = other.as_integer_ratio()
            return timedelta(0, 0, b * usec / a)

    def __mod__(self, other):
        if isinstance(other, timedelta):
            r = self._to_microseconds() % other._to_microseconds()
            return timedelta(0, 0, r)
        return NotImplemented

    def __divmod__(self, other):
        if isinstance(other, timedelta):
            q, r = divmod(self._to_microseconds(),
                          other._to_microseconds())
            return q, timedelta(0, 0, r)
        return NotImplemented

    # Comparisons of timedelta objects with other.

    def __eq__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) == 0
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) != 0
        else:
            return True

    def __le__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) <= 0
        else:
            _cmperror(self, other)

    def __lt__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) < 0
        else:
            _cmperror(self, other)

    def __ge__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) >= 0
        else:
            _cmperror(self, other)

    def __gt__(self, other):
        if isinstance(other, timedelta):
            return self._cmp(other) > 0
        else:
            _cmperror(self, other)

    def _cmp(self, other):
        # Normalized state makes tuple comparison of the fields sufficient.
        assert isinstance(other, timedelta)
        return _cmp(self._getstate(), other._getstate())

    def __hash__(self):
        return hash(self._getstate())

    def __bool__(self):
        return (self._days != 0 or
                self._seconds != 0 or
                self._microseconds != 0)

    # Pickle support.

    def _getstate(self):
        return (self._days, self._seconds, self._microseconds)

    def __reduce__(self):
        return (self.__class__, self._getstate())

timedelta.min = timedelta(-999999999)
timedelta.max = timedelta(days=999999999, hours=23, minutes=59, seconds=59,
                          microseconds=999999)
timedelta.resolution = timedelta(microseconds=1)
class date:
    """Concrete date type.

    Constructors:

    __new__()
    fromtimestamp()
    today()
    fromordinal()

    Operators:

    __repr__, __str__
    __cmp__, __hash__
    __add__, __radd__, __sub__ (add/radd only with timedelta arg)

    Methods:

    timetuple()
    toordinal()
    weekday()
    isoweekday(), isocalendar(), isoformat()
    ctime()
    strftime()

    Properties (readonly):
    year, month, day
    """
    __slots__ = '_year', '_month', '_day'

    def __new__(cls, year, month=None, day=None):
        """Constructor.

        Arguments:

        year, month, day (required, base 1)
        """
        if (isinstance(year, bytes) and len(year) == 4 and
            1 <= year[2] <= 12 and month is None):  # Month is sane
            # Pickle support: `year` is actually the packed 4-byte state.
            self = object.__new__(cls)
            self.__setstate(year)
            return self
        _check_date_fields(year, month, day)
        self = object.__new__(cls)
        self._year = year
        self._month = month
        self._day = day
        return self

    # Additional constructors

    @classmethod
    def fromtimestamp(cls, t):
        "Construct a date from a POSIX timestamp (like time.time())."
        y, m, d, hh, mm, ss, weekday, jday, dst = _time.localtime(t)
        return cls(y, m, d)

    @classmethod
    def today(cls):
        "Construct a date from time.time()."
        t = _time.time()
        return cls.fromtimestamp(t)

    @classmethod
    def fromordinal(cls, n):
        """Contruct a date from a proleptic Gregorian ordinal.

        January 1 of year 1 is day 1.  Only the year, month and day are
        non-zero in the result.
        """
        y, m, d = _ord2ymd(n)
        return cls(y, m, d)

    # Conversions to string

    def __repr__(self):
        """Convert to formal string, for repr().

        >>> dt = datetime(2010, 1, 1)
        >>> repr(dt)
        'datetime.datetime(2010, 1, 1, 0, 0)'

        >>> dt = datetime(2010, 1, 1, tzinfo=timezone.utc)
        >>> repr(dt)
        'datetime.datetime(2010, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)'
        """
        return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__,
                                   self._year,
                                   self._month,
                                   self._day)
    # XXX These shouldn't depend on time.localtime(), because that
    # clips the usable dates to [1970 .. 2038).  At least ctime() is
    # easily done without using strftime() -- that's better too because
    # strftime("%c", ...) is locale specific.

    def ctime(self):
        "Return ctime() style string."
        weekday = self.toordinal() % 7 or 7
        return "%s %s %2d 00:00:00 %04d" % (
            _DAYNAMES[weekday],
            _MONTHNAMES[self._month],
            self._day, self._year)

    def strftime(self, fmt):
        "Format using strftime()."
        return _wrap_strftime(self, fmt, self.timetuple())

    def __format__(self, fmt):
        # An empty format spec means str(self), per the format protocol.
        if len(fmt) != 0:
            return self.strftime(fmt)
        return str(self)

    def isoformat(self):
        """Return the date formatted according to ISO.

        This is 'YYYY-MM-DD'.

        References:
        - http://www.w3.org/TR/NOTE-datetime
        - http://www.cl.cam.ac.uk/~mgk25/iso-time.html
        """
        return "%04d-%02d-%02d" % (self._year, self._month, self._day)

    __str__ = isoformat

    # Read-only field accessors
    @property
    def year(self):
        """year (1-9999)"""
        return self._year

    @property
    def month(self):
        """month (1-12)"""
        return self._month

    @property
    def day(self):
        """day (1-31)"""
        return self._day

    # Standard conversions, __cmp__, __hash__ (and helpers)

    def timetuple(self):
        "Return local time tuple compatible with time.localtime()."
        return _build_struct_time(self._year, self._month, self._day,
                                  0, 0, 0, -1)

    def toordinal(self):
        """Return proleptic Gregorian ordinal for the year, month and day.

        January 1 of year 1 is day 1.  Only the year, month and day values
        contribute to the result.
        """
        return _ymd2ord(self._year, self._month, self._day)

    def replace(self, year=None, month=None, day=None):
        """Return a new date with new values for the specified fields."""
        if year is None:
            year = self._year
        if month is None:
            month = self._month
        if day is None:
            day = self._day
        _check_date_fields(year, month, day)
        return date(year, month, day)

    # Comparisons of date objects with other.

    def __eq__(self, other):
        if isinstance(other, date):
            return self._cmp(other) == 0
        return NotImplemented

    def __ne__(self, other):
        if isinstance(other, date):
            return self._cmp(other) != 0
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, date):
            return self._cmp(other) <= 0
        return NotImplemented

    def __lt__(self, other):
        if isinstance(other, date):
            return self._cmp(other) < 0
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, date):
            return self._cmp(other) >= 0
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, date):
            return self._cmp(other) > 0
        return NotImplemented

    def _cmp(self, other):
        assert isinstance(other, date)
        y, m, d = self._year, self._month, self._day
        y2, m2, d2 = other._year, other._month, other._day
        return _cmp((y, m, d), (y2, m2, d2))

    def __hash__(self):
        "Hash."
        return hash(self._getstate())

    # Computations

    def __add__(self, other):
        "Add a date to a timedelta."
        if isinstance(other, timedelta):
            o = self.toordinal() + other.days
            if 0 < o <= _MAXORDINAL:
                return date.fromordinal(o)
            raise OverflowError("result out of range")
        return NotImplemented

    __radd__ = __add__

    def __sub__(self, other):
        """Subtract two dates, or a date and a timedelta."""
        if isinstance(other, timedelta):
            return self + timedelta(-other.days)
        if isinstance(other, date):
            days1 = self.toordinal()
            days2 = other.toordinal()
            return timedelta(days1 - days2)
        return NotImplemented

    def weekday(self):
        "Return day of the week, where Monday == 0 ... Sunday == 6."
        return (self.toordinal() + 6) % 7

    # Day-of-the-week and week-of-the-year, according to ISO

    def isoweekday(self):
        "Return day of the week, where Monday == 1 ... Sunday == 7."
        # 1-Jan-0001 is a Monday
        return self.toordinal() % 7 or 7

    def isocalendar(self):
        """Return a 3-tuple containing ISO year, week number, and weekday.

        The first ISO week of the year is the (Mon-Sun) week
        containing the year's first Thursday; everything else derives
        from that.

        The first week is 1; Monday is 1 ... Sunday is 7.

        ISO calendar algorithm taken from
        http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm
        """
        year = self._year
        week1monday = _isoweek1monday(year)
        today = _ymd2ord(self._year, self._month, self._day)
        # Internally, week and day have origin 0
        week, day = divmod(today - week1monday, 7)
        if week < 0:
            # Date falls in the last ISO week of the previous year.
            year -= 1
            week1monday = _isoweek1monday(year)
            week, day = divmod(today - week1monday, 7)
        elif week >= 52:
            if today >= _isoweek1monday(year+1):
                # Date falls in week 1 of the next ISO year.
                year += 1
                week = 0
        return year, week+1, day+1

    # Pickle support.

    def _getstate(self):
        yhi, ylo = divmod(self._year, 256)
        return bytes([yhi, ylo, self._month, self._day]),

    def __setstate(self, string):
        if len(string) != 4 or not (1 <= string[2] <= 12):
            raise TypeError("not enough arguments")
        yhi, ylo, self._month, self._day = string
        self._year = yhi * 256 + ylo

    def __reduce__(self):
        return (self.__class__, self._getstate())

_date_class = date  # so functions w/ args named "date" can get at the class

date.min = date(1, 1, 1)
date.max = date(9999, 12, 31)
date.resolution = timedelta(days=1)
class tzinfo:
    """Abstract base class for time zone info classes.

    Subclasses must override the name(), utcoffset() and dst() methods.
    """
    __slots__ = ()

    def tzname(self, dt):
        "datetime -> string name of time zone."
        raise NotImplementedError("tzinfo subclass must override tzname()")

    def utcoffset(self, dt):
        "datetime -> minutes east of UTC (negative for west of UTC)"
        raise NotImplementedError("tzinfo subclass must override utcoffset()")

    def dst(self, dt):
        """datetime -> DST offset in minutes east of UTC.

        Return 0 if DST not in effect.  utcoffset() must include the DST
        offset.
        """
        raise NotImplementedError("tzinfo subclass must override dst()")

    def fromutc(self, dt):
        "datetime in UTC -> datetime in local time."

        if not isinstance(dt, datetime):
            raise TypeError("fromutc() requires a datetime argument")
        if dt.tzinfo is not self:
            raise ValueError("dt.tzinfo is not self")

        dtoff = dt.utcoffset()
        if dtoff is None:
            raise ValueError("fromutc() requires a non-None utcoffset() "
                             "result")

        # See the long comment block at the end of this file for an
        # explanation of this algorithm.
        dtdst = dt.dst()
        if dtdst is None:
            raise ValueError("fromutc() requires a non-None dst() result")
        # First shift by the standard (non-DST) offset, then re-query dst()
        # at the shifted time to settle on a consistent local result.
        delta = dtoff - dtdst
        if delta:
            dt += delta
            dtdst = dt.dst()
            if dtdst is None:
                raise ValueError("fromutc(): dt.dst gave inconsistent "
                                 "results; cannot convert")
        return dt + dtdst

    # Pickle support.

    def __reduce__(self):
        # Honour optional __getinitargs__/__getstate__ hooks so subclasses
        # pickle correctly even without an instance __dict__.
        getinitargs = getattr(self, "__getinitargs__", None)
        if getinitargs:
            args = getinitargs()
        else:
            args = ()
        getstate = getattr(self, "__getstate__", None)
        if getstate:
            state = getstate()
        else:
            state = getattr(self, "__dict__", None) or None
        if state is None:
            return (self.__class__, args)
        else:
            return (self.__class__, args, state)

_tzinfo_class = tzinfo
class time:
    """Time with time zone.
    Constructors:
    __new__()
    Operators:
    __repr__, __str__
    __cmp__, __hash__
    Methods:
    strftime()
    isoformat()
    utcoffset()
    tzname()
    dst()
    Properties (readonly):
    hour, minute, second, microsecond, tzinfo
    """
    def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None):
        """Constructor.
        All arguments are optional: hour, minute, second and microsecond
        default to zero, and tzinfo defaults to None.
        """
        self = object.__new__(cls)
        if isinstance(hour, bytes) and len(hour) == 6:
            # Pickle support: a 6-byte state string arrives as "hour" and the
            # tzinfo object (if any) rides along in "minute".
            self.__setstate(hour, minute or None)
            return self
        _check_tzinfo_arg(tzinfo)
        _check_time_fields(hour, minute, second, microsecond)
        self._hour = hour
        self._minute = minute
        self._second = second
        self._microsecond = microsecond
        self._tzinfo = tzinfo
        return self
    # Read-only field accessors
    @property
    def hour(self):
        """hour (0-23)"""
        return self._hour
    @property
    def minute(self):
        """minute (0-59)"""
        return self._minute
    @property
    def second(self):
        """second (0-59)"""
        return self._second
    @property
    def microsecond(self):
        """microsecond (0-999999)"""
        return self._microsecond
    @property
    def tzinfo(self):
        """timezone info object"""
        return self._tzinfo
    # Standard conversions, __hash__ (and helpers)
    # Comparisons of time objects with other.
    # __eq__/__ne__ pass allow_mixed=True so comparing a naive time with an
    # aware one answers "not equal" instead of raising TypeError; the ordering
    # operators keep the strict behavior and raise via _cmperror.
    def __eq__(self, other):
        if isinstance(other, time):
            return self._cmp(other, allow_mixed=True) == 0
        else:
            return False
    def __ne__(self, other):
        if isinstance(other, time):
            return self._cmp(other, allow_mixed=True) != 0
        else:
            return True
    def __le__(self, other):
        if isinstance(other, time):
            return self._cmp(other) <= 0
        else:
            _cmperror(self, other)
    def __lt__(self, other):
        if isinstance(other, time):
            return self._cmp(other) < 0
        else:
            _cmperror(self, other)
    def __ge__(self, other):
        if isinstance(other, time):
            return self._cmp(other) >= 0
        else:
            _cmperror(self, other)
    def __gt__(self, other):
        if isinstance(other, time):
            return self._cmp(other) > 0
        else:
            _cmperror(self, other)
    def _cmp(self, other, allow_mixed=False):
        """Three-way compare; returns <0, 0 or >0 (2 for incomparable mixed)."""
        assert isinstance(other, time)
        mytz = self._tzinfo
        ottz = other._tzinfo
        myoff = otoff = None
        # Identical tzinfo objects (or both None) can be compared field by
        # field without consulting utcoffset().
        if mytz is ottz:
            base_compare = True
        else:
            myoff = self.utcoffset()
            otoff = other.utcoffset()
            base_compare = myoff == otoff
        if base_compare:
            return _cmp((self._hour, self._minute, self._second,
                         self._microsecond),
                        (other._hour, other._minute, other._second,
                         other._microsecond))
        if myoff is None or otoff is None:
            if allow_mixed:
                return 2 # arbitrary non-zero value
            else:
                raise TypeError("cannot compare naive and aware times")
        # Different offsets: compare wall-clock minutes shifted to UTC.
        # Seconds/microseconds are unaffected since offsets are whole minutes.
        myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1)
        othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1)
        return _cmp((myhhmm, self._second, self._microsecond),
                    (othhmm, other._second, other._microsecond))
    def __hash__(self):
        """Hash."""
        tzoff = self.utcoffset()
        if not tzoff: # zero or None
            return hash(self._getstate()[0])
        # Aware times are normalized to their UTC hour/minute before hashing
        # so that equal times (per __eq__) hash equal.
        h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff,
                      timedelta(hours=1))
        assert not m % timedelta(minutes=1), "whole minute"
        m //= timedelta(minutes=1)
        if 0 <= h < 24:
            return hash(time(h, m, self.second, self.microsecond))
        return hash((h, m, self.second, self.microsecond))
    # Conversion to string
    def _tzstr(self, sep=":"):
        """Return formatted timezone offset (+xx:xx) or None."""
        off = self.utcoffset()
        if off is not None:
            if off.days < 0:
                sign = "-"
                off = -off
            else:
                sign = "+"
            hh, mm = divmod(off, timedelta(hours=1))
            assert not mm % timedelta(minutes=1), "whole minute"
            mm //= timedelta(minutes=1)
            assert 0 <= hh < 24
            off = "%s%02d%s%02d" % (sign, hh, sep, mm)
        return off
    def __repr__(self):
        """Convert to formal string, for repr()."""
        # Trailing zero fields (microsecond, then second) are omitted.
        if self._microsecond != 0:
            s = ", %d, %d" % (self._second, self._microsecond)
        elif self._second != 0:
            s = ", %d" % self._second
        else:
            s = ""
        s= "%s(%d, %d%s)" % ('datetime.' + self.__class__.__name__,
                             self._hour, self._minute, s)
        if self._tzinfo is not None:
            assert s[-1:] == ")"
            s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
        return s
    def isoformat(self):
        """Return the time formatted according to ISO.
        This is 'HH:MM:SS.mmmmmm+zz:zz', or 'HH:MM:SS+zz:zz' if
        self.microsecond == 0.
        """
        s = _format_time(self._hour, self._minute, self._second,
                         self._microsecond)
        tz = self._tzstr()
        if tz:
            s += tz
        return s
    __str__ = isoformat
    def strftime(self, fmt):
        """Format using strftime(). The date part of the timestamp passed
        to underlying strftime should not be used.
        """
        # The year must be >= 1000 else Python's strftime implementation
        # can raise a bogus exception.
        timetuple = (1900, 1, 1,
                     self._hour, self._minute, self._second,
                     0, 1, -1)
        return _wrap_strftime(self, fmt, timetuple)
    def __format__(self, fmt):
        # An empty format spec falls back to str() (ISO format).
        if len(fmt) != 0:
            return self.strftime(fmt)
        return str(self)
    # Timezone functions
    def utcoffset(self):
        """Return the timezone offset in minutes east of UTC (negative west of
        UTC)."""
        if self._tzinfo is None:
            return None
        offset = self._tzinfo.utcoffset(None)
        _check_utc_offset("utcoffset", offset)
        return offset
    def tzname(self):
        """Return the timezone name.
        Note that the name is 100% informational -- there's no requirement that
        it mean anything in particular. For example, "GMT", "UTC", "-500",
        "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
        """
        if self._tzinfo is None:
            return None
        name = self._tzinfo.tzname(None)
        _check_tzname(name)
        return name
    def dst(self):
        """Return 0 if DST is not in effect, or the DST offset (in minutes
        eastward) if DST is in effect.
        This is purely informational; the DST offset has already been added to
        the UTC offset returned by utcoffset() if applicable, so there's no
        need to consult dst() unless you're interested in displaying the DST
        info.
        """
        if self._tzinfo is None:
            return None
        offset = self._tzinfo.dst(None)
        _check_utc_offset("dst", offset)
        return offset
    def replace(self, hour=None, minute=None, second=None, microsecond=None,
                tzinfo=True):
        """Return a new time with new values for the specified fields."""
        # tzinfo uses True as the "not given" sentinel because None is a
        # legitimate value (it strips the timezone).
        if hour is None:
            hour = self.hour
        if minute is None:
            minute = self.minute
        if second is None:
            second = self.second
        if microsecond is None:
            microsecond = self.microsecond
        if tzinfo is True:
            tzinfo = self.tzinfo
        _check_time_fields(hour, minute, second, microsecond)
        _check_tzinfo_arg(tzinfo)
        return time(hour, minute, second, microsecond, tzinfo)
    def __bool__(self):
        # Midnight (naive, or wall time equal to the UTC offset) is falsy.
        # NOTE(review): this matches pre-Python-3.5 semantics; CPython 3.5+
        # made all times truthy.
        if self.second or self.microsecond:
            return True
        offset = self.utcoffset() or timedelta(0)
        return timedelta(hours=self.hour, minutes=self.minute) != offset
    # Pickle support.
    def _getstate(self):
        # Pack the microsecond into three big-endian bytes so the whole time
        # fits in a 6-byte string: hour, minute, second, us1, us2, us3.
        us2, us3 = divmod(self._microsecond, 256)
        us1, us2 = divmod(us2, 256)
        basestate = bytes([self._hour, self._minute, self._second,
                           us1, us2, us3])
        if self._tzinfo is None:
            return (basestate,)
        else:
            return (basestate, self._tzinfo)
    def __setstate(self, string, tzinfo):
        # string[0] is the hour; a value >= 24 cannot be a valid time state.
        if len(string) != 6 or string[0] >= 24:
            raise TypeError("an integer is required")
        (self._hour, self._minute, self._second,
         us1, us2, us3) = string
        self._microsecond = (((us1 << 8) | us2) << 8) | us3
        if tzinfo is None or isinstance(tzinfo, _tzinfo_class):
            self._tzinfo = tzinfo
        else:
            raise TypeError("bad tzinfo state arg %r" % tzinfo)
    def __reduce__(self):
        return (time, self._getstate())
_time_class = time # so functions w/ args named "time" can get at the class
# Class attributes are attached after the class body because constructing
# them requires the class itself.
time.min = time(0, 0, 0)                     # earliest representable time
time.max = time(23, 59, 59, 999999)          # latest representable time
time.resolution = timedelta(microseconds=1)  # smallest possible difference
class datetime(date):
    """datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
    The year, month and day arguments are required. tzinfo may be None, or an
    instance of a tzinfo subclass. The remaining arguments may be ints.
    """
    __slots__ = date.__slots__ + (
        '_hour', '_minute', '_second',
        '_microsecond', '_tzinfo')
    def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0,
                microsecond=0, tzinfo=None):
        if isinstance(year, bytes) and len(year) == 10:
            # Pickle support: a 10-byte state string arrives as "year" (its
            # first 4 bytes are the date portion) and tzinfo rides in "month".
            self = date.__new__(cls, year[:4])
            self.__setstate(year, month)
            return self
        _check_tzinfo_arg(tzinfo)
        _check_time_fields(hour, minute, second, microsecond)
        self = date.__new__(cls, year, month, day)
        self._hour = hour
        self._minute = minute
        self._second = second
        self._microsecond = microsecond
        self._tzinfo = tzinfo
        return self
    # Read-only field accessors
    @property
    def hour(self):
        """hour (0-23)"""
        return self._hour
    @property
    def minute(self):
        """minute (0-59)"""
        return self._minute
    @property
    def second(self):
        """second (0-59)"""
        return self._second
    @property
    def microsecond(self):
        """microsecond (0-999999)"""
        return self._microsecond
    @property
    def tzinfo(self):
        """timezone info object"""
        return self._tzinfo
    @classmethod
    def fromtimestamp(cls, t, tz=None):
        """Construct a datetime from a POSIX timestamp (like time.time()).
        A timezone info object may be passed in as well.
        """
        _check_tzinfo_arg(tz)
        # With an explicit tz, convert via UTC and let tz.fromutc() adjust.
        converter = _time.localtime if tz is None else _time.gmtime
        t, frac = divmod(t, 1.0)
        us = int(frac * 1e6)
        # If timestamp is less than one microsecond smaller than a
        # full second, us can be rounded up to 1000000. In this case,
        # roll over to seconds, otherwise, ValueError is raised
        # by the constructor.
        if us == 1000000:
            t += 1
            us = 0
        y, m, d, hh, mm, ss, weekday, jday, dst = converter(t)
        ss = min(ss, 59) # clamp out leap seconds if the platform has them
        result = cls(y, m, d, hh, mm, ss, us, tz)
        if tz is not None:
            result = tz.fromutc(result)
        return result
    @classmethod
    def utcfromtimestamp(cls, t):
        "Construct a UTC datetime from a POSIX timestamp (like time.time())."
        t, frac = divmod(t, 1.0)
        us = int(frac * 1e6)
        # If timestamp is less than one microsecond smaller than a
        # full second, us can be rounded up to 1000000. In this case,
        # roll over to seconds, otherwise, ValueError is raised
        # by the constructor.
        if us == 1000000:
            t += 1
            us = 0
        y, m, d, hh, mm, ss, weekday, jday, dst = _time.gmtime(t)
        ss = min(ss, 59) # clamp out leap seconds if the platform has them
        return cls(y, m, d, hh, mm, ss, us)
    # XXX This is supposed to do better than we *can* do by using time.time(),
    # XXX if the platform supports a more accurate way. The C implementation
    # XXX uses gettimeofday on platforms that have it, but that isn't
    # XXX available from Python. So now() may return different results
    # XXX across the implementations.
    @classmethod
    def now(cls, tz=None):
        "Construct a datetime from time.time() and optional time zone info."
        t = _time.time()
        return cls.fromtimestamp(t, tz)
    @classmethod
    def utcnow(cls):
        "Construct a UTC datetime from time.time()."
        t = _time.time()
        return cls.utcfromtimestamp(t)
    @classmethod
    def combine(cls, date, time):
        "Construct a datetime from a given date and a given time."
        if not isinstance(date, _date_class):
            raise TypeError("date argument must be a date instance")
        if not isinstance(time, _time_class):
            raise TypeError("time argument must be a time instance")
        return cls(date.year, date.month, date.day,
                   time.hour, time.minute, time.second, time.microsecond,
                   time.tzinfo)
    def timetuple(self):
        "Return local time tuple compatible with time.localtime()."
        # tm_isdst convention: -1 unknown, 1 in effect, 0 not in effect.
        dst = self.dst()
        if dst is None:
            dst = -1
        elif dst:
            dst = 1
        else:
            dst = 0
        return _build_struct_time(self.year, self.month, self.day,
                                  self.hour, self.minute, self.second,
                                  dst)
    def timestamp(self):
        "Return POSIX timestamp as float"
        if self._tzinfo is None:
            # Naive datetimes are interpreted as local time via mktime.
            return _time.mktime((self.year, self.month, self.day,
                                 self.hour, self.minute, self.second,
                                 -1, -1, -1)) + self.microsecond / 1e6
        else:
            return (self - _EPOCH).total_seconds()
    def utctimetuple(self):
        "Return UTC time tuple compatible with time.gmtime()."
        offset = self.utcoffset()
        if offset:
            # Rebinds the local name to a shifted copy; the instance itself
            # is untouched (datetimes are immutable).
            self -= offset
        y, m, d = self.year, self.month, self.day
        hh, mm, ss = self.hour, self.minute, self.second
        return _build_struct_time(y, m, d, hh, mm, ss, 0)
    def date(self):
        "Return the date part."
        return date(self._year, self._month, self._day)
    def time(self):
        "Return the time part, with tzinfo None."
        return time(self.hour, self.minute, self.second, self.microsecond)
    def timetz(self):
        "Return the time part, with same tzinfo."
        return time(self.hour, self.minute, self.second, self.microsecond,
                    self._tzinfo)
    def replace(self, year=None, month=None, day=None, hour=None,
                minute=None, second=None, microsecond=None, tzinfo=True):
        """Return a new datetime with new values for the specified fields."""
        # tzinfo uses True as the "not given" sentinel because None is a
        # legitimate value (it strips the timezone).
        if year is None:
            year = self.year
        if month is None:
            month = self.month
        if day is None:
            day = self.day
        if hour is None:
            hour = self.hour
        if minute is None:
            minute = self.minute
        if second is None:
            second = self.second
        if microsecond is None:
            microsecond = self.microsecond
        if tzinfo is True:
            tzinfo = self.tzinfo
        _check_date_fields(year, month, day)
        _check_time_fields(hour, minute, second, microsecond)
        _check_tzinfo_arg(tzinfo)
        return datetime(year, month, day, hour, minute, second,
                        microsecond, tzinfo)
    def astimezone(self, tz=None):
        """Convert to time zone tz; tz=None means the system local zone."""
        if tz is None:
            if self.tzinfo is None:
                raise ValueError("astimezone() requires an aware datetime")
            # Derive a fixed-offset timezone describing the local zone at
            # this instant, preferring tm_gmtoff/tm_zone when available.
            ts = (self - _EPOCH) // timedelta(seconds=1)
            localtm = _time.localtime(ts)
            local = datetime(*localtm[:6])
            try:
                # Extract TZ data if available
                gmtoff = localtm.tm_gmtoff
                zone = localtm.tm_zone
            except AttributeError:
                # Compute UTC offset and compare with the value implied
                # by tm_isdst. If the values match, use the zone name
                # implied by tm_isdst.
                delta = local - datetime(*_time.gmtime(ts)[:6])
                dst = _time.daylight and localtm.tm_isdst > 0
                gmtoff = -(_time.altzone if dst else _time.timezone)
                if delta == timedelta(seconds=gmtoff):
                    tz = timezone(delta, _time.tzname[dst])
                else:
                    tz = timezone(delta)
            else:
                tz = timezone(timedelta(seconds=gmtoff), zone)
        elif not isinstance(tz, tzinfo):
            raise TypeError("tz argument must be an instance of tzinfo")
        mytz = self.tzinfo
        if mytz is None:
            raise ValueError("astimezone() requires an aware datetime")
        if tz is mytz:
            return self
        # Convert self to UTC, and attach the new time zone object.
        myoffset = self.utcoffset()
        if myoffset is None:
            raise ValueError("astimezone() requires an aware datetime")
        utc = (self - myoffset).replace(tzinfo=tz)
        # Convert from UTC to tz's local time.
        return tz.fromutc(utc)
    # Ways to produce a string.
    def ctime(self):
        "Return ctime() style string."
        weekday = self.toordinal() % 7 or 7
        return "%s %s %2d %02d:%02d:%02d %04d" % (
            _DAYNAMES[weekday],
            _MONTHNAMES[self._month],
            self._day,
            self._hour, self._minute, self._second,
            self._year)
    def isoformat(self, sep='T'):
        """Return the time formatted according to ISO.
        This is 'YYYY-MM-DD HH:MM:SS.mmmmmm', or 'YYYY-MM-DD HH:MM:SS' if
        self.microsecond == 0.
        If self.tzinfo is not None, the UTC offset is also attached, giving
        'YYYY-MM-DD HH:MM:SS.mmmmmm+HH:MM' or 'YYYY-MM-DD HH:MM:SS+HH:MM'.
        Optional argument sep specifies the separator between date and
        time, default 'T'.
        """
        s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day,
                                  sep) +
                _format_time(self._hour, self._minute, self._second,
                             self._microsecond))
        off = self.utcoffset()
        if off is not None:
            if off.days < 0:
                sign = "-"
                off = -off
            else:
                sign = "+"
            hh, mm = divmod(off, timedelta(hours=1))
            assert not mm % timedelta(minutes=1), "whole minute"
            mm //= timedelta(minutes=1)
            s += "%s%02d:%02d" % (sign, hh, mm)
        return s
    def __repr__(self):
        """Convert to formal string, for repr()."""
        # Trailing zero fields (microsecond, then second) are omitted.
        L = [self._year, self._month, self._day, # These are never zero
             self._hour, self._minute, self._second, self._microsecond]
        if L[-1] == 0:
            del L[-1]
        if L[-1] == 0:
            del L[-1]
        s = ", ".join(map(str, L))
        s = "%s(%s)" % ('datetime.' + self.__class__.__name__, s)
        if self._tzinfo is not None:
            assert s[-1:] == ")"
            s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")"
        return s
    def __str__(self):
        "Convert to string, for str()."
        return self.isoformat(sep=' ')
    @classmethod
    def strptime(cls, date_string, format):
        'string, format -> new datetime parsed from a string (like time.strptime()).'
        import _strptime
        return _strptime._strptime_datetime(cls, date_string, format)
    def utcoffset(self):
        """Return the timezone offset in minutes east of UTC (negative west of
        UTC)."""
        if self._tzinfo is None:
            return None
        offset = self._tzinfo.utcoffset(self)
        _check_utc_offset("utcoffset", offset)
        return offset
    def tzname(self):
        """Return the timezone name.
        Note that the name is 100% informational -- there's no requirement that
        it mean anything in particular. For example, "GMT", "UTC", "-500",
        "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies.
        """
        # NOTE(review): unlike utcoffset()/dst(), there is no early return
        # for a None tzinfo here; presumably _call_tzinfo_method handles
        # that case -- confirm against its definition earlier in the file.
        name = _call_tzinfo_method(self._tzinfo, "tzname", self)
        _check_tzname(name)
        return name
    def dst(self):
        """Return 0 if DST is not in effect, or the DST offset (in minutes
        eastward) if DST is in effect.
        This is purely informational; the DST offset has already been added to
        the UTC offset returned by utcoffset() if applicable, so there's no
        need to consult dst() unless you're interested in displaying the DST
        info.
        """
        if self._tzinfo is None:
            return None
        offset = self._tzinfo.dst(self)
        _check_utc_offset("dst", offset)
        return offset
    # Comparisons of datetime objects with other.
    # __eq__/__ne__ pass allow_mixed=True so naive-vs-aware compares answer
    # unequal instead of raising; ordering operators raise via _cmperror.
    # Comparing against a plain date returns NotImplemented.
    def __eq__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other, allow_mixed=True) == 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            return False
    def __ne__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other, allow_mixed=True) != 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            return True
    def __le__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other) <= 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            _cmperror(self, other)
    def __lt__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other) < 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            _cmperror(self, other)
    def __ge__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other) >= 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            _cmperror(self, other)
    def __gt__(self, other):
        if isinstance(other, datetime):
            return self._cmp(other) > 0
        elif not isinstance(other, date):
            return NotImplemented
        else:
            _cmperror(self, other)
    def _cmp(self, other, allow_mixed=False):
        """Three-way compare; returns <0, 0 or >0 (2 for incomparable mixed)."""
        assert isinstance(other, datetime)
        mytz = self._tzinfo
        ottz = other._tzinfo
        myoff = otoff = None
        # Identical tzinfo objects (or both None) compare field by field.
        if mytz is ottz:
            base_compare = True
        else:
            myoff = self.utcoffset()
            otoff = other.utcoffset()
            base_compare = myoff == otoff
        if base_compare:
            return _cmp((self._year, self._month, self._day,
                         self._hour, self._minute, self._second,
                         self._microsecond),
                        (other._year, other._month, other._day,
                         other._hour, other._minute, other._second,
                         other._microsecond))
        if myoff is None or otoff is None:
            if allow_mixed:
                return 2 # arbitrary non-zero value
            else:
                raise TypeError("cannot compare naive and aware datetimes")
        # XXX What follows could be done more efficiently...
        diff = self - other # this will take offsets into account
        if diff.days < 0:
            return -1
        return diff and 1 or 0
    def __add__(self, other):
        "Add a datetime and a timedelta."
        if not isinstance(other, timedelta):
            return NotImplemented
        # Fold ordinal days plus time-of-day into one timedelta, add, then
        # split back into date and time components.
        delta = timedelta(self.toordinal(),
                          hours=self._hour,
                          minutes=self._minute,
                          seconds=self._second,
                          microseconds=self._microsecond)
        delta += other
        hour, rem = divmod(delta.seconds, 3600)
        minute, second = divmod(rem, 60)
        if 0 < delta.days <= _MAXORDINAL:
            return datetime.combine(date.fromordinal(delta.days),
                                    time(hour, minute, second,
                                         delta.microseconds,
                                         tzinfo=self._tzinfo))
        raise OverflowError("result out of range")
    __radd__ = __add__
    def __sub__(self, other):
        "Subtract two datetimes, or a datetime and a timedelta."
        if not isinstance(other, datetime):
            if isinstance(other, timedelta):
                return self + -other
            return NotImplemented
        # Naive difference first; then correct by the offsets if the two
        # operands carry different timezones.
        days1 = self.toordinal()
        days2 = other.toordinal()
        secs1 = self._second + self._minute * 60 + self._hour * 3600
        secs2 = other._second + other._minute * 60 + other._hour * 3600
        base = timedelta(days1 - days2,
                         secs1 - secs2,
                         self._microsecond - other._microsecond)
        if self._tzinfo is other._tzinfo:
            return base
        myoff = self.utcoffset()
        otoff = other.utcoffset()
        if myoff == otoff:
            return base
        if myoff is None or otoff is None:
            raise TypeError("cannot mix naive and timezone-aware time")
        return base + otoff - myoff
    def __hash__(self):
        # Aware datetimes hash by their UTC-normalized value so that equal
        # datetimes (per __eq__) hash equal.
        tzoff = self.utcoffset()
        if tzoff is None:
            return hash(self._getstate()[0])
        days = _ymd2ord(self.year, self.month, self.day)
        seconds = self.hour * 3600 + self.minute * 60 + self.second
        return hash(timedelta(days, seconds, self.microsecond) - tzoff)
    # Pickle support.
    def _getstate(self):
        # 10-byte layout: year (2 bytes big-endian), month, day, hour,
        # minute, second, microsecond (3 bytes big-endian).
        yhi, ylo = divmod(self._year, 256)
        us2, us3 = divmod(self._microsecond, 256)
        us1, us2 = divmod(us2, 256)
        basestate = bytes([yhi, ylo, self._month, self._day,
                           self._hour, self._minute, self._second,
                           us1, us2, us3])
        if self._tzinfo is None:
            return (basestate,)
        else:
            return (basestate, self._tzinfo)
    def __setstate(self, string, tzinfo):
        (yhi, ylo, self._month, self._day, self._hour,
         self._minute, self._second, us1, us2, us3) = string
        self._year = yhi * 256 + ylo
        self._microsecond = (((us1 << 8) | us2) << 8) | us3
        if tzinfo is None or isinstance(tzinfo, _tzinfo_class):
            self._tzinfo = tzinfo
        else:
            raise TypeError("bad tzinfo state arg %r" % tzinfo)
    def __reduce__(self):
        return (self.__class__, self._getstate())
# Class attributes are attached after the class body because constructing
# them requires the class itself.
datetime.min = datetime(1, 1, 1)                             # earliest representable datetime
datetime.max = datetime(9999, 12, 31, 23, 59, 59, 999999)    # latest representable datetime
datetime.resolution = timedelta(microseconds=1)              # smallest possible difference
def _isoweek1monday(year):
    """Return the ordinal day number of the Monday starting ISO week 1.

    Per ISO 8601, week 1 is the week containing the year's first Thursday.
    """
    _THURSDAY = 3
    jan1 = _ymd2ord(year, 1, 1)
    jan1_weekday = (jan1 + 6) % 7  # Monday == 0 (see date.weekday())
    monday = jan1 - jan1_weekday
    # When Jan 1 falls after Thursday, the week holding it belongs to the
    # previous ISO year, so week 1 starts one week later.
    return monday + 7 if jan1_weekday > _THURSDAY else monday
class timezone(tzinfo):
    """Fixed-offset tzinfo: a constant UTC offset with an optional name."""
    __slots__ = '_offset', '_name'
    # Sentinel value to disallow None
    _Omitted = object()
    def __new__(cls, offset, name=_Omitted):
        if not isinstance(offset, timedelta):
            raise TypeError("offset must be a timedelta")
        if name is cls._Omitted:
            # A zero offset with no explicit name yields the shared UTC
            # singleton (timezone.utc, attached after the class body).
            if not offset:
                return cls.utc
            name = None
        elif not isinstance(name, str):
            raise TypeError("name must be a string")
        if not cls._minoffset <= offset <= cls._maxoffset:
            raise ValueError("offset must be a timedelta"
                             " strictly between -timedelta(hours=24) and"
                             " timedelta(hours=24).")
        if (offset.microseconds != 0 or
            offset.seconds % 60 != 0):
            raise ValueError("offset must be a timedelta"
                             " representing a whole number of minutes")
        return cls._create(offset, name)
    @classmethod
    def _create(cls, offset, name=None):
        # Internal constructor that bypasses __new__'s validation; used for
        # the utc/min/max singletons below.
        self = tzinfo.__new__(cls)
        self._offset = offset
        self._name = name
        return self
    def __getinitargs__(self):
        """pickle support"""
        if self._name is None:
            return (self._offset,)
        return (self._offset, self._name)
    def __eq__(self, other):
        # Strict type check: subclasses never compare equal. Name is ignored;
        # only the offset matters.
        if type(other) != timezone:
            return False
        return self._offset == other._offset
    def __hash__(self):
        return hash(self._offset)
    def __repr__(self):
        """Convert to formal string, for repr().
        >>> tz = timezone.utc
        >>> repr(tz)
        'datetime.timezone.utc'
        >>> tz = timezone(timedelta(hours=-5), 'EST')
        >>> repr(tz)
        "datetime.timezone(datetime.timedelta(-1, 68400), 'EST')"
        """
        if self is self.utc:
            return 'datetime.timezone.utc'
        if self._name is None:
            return "%s(%r)" % ('datetime.' + self.__class__.__name__,
                               self._offset)
        return "%s(%r, %r)" % ('datetime.' + self.__class__.__name__,
                               self._offset, self._name)
    def __str__(self):
        return self.tzname(None)
    def utcoffset(self, dt):
        if isinstance(dt, datetime) or dt is None:
            return self._offset
        raise TypeError("utcoffset() argument must be a datetime instance"
                        " or None")
    def tzname(self, dt):
        if isinstance(dt, datetime) or dt is None:
            # No explicit name: synthesize one like 'UTC+05:30'.
            if self._name is None:
                return self._name_from_offset(self._offset)
            return self._name
        raise TypeError("tzname() argument must be a datetime instance"
                        " or None")
    def dst(self, dt):
        # A fixed-offset zone has no DST information.
        if isinstance(dt, datetime) or dt is None:
            return None
        raise TypeError("dst() argument must be a datetime instance"
                        " or None")
    def fromutc(self, dt):
        if isinstance(dt, datetime):
            if dt.tzinfo is not self:
                raise ValueError("fromutc: dt.tzinfo "
                                 "is not self")
            return dt + self._offset
        raise TypeError("fromutc() argument must be a datetime instance"
                        " or None")
    # Offsets must be strictly within one day.
    _maxoffset = timedelta(hours=23, minutes=59)
    _minoffset = -_maxoffset
    @staticmethod
    def _name_from_offset(delta):
        if delta < timedelta(0):
            sign = '-'
            delta = -delta
        else:
            sign = '+'
        hours, rest = divmod(delta, timedelta(hours=1))
        minutes = rest // timedelta(minutes=1)
        return 'UTC{}{:02d}:{:02d}'.format(sign, hours, minutes)
# Singletons built via _create() to bypass __new__ (which would bounce the
# zero offset back to .utc before it exists, and reject min/max validation).
timezone.utc = timezone._create(timedelta(0))
timezone.min = timezone._create(timezone._minoffset)
timezone.max = timezone._create(timezone._maxoffset)
# The POSIX epoch as an aware datetime; used by timestamp()/astimezone().
_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
"""
Some time zone algebra. For a datetime x, let
x.n = x stripped of its timezone -- its naive time.
x.o = x.utcoffset(), and assuming that doesn't raise an exception or
return None
x.d = x.dst(), and assuming that doesn't raise an exception or
return None
x.s = x's standard offset, x.o - x.d
Now some derived rules, where k is a duration (timedelta).
1. x.o = x.s + x.d
This follows from the definition of x.s.
2. If x and y have the same tzinfo member, x.s = y.s.
This is actually a requirement, an assumption we need to make about
sane tzinfo classes.
3. The naive UTC time corresponding to x is x.n - x.o.
This is again a requirement for a sane tzinfo class.
4. (x+k).s = x.s
This follows from #2, and that datetime+timedelta preserves tzinfo.
5. (x+k).n = x.n + k
Again follows from how arithmetic is defined.
Now we can explain tz.fromutc(x). Let's assume it's an interesting case
(meaning that the various tzinfo methods exist, and don't blow up or return
None when called).
The function wants to return a datetime y with timezone tz, equivalent to x.
x is already in UTC.
By #3, we want
y.n - y.o = x.n [1]
The algorithm starts by attaching tz to x.n, and calling that y. So
x.n = y.n at the start. Then it wants to add a duration k to y, so that [1]
becomes true; in effect, we want to solve [2] for k:
(y+k).n - (y+k).o = x.n [2]
By #1, this is the same as
(y+k).n - ((y+k).s + (y+k).d) = x.n [3]
By #5, (y+k).n = y.n + k, which equals x.n + k because x.n=y.n at the start.
Substituting that into [3],
x.n + k - (y+k).s - (y+k).d = x.n; the x.n terms cancel, leaving
k - (y+k).s - (y+k).d = 0; rearranging,
k = (y+k).s - (y+k).d; by #4, (y+k).s == y.s, so
k = y.s - (y+k).d
On the RHS, (y+k).d can't be computed directly, but y.s can be, and we
approximate k by ignoring the (y+k).d term at first. Note that k can't be
very large, since all offset-returning methods return a duration of magnitude
less than 24 hours. For that reason, if y is firmly in std time, (y+k).d must
be 0, so ignoring it has no consequence then.
In any case, the new value is
z = y + y.s [4]
It's helpful to step back and look at [4] from a higher level: it's simply
mapping from UTC to tz's standard time.
At this point, if
z.n - z.o = x.n [5]
we have an equivalent time, and are almost done. The insecurity here is
at the start of daylight time. Picture US Eastern for concreteness. The wall
time jumps from 1:59 to 3:00, and wall hours of the form 2:MM don't make good
sense then. The docs ask that an Eastern tzinfo class consider such a time to
be EDT (because it's "after 2"), which is a redundant spelling of 1:MM EST
on the day DST starts. We want to return the 1:MM EST spelling because that's
the only spelling that makes sense on the local wall clock.
In fact, if [5] holds at this point, we do have the standard-time spelling,
but that takes a bit of proof. We first prove a stronger result. What's the
difference between the LHS and RHS of [5]? Let
diff = x.n - (z.n - z.o) [6]
Now
z.n = by [4]
(y + y.s).n = by #5
y.n + y.s = since y.n = x.n
    x.n + y.s =                     since z and y have the same tzinfo member,
y.s = z.s by #2
x.n + z.s
Plugging that back into [6] gives
diff =
x.n - ((x.n + z.s) - z.o) = expanding
x.n - x.n - z.s + z.o = cancelling
- z.s + z.o = by #2
z.d
So diff = z.d.
If [5] is true now, diff = 0, so z.d = 0 too, and we have the standard-time
spelling we wanted in the endcase described above. We're done. Contrarily,
if z.d = 0, then we have a UTC equivalent, and are also done.
If [5] is not true now, diff = z.d != 0, and z.d is the offset we need to
add to z (in effect, z is in tz's standard time, and we need to shift the
local clock into tz's daylight time).
Let
z' = z + z.d = z + diff [7]
and we can again ask whether
z'.n - z'.o = x.n [8]
If so, we're done. If not, the tzinfo class is insane, according to the
assumptions we've made. This also requires a bit of proof. As before, let's
compute the difference between the LHS and RHS of [8] (and skipping some of
the justifications for the kinds of substitutions we've done several times
already):
diff' = x.n - (z'.n - z'.o) = replacing z'.n via [7]
x.n - (z.n + diff - z'.o) = replacing diff via [6]
x.n - (z.n + x.n - (z.n - z.o) - z'.o) =
x.n - z.n - x.n + z.n - z.o + z'.o = cancel x.n
- z.n + z.n - z.o + z'.o = cancel z.n
- z.o + z'.o = #1 twice
-z.s - z.d + z'.s + z'.d = z and z' have same tzinfo
z'.d - z.d
So z' is UTC-equivalent to x iff z'.d = z.d at this point. If they are equal,
we've found the UTC-equivalent so are done. In fact, we stop with [7] and
return z', not bothering to compute z'.d.
How could z.d and z'.d differ? z' = z + z.d [7], so merely moving z' by
a dst() offset, and starting *from* a time already in DST (we know z.d != 0),
would have to change the result dst() returns: we start in DST, and moving
a little further into it takes us out of DST.
There isn't a sane case where this can happen. The closest it gets is at
the end of DST, where there's an hour in UTC with no spelling in a hybrid
tzinfo class. In US Eastern, that's 5:MM UTC = 0:MM EST = 1:MM EDT. During
that hour, on an Eastern clock 1:MM is taken as being in standard time (6:MM
UTC) because the docs insist on that, but 0:MM is taken as being in daylight
time (4:MM UTC). There is no local time mapping to 5:MM UTC. The local
clock jumps from 1:59 back to 1:00 again, and repeats the 1:MM hour in
standard time. Since that's what the local clock *does*, we want to map both
UTC hours 5:MM and 6:MM to 1:MM Eastern. The result is ambiguous
in local time, but so it goes -- it's the way the local clock works.
When x = 5:MM UTC is the input to this algorithm, x.o=0, y.o=-5 and y.d=0,
so z=0:MM. z.d=60 (minutes) then, so [5] doesn't hold and we keep going.
z' = z + z.d = 1:MM then, and z'.d=0, and z'.d - z.d = -60 != 0 so [8]
(correctly) concludes that z' is not UTC-equivalent to x.
Because we know z.d said z was in daylight time (else [5] would have held and
we would have stopped then), and we know z.d != z'.d (else [8] would have held
and we have stopped then), and there are only 2 possible values dst() can
return in Eastern, it follows that z'.d must be 0 (which it is in the example,
but the reasoning doesn't depend on the example -- it depends on there being
two possible dst() outcomes, one zero and the other non-zero). Therefore
z' must be in standard time, and is the spelling we want in this case.
Note again that z' is not UTC-equivalent as far as the hybrid tzinfo class is
concerned (because it takes z' as being in standard time rather than the
daylight time we intend here), but returning it gives the real-life "local
clock repeats an hour" behavior when mapping the "unspellable" UTC hour into
tz.
When the input is 6:MM, z=1:MM and z.d=0, and we stop at once, again with
the 1:MM standard time spelling we want.
So how can this break? One of the assumptions must be violated. Two
possibilities:
1) [2] effectively says that y.s is invariant across all y belong to a given
time zone. This isn't true if, for political reasons or continental drift,
a region decides to change its base offset from UTC.
2) There may be versions of "double daylight" time where the tail end of
the analysis gives up a step too early. I haven't thought about that
enough to say.
In any case, it's clear that the default fromutc() is strong enough to handle
"almost all" time zones: so long as the standard offset is invariant, it
doesn't matter if daylight time transition points change from year to year, or
if daylight time is skipped in some years; it doesn't matter how large or
small dst() may get within its bounds; and it doesn't even matter if some
perverse time zone returns a negative dst()). So a breaking case must be
pretty bizarre, and a tzinfo subclass can override fromutc() if it is.
"""
#brython does not have a _datetime, so lets comment this out for now.
#try:
# from _datetime import *
#except ImportError:
# pass
#else:
# # Clean up unused names
# del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH,
# _DI100Y, _DI400Y, _DI4Y, _MAXORDINAL, _MONTHNAMES,
# _build_struct_time, _call_tzinfo_method, _check_date_fields,
# _check_time_fields, _check_tzinfo_arg, _check_tzname,
# _check_utc_offset, _cmp, _cmperror, _date_class, _days_before_month,
# _days_before_year, _days_in_month, _format_time, _is_leap,
# _isoweek1monday, _math, _ord2ymd, _time, _time_class, _tzinfo_class,
# _wrap_strftime, _ymd2ord)
# # XXX Since import * above excludes names that start with _,
# # docstring does not get overwritten. In the future, it may be
# # appropriate to maintain a single module level docstring and
# # remove the following line.
# #from _datetime import __doc__
| gpl-3.0 |
noobermin/lspreader | bin/simple-pext.py | 1 | 2845 | #!/usr/bin/env python
'''
Read in pext files from a path with no restart run and output a recarray.
Requires a .lsp file to be in the path, or to be passed. If <output> is
not passed, determine the name from the .lsp name.
Usage:
simple-pext.py [options] [<output>]
Options:
--help -h This help.
--lsp=L -L l Read this .lsp file specifically.
--late-time=TIME -l TIME Cut out after this time.
--reverse -r Reverse Y and Z.
--massE=ME Rest energy of the particle. [default: 0.511e6]
--verbose -v Print verbose.
--range=R -R R Only restrict to this number range of pexts.
--path=P -P P Specify a path instead of the cwd. [default: .]
'''
from docopt import docopt;
from lspreader import lspreader as rd;
from lspreader.pext import add_quantities;
import numpy as np;
from pys import parse_ituple;
import re;
from lspreader.dotlsp import getdim,getpexts
import os
import numpy.lib.recfunctions as rfn;
def _vprint(s):
print(s);
opts = docopt(__doc__,help=True);
vprint = _vprint if opts['--verbose'] else (lambda s: None);
path = opts['--path'];
files = os.listdir(path);
pext = [f for f in files if re.search("pext[0-9]+.p4",f)];
if opts['--range']:
a=parse_ituple(opts['--range'],length=2);
mn,mx = min(*a),max(*a);
else:
mn,mx = float('-inf'),float('inf');
key = [ float(re.search("pext([0-9]+).p4",f).group(1))
for f in pext ];
pext,key = zip(*[
(i,k) for i,k in zip(pext,key)
if mn <= k <= mx]);
if opts['--lsp']:
lspf=opts['--lsp'];
else:
lspf=[f for f in files if re.search(".*\.lsp$",f)][0];
with open(lspf,"r") as f:
lsp=f.read();
if not opts['<output>']:
outname = re.search("(.*)\.lsp$",lspf).group(1)+"-pext";
else:
outname = opts['<output>'];
dim=getdim(lsp);
pexts = getpexts(lsp);
latetime = float(opts['--late-time']) if opts['--late-time'] else None;
vprint('reading in files');
d = [ rd.read(name)
for name in pext ];
d[:] = [
rfn.rec_append_fields(
id, 'species',
np.ones(len(id)).astype(int)*pexts[i]['species'])
for id,i in zip(d,key) ];
vprint('length of d={}'.format(len(d)));
d = [ i for i in d if i['t'].shape[0] > 0];
vprint('length of d={} after remove empties'.format(len(d)));
vprint('cutting out duplicate times');
if len(d) > 1:
d = np.concatenate(d);
elif d == []:
print("empty pext");
quit();
else:
d = d[0];
vprint('sorting by times')
d.sort(order='t');
if latetime:
print('cutting out times greater than {}'.format(latetime));
d = d[ d['t'] <= latetime ];
#calculating quantities
if opts['--reverse']:
dim = dim[:-2] + list(reversed(dim[-2:]))
massE = float(opts['--massE']) if opts['--massE'] else None;
d = add_quantities(d, dim, massE=massE);
np.save(outname, d);
| mit |
CCI-MOC/python-novaclient | novaclient/v2/virtual_interfaces.py | 17 | 1039 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Virtual Interfaces (1.1 extension).
"""
from novaclient import base
class VirtualInterface(base.Resource):
    """A virtual network interface attached to a server."""

    def __repr__(self):
        # Bug fix: the original body was a bare ``pass``, so __repr__
        # returned None and any repr() call raised
        # "TypeError: __repr__ returned non-string".  __repr__ must
        # always return a str.
        return '<VirtualInterface>'
class VirtualInterfaceManager(base.ManagerWithFind):
    """Manager for the os-virtual-interfaces API resource."""

    resource_class = VirtualInterface

    def list(self, instance_id):
        """Return the virtual interfaces attached to *instance_id*."""
        url = '/servers/{0}/os-virtual-interfaces'.format(instance_id)
        return self._list(url, 'virtual_interfaces')
| apache-2.0 |
ClearCorp-dev/account-financial-tools | account_partner_required/__openerp__.py | 8 | 1801 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Account partner required module for OpenERP
# Copyright (C) 2014 Acsone (http://acsone.eu).
# @author Stéphane Bidoul <stephane.bidoul@acsone.eu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # OpenERP/Odoo module manifest: declarative metadata only.
    'name': 'Account partner required',
    'version': '0.1',
    'category': 'Generic Modules/Accounting',
    'license': 'AGPL-3',
    'description': """This module adds an option "partner policy"
on account types.
You have the choice between 3 policies : optional (the default),
always (require a partner), and never (forbid a partner).
This module is useful to enforce a partner on account move lines on
customer and supplier accounts.
Module developed by Stéphane Bidoul <stephane.bidoul@acsone.eu>,
inspired by Alexis de Lattre <alexis.delattre@akretion.com>'s
account_analytic_required module.
""",
    'author': 'ACSONE SA/NV',
    'website': 'http://acsone.eu/',
    # Modules that must be installed before this one.
    'depends': ['account'],
    # XML data files loaded on install/update.
    'data': ['account_view.xml'],
    'installable': True,
}
| agpl-3.0 |
unindented/streamcode | client/static/jsrepl/extern/python/closured/lib/python2.7/lib2to3/patcomp.py | 304 | 7091 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Pattern compiler.
The grammer is taken from PatternGrammar.txt.
The compiler compiles a pattern to a pytree.*Pattern instance.
"""
__author__ = "Guido van Rossum <guido@python.org>"
# Python imports
import os
import StringIO
# Fairly local imports
from .pgen2 import driver, literals, token, tokenize, parse, grammar
# Really local imports
from . import pytree
from . import pygram
# The pattern grammar file
_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
"PatternGrammar.txt")
class PatternSyntaxError(Exception):
    """Raised when a pattern string cannot be parsed or compiled."""
    pass
def tokenize_wrapper(input):
    """Tokenizes a string suppressing significant whitespace."""
    insignificant = {token.NEWLINE, token.INDENT, token.DEDENT}
    readline = StringIO.StringIO(input).readline
    for quintuple in tokenize.generate_tokens(readline):
        # quintuple is (type, value, start, end, line_text); filter on type.
        if quintuple[0] not in insignificant:
            yield quintuple
class PatternCompiler(object):
    """Compiles PatternGrammar pattern strings into pytree.*Pattern trees."""

    def __init__(self, grammar_file=_PATTERN_GRAMMAR_FILE):
        """Initializer.

        Takes an optional alternative filename for the pattern grammar.
        """
        self.grammar = driver.load_grammar(grammar_file)
        self.syms = pygram.Symbols(self.grammar)
        # The Python grammar is needed as well: lower-case NAME units in a
        # pattern (e.g. 'power') are resolved against python_symbols.
        self.pygrammar = pygram.python_grammar
        self.pysyms = pygram.python_symbols
        self.driver = driver.Driver(self.grammar, convert=pattern_convert)

    def compile_pattern(self, input, debug=False, with_tree=False):
        """Compiles a pattern string to a nested pytree.*Pattern object."""
        tokens = tokenize_wrapper(input)
        try:
            root = self.driver.parse_tokens(tokens, debug=debug)
        except parse.ParseError as e:
            # Re-raise parser failures under this module's exception type.
            raise PatternSyntaxError(str(e))
        if with_tree:
            return self.compile_node(root), root
        else:
            return self.compile_node(root)

    def compile_node(self, node):
        """Compiles a node, recursively.

        This is one big switch on the node type.
        """
        # XXX Optimize certain Wildcard-containing-Wildcard patterns
        # that can be merged
        if node.type == self.syms.Matcher:
            node = node.children[0] # Avoid unneeded recursion

        if node.type == self.syms.Alternatives:
            # Skip the odd children since they are just '|' tokens
            alts = [self.compile_node(ch) for ch in node.children[::2]]
            if len(alts) == 1:
                return alts[0]
            p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1)
            return p.optimize()

        if node.type == self.syms.Alternative:
            units = [self.compile_node(ch) for ch in node.children]
            if len(units) == 1:
                return units[0]
            p = pytree.WildcardPattern([units], min=1, max=1)
            return p.optimize()

        if node.type == self.syms.NegatedUnit:
            # children[0] is the 'not' marker; compile what follows it.
            pattern = self.compile_basic(node.children[1:])
            p = pytree.NegatedPattern(pattern)
            return p.optimize()

        assert node.type == self.syms.Unit

        name = None
        nodes = node.children
        if len(nodes) >= 3 and nodes[1].type == token.EQUAL:
            # 'name=...' binds the matched subtree to a name.
            name = nodes[0].value
            nodes = nodes[2:]
        repeat = None
        if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater:
            # Trailing '*', '+' or '{m,n}' repeater.
            repeat = nodes[-1]
            nodes = nodes[:-1]

        # Now we've reduced it to: STRING | NAME [Details] | (...) | [...]
        pattern = self.compile_basic(nodes, repeat)

        if repeat is not None:
            assert repeat.type == self.syms.Repeater
            children = repeat.children
            child = children[0]
            if child.type == token.STAR:
                min = 0
                max = pytree.HUGE
            elif child.type == token.PLUS:
                min = 1
                max = pytree.HUGE
            elif child.type == token.LBRACE:
                assert children[-1].type == token.RBRACE
                assert len(children) in (3, 5)
                min = max = self.get_int(children[1])
                if len(children) == 5:
                    max = self.get_int(children[3])
            else:
                assert False
            if min != 1 or max != 1:
                # Only wrap in a wildcard when the repeat isn't exactly-once.
                pattern = pattern.optimize()
                pattern = pytree.WildcardPattern([[pattern]], min=min, max=max)

        if name is not None:
            pattern.name = name
        return pattern.optimize()

    def compile_basic(self, nodes, repeat=None):
        """Compile a basic unit: STRING | NAME [Details] | (...) | [...]."""
        # Compile STRING | NAME [Details] | (...) | [...]
        assert len(nodes) >= 1
        node = nodes[0]
        if node.type == token.STRING:
            value = unicode(literals.evalString(node.value))
            return pytree.LeafPattern(_type_of_literal(value), value)
        elif node.type == token.NAME:
            value = node.value
            if value.isupper():
                # Upper-case names denote token types (NAME, STRING, ...).
                if value not in TOKEN_MAP:
                    raise PatternSyntaxError("Invalid token: %r" % value)
                if nodes[1:]:
                    raise PatternSyntaxError("Can't have details for token")
                return pytree.LeafPattern(TOKEN_MAP[value])
            else:
                if value == "any":
                    type = None
                elif not value.startswith("_"):
                    type = getattr(self.pysyms, value, None)
                    if type is None:
                        raise PatternSyntaxError("Invalid symbol: %r" % value)
                if nodes[1:]: # Details present
                    content = [self.compile_node(nodes[1].children[1])]
                else:
                    content = None
                return pytree.NodePattern(type, content)
        elif node.value == "(":
            return self.compile_node(nodes[1])
        elif node.value == "[":
            # '[...]' marks an optional unit: match zero or one occurrence.
            assert repeat is None
            subpattern = self.compile_node(nodes[1])
            return pytree.WildcardPattern([[subpattern]], min=0, max=1)
        assert False, node

    def get_int(self, node):
        """Return the integer value of a NUMBER token node."""
        assert node.type == token.NUMBER
        return int(node.value)
# Map named tokens to the type value for a LeafPattern
TOKEN_MAP = {"NAME": token.NAME,
             "STRING": token.STRING,
             "NUMBER": token.NUMBER,
             # "TOKEN" matches any token type (None means "don't care").
             "TOKEN": None}
def _type_of_literal(value):
    """Return the token type of a literal pattern string, or None."""
    if value[0].isalpha():
        return token.NAME
    # Operators map through opmap; anything else has no fixed type.
    return grammar.opmap.get(value)
def pattern_convert(grammar, raw_node_info):
    """Converts raw node information to a Node or Leaf instance."""
    type, value, context, children = raw_node_info
    # A childless node whose type is not a nonterminal symbol is a Leaf.
    if not children and type not in grammar.number2symbol:
        return pytree.Leaf(type, value, context=context)
    return pytree.Node(type, children, context=context)
def compile_pattern(pattern):
    """Convenience wrapper: compile *pattern* with a default PatternCompiler."""
    return PatternCompiler().compile_pattern(pattern)
| mit |
jhawkesworth/ansible | lib/ansible/modules/network/restconf/restconf_config.py | 29 | 6303 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: restconf_config
version_added: "2.8"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Handles create, update, read and delete of configuration data on RESTCONF enabled devices.
description:
- RESTCONF is a standard mechanisms to allow web applications to configure and manage
data. RESTCONF is a IETF standard and documented on RFC 8040.
- This module allows the user to configure data on RESTCONF enabled devices.
options:
path:
description:
- URI being used to execute API calls.
required: true
content:
description:
- The configuration data in format as specififed in C(format) option. Required unless C(method) is
I(delete).
method:
description:
- The RESTCONF method to manage the configuration change on device. The value I(post) is used to
create a data resource or invoke an operation resource, I(put) is used to replace the target
data resource, I(patch) is used to modify the target resource, and I(delete) is used to delete
the target resource.
required: false
default: post
choices: ['post', 'put', 'patch', 'delete']
format:
description:
- The format of the configuration provided as value of C(content). Accepted values are I(xml) and I(json) and
the given configuration format should be supported by remote RESTCONF server.
default: json
choices: ['json', 'xml']
'''
EXAMPLES = '''
- name: create l3vpn services
restconf_config:
path: /config/ietf-l3vpn-svc:l3vpn-svc/vpn-services
content: |
{
"vpn-service":[
{
"vpn-id": "red_vpn2",
"customer-name": "blue",
"vpn-service-topology": "ietf-l3vpn-svc:any-to-any"
},
{
"vpn-id": "blue_vpn1",
"customer-name": "red",
"vpn-service-topology": "ietf-l3vpn-svc:any-to-any"
}
]
}
'''
RETURN = '''
candidate:
description: The configuration sent to the device.
returned: When the method is not delete
type: dict
sample: |
{
"vpn-service": [
{
"customer-name": "red",
"vpn-id": "blue_vpn1",
"vpn-service-topology": "ietf-l3vpn-svc:any-to-any"
}
]
}
running:
description: The current running configuration on the device.
returned: When the method is not delete
type: dict
sample: |
{
"vpn-service": [
{
"vpn-id": "red_vpn2",
"customer-name": "blue",
"vpn-service-topology": "ietf-l3vpn-svc:any-to-any"
},
{
"vpn-id": "blue_vpn1",
"customer-name": "red",
"vpn-service-topology": "ietf-l3vpn-svc:any-to-any"
}
]
}
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.network.common.utils import dict_diff
from ansible.module_utils.network.restconf import restconf
from ansible.module_utils.six import string_types
def main():
    """entry point for module execution
    """
    argument_spec = dict(
        path=dict(required=True),
        content=dict(),
        method=dict(choices=['post', 'put', 'patch', 'delete'], default='post'),
        format=dict(choices=['json', 'xml'], default='json'),
    )
    # 'content' is mandatory for every method except delete.
    required_if = [
        ['method', 'post', ['content']],
        ['method', 'put', ['content']],
        ['method', 'patch', ['content']],
    ]
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_if=required_if,
        supports_check_mode=True
    )

    path = module.params['path']
    candidate = module.params['content']
    method = module.params['method']
    format = module.params['format']

    if isinstance(candidate, string_types):
        candidate = json.loads(candidate)

    warnings = list()
    result = {'changed': False, 'warnings': warnings}
    running = None
    response = None
    # Only push changes to the device outside of check mode.
    commit = not module.check_mode

    try:
        # Fetch the current config at 'path'; a 404 means the resource
        # does not exist yet and is handled as running=None.
        running = restconf.get(module, path, output=format)
    except ConnectionError as exc:
        if exc.code == 404:
            running = None
        else:
            module.fail_json(msg=to_text(exc), code=exc.code)

    try:
        if method == 'delete':
            if running:
                if commit:
                    response = restconf.edit_config(module, path=path, method='DELETE')
                result['changed'] = True
            else:
                # Deleting a non-existent resource is a no-op with a warning.
                warnings.append("delete not executed as resource '%s' does not exist" % path)
        else:
            if running:
                if method == 'post':
                    # POST creates; refuse when the resource already exists.
                    module.fail_json(msg="resource '%s' already exist" % path, code=409)
                diff = dict_diff(running, candidate)
                result['candidate'] = candidate
                result['running'] = running
            else:
                # Nothing on the device yet: create the whole candidate.
                method = 'POST'
                diff = candidate

            if diff:
                if module._diff:
                    result['diff'] = {'prepared': diff, 'before': candidate, 'after': running}
                if commit:
                    response = restconf.edit_config(module, path=path, content=diff, method=method.upper(), format=format)
                result['changed'] = True
    except ConnectionError as exc:
        module.fail_json(msg=str(exc), code=exc.code)

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
michelle192837/test-infra | gubernator/kubelet_parser.py | 20 | 2859 | #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import json
import jinja2
import regex
def parse(lines, highlight_words, filters, objref_dict):
    """Select which log lines to display and which words to bold.

    Args:
        lines: array of log lines
        highlight_words: array of words that need to be bolded
        filters: dictionary of which filters to apply
        objref_dict: a dictionary where the keys are possible filters
        and the values are the words to be highlighted
    Returns:
        (matched_lines, highlight_words): the ordered indices of lines
        containing a highlight word, and the (possibly rebuilt) word list.
    """
    if not filters["pod"] and objref_dict:
        # Pod filter is off: rebuild the highlight list from the enabled
        # filters that have a corresponding entry in objref_dict.
        highlight_words = [objref_dict[k] for k in filters
                           if k != "pod" and filters[k] and k in objref_dict]

    words_re = regex.combine_wordsRE(highlight_words)
    matched_lines = [n for n, line in enumerate(lines)
                     if words_re.search(line)]
    return matched_lines, highlight_words
def make_dict(data, pod_re, objref_dict):
    """
    Given the log file and the failed pod name, returns a dictionary
    containing the namespace, UID, and other information associated with
    the pod and a bool indicating if the pod name string is in the log file.

    This dictionary is lifted from the line with the ObjectReference.
    """
    pod_in_file = False

    lines = unicode(jinja2.escape(data)).split('\n')
    for line in lines:
        if pod_re.search(line):
            pod_in_file = True
            objref = regex.objref(line)
            containerID = regex.containerID(line)
            if containerID and not objref_dict.get("ContainerID"):
                objref_dict["ContainerID"] = containerID.group(1)
            if objref:
                objref_dict_re = objref.group(1)
                # Quote the bare keys so the ObjectReference literal
                # becomes valid JSON.
                objref_dict_re = re.sub(r'(\w+):', r'"\1": ', objref_dict_re)
                # Bug fix: the data was HTML-escaped by jinja2.escape()
                # above, so double quotes appear as '&#34;'.  The original
                # line replaced '"' with '"' (a no-op), leaving the
                # entities in place and breaking json.loads on any quoted
                # value.  Undo the escaping before parsing.
                objref_dict_re = objref_dict_re.replace('&#34;', '"')
                objref_dict_re = json.loads(objref_dict_re)
                objref_dict_re.update(objref_dict)
                return objref_dict_re, pod_in_file
    return objref_dict, pod_in_file
| apache-2.0 |
ksrajkumar/openerp-6.1 | openerp/addons/sale_commission/__init__.py | 1 | 1213 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Pexego Sistemas Informáticos (<http://www.pexego.es>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import saleagent
import invoice
import partner_agent
import partner
import settled
import sale_order
import stock_picking
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
spthaolt/socorro | socorro/collector/wsgi_breakpad_collector.py | 4 | 6762 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import web
import time
import zlib
import cgi
import cStringIO
from contextlib import closing
from socorro.lib.ooid import createNewOoid
from socorro.lib.util import DotDict
from socorro.collector.throttler import DISCARD, IGNORE
from socorro.lib.datetimeutil import utc_now
from configman import RequiredConfig, Namespace, class_converter
#==============================================================================
class BreakpadCollector(RequiredConfig):
    """WSGI service for the /submit endpoint: parses Breakpad crash
    submissions (optionally gzip-compressed), assigns a crash_id,
    applies throttling, and hands accepted crashes to crash storage."""
    #--------------------------------------------------------------------------
    # in this section, define any configuration requirements
    required_config = Namespace()
    required_config.add_option(
        'dump_field',
        doc='the name of the form field containing the raw dump',
        default='upload_file_minidump'
    )
    required_config.add_option(
        'dump_id_prefix',
        doc='the prefix to return to the client in front of the OOID',
        default='bp-'
    )
    required_config.add_option(
        'accept_submitted_crash_id',
        doc='a boolean telling the collector to use a crash_id provided in '
            'the crash submission',
        default=False
    )
    required_config.add_option(
        'accept_submitted_legacy_processing',
        doc='a boolean telling the collector to use a any legacy_processing'
            'flag submitted with the crash',
        default=False
    )
    required_config.add_option(
        'checksum_method',
        doc='a reference to method that accepts a string and calculates a'
            'hash value',
        default='hashlib.md5',
        from_string_converter=class_converter
    )

    #--------------------------------------------------------------------------
    def __init__(self, config):
        self.config = config
        self.logger = self.config.logger
        self.throttler = config.throttler
        self.dump_id_prefix = config.collector.dump_id_prefix
        self.crash_storage = config.crash_storage
        self.dump_field = config.collector.dump_field

    #--------------------------------------------------------------------------
    # the URL path this handler is mounted on by the web framework
    uri = '/submit'

    #--------------------------------------------------------------------------
    @staticmethod
    def _process_fieldstorage(fs):
        """Unwrap a cgi.FieldStorage value: plain form fields become their
        string value; file uploads stay as FieldStorage objects."""
        if isinstance(fs, list):
            # Bug fix: the original called the bare name
            # '_process_fieldstorage' here, but class-body names are not
            # visible inside a method's scope, so a repeated form field
            # (which cgi delivers as a list) raised NameError.  Qualify
            # the recursive call with the class.
            return [BreakpadCollector._process_fieldstorage(x) for x in fs]
        elif fs.filename is None:
            return fs.value
        else:
            return fs

    #--------------------------------------------------------------------------
    def _form_as_mapping(self):
        """this method returns the POST form mapping with any gzip
        decompression automatically handled"""
        if web.ctx.env.get('HTTP_CONTENT_ENCODING') == 'gzip':
            # Handle gzipped form posts
            gzip_header = 16 + zlib.MAX_WBITS
            data = zlib.decompress(web.webapi.data(), gzip_header)
            e = web.ctx.env.copy()
            with closing(cStringIO.StringIO(data)) as fp:
                # this is how web.webapi.rawinput() handles
                # multipart/form-data, as of this writing
                fs = cgi.FieldStorage(fp=fp, environ=e, keep_blank_values=1)
                form = web.utils.storage(
                    [(k, self._process_fieldstorage(fs[k])) for k in fs.keys()]
                )
            return form
        return web.webapi.rawinput()

    #--------------------------------------------------------------------------
    @staticmethod
    def _no_x00_character(value):
        """Strip NUL characters from str/unicode values; NULs are unsafe
        for downstream storage."""
        if isinstance(value, unicode) and u'\u0000' in value:
            return ''.join(c for c in value if c != u'\u0000')
        if isinstance(value, str) and '\x00' in value:
            return ''.join(c for c in value if c != '\x00')
        return value

    #--------------------------------------------------------------------------
    def _get_raw_crash_from_form(self):
        """this method creates the raw_crash and the dumps mapping using the
        POST form"""
        dumps = DotDict()
        raw_crash = DotDict()
        raw_crash.dump_checksums = DotDict()
        for name, value in self._form_as_mapping().iteritems():
            name = self._no_x00_character(name)
            if isinstance(value, basestring):
                if name != "dump_checksums":
                    raw_crash[name] = self._no_x00_character(value)
            elif hasattr(value, 'file') and hasattr(value, 'value'):
                # A file upload: store the bytes and record its checksum.
                dumps[name] = value.value
                raw_crash.dump_checksums[name] = \
                    self.config.collector.checksum_method(
                        value.value
                    ).hexdigest()
            elif isinstance(value, int):
                raw_crash[name] = value
            else:
                raw_crash[name] = value.value
        return raw_crash, dumps

    #--------------------------------------------------------------------------
    def POST(self, *args):
        """Accept a crash submission and return the assigned crash id,
        or a Discarded/Unsupported marker when throttled out."""
        raw_crash, dumps = self._get_raw_crash_from_form()

        current_timestamp = utc_now()
        raw_crash.submitted_timestamp = current_timestamp.isoformat()
        # legacy - ought to be removed someday
        raw_crash.timestamp = time.time()

        # Use the submitted crash id only when configured to trust it.
        if (not self.config.collector.accept_submitted_crash_id
            or 'uuid' not in raw_crash
        ):
            crash_id = createNewOoid(current_timestamp)
            raw_crash.uuid = crash_id
            self.logger.info('%s received', crash_id)
        else:
            crash_id = raw_crash.uuid
            self.logger.info('%s received with existing crash_id:', crash_id)

        # Same policy for a client-supplied legacy_processing flag.
        if ('legacy_processing' not in raw_crash
            or not self.config.collector.accept_submitted_legacy_processing
        ):
            raw_crash.legacy_processing, raw_crash.throttle_rate = (
                self.throttler.throttle(raw_crash)
            )
        else:
            raw_crash.legacy_processing = int(raw_crash.legacy_processing)

        if raw_crash.legacy_processing == DISCARD:
            self.logger.info('%s discarded', crash_id)
            return "Discarded=1\n"
        if raw_crash.legacy_processing == IGNORE:
            self.logger.info('%s ignored', crash_id)
            return "Unsupported=1\n"

        self.config.crash_storage.save_raw_crash(
            raw_crash,
            dumps,
            crash_id
        )
        self.logger.info('%s accepted', crash_id)
        return "CrashID=%s%s\n" % (self.dump_id_prefix, crash_id)
| mpl-2.0 |
tjlaboss/openmc | openmc/model/model.py | 8 | 7774 | from collections.abc import Iterable
from pathlib import Path
import time
import openmc
from openmc.checkvalue import check_type, check_value
class Model:
    """Model container.

    This class can be used to store instances of :class:`openmc.Geometry`,
    :class:`openmc.Materials`, :class:`openmc.Settings`,
    :class:`openmc.Tallies`, :class:`openmc.Plots`, and :class:`openmc.CMFD`,
    thus making a complete model. The :meth:`Model.export_to_xml` method will
    export XML files for all attributes that have been set. If the
    :meth:`Model.materials` attribute is not set, it will attempt to create a
    ``materials.xml`` file based on all materials appearing in the geometry.

    Parameters
    ----------
    geometry : openmc.Geometry, optional
        Geometry information
    materials : openmc.Materials, optional
        Materials information
    settings : openmc.Settings, optional
        Settings information
    tallies : openmc.Tallies, optional
        Tallies information
    plots : openmc.Plots, optional
        Plot information

    Attributes
    ----------
    geometry : openmc.Geometry
        Geometry information
    materials : openmc.Materials
        Materials information
    settings : openmc.Settings
        Settings information
    tallies : openmc.Tallies
        Tallies information
    plots : openmc.Plots
        Plot information
    """

    def __init__(self, geometry=None, materials=None, settings=None,
                 tallies=None, plots=None):
        # Start from empty collections so every attribute is always set,
        # then route caller-supplied values through the type-checking
        # setters below.
        self.geometry = openmc.Geometry()
        self.materials = openmc.Materials()
        self.settings = openmc.Settings()
        self.tallies = openmc.Tallies()
        self.plots = openmc.Plots()

        if geometry is not None:
            self.geometry = geometry
        if materials is not None:
            self.materials = materials
        if settings is not None:
            self.settings = settings
        if tallies is not None:
            self.tallies = tallies
        if plots is not None:
            self.plots = plots

    @property
    def geometry(self):
        return self._geometry

    @property
    def materials(self):
        return self._materials

    @property
    def settings(self):
        return self._settings

    @property
    def tallies(self):
        return self._tallies

    @property
    def plots(self):
        return self._plots

    @geometry.setter
    def geometry(self, geometry):
        check_type('geometry', geometry, openmc.Geometry)
        self._geometry = geometry

    @materials.setter
    def materials(self, materials):
        check_type('materials', materials, Iterable, openmc.Material)
        if isinstance(materials, openmc.Materials):
            self._materials = materials
        else:
            # Copy elements into the existing collection so the attribute
            # keeps its openmc.Materials type.
            del self._materials[:]
            for mat in materials:
                self._materials.append(mat)

    @settings.setter
    def settings(self, settings):
        check_type('settings', settings, openmc.Settings)
        self._settings = settings

    @tallies.setter
    def tallies(self, tallies):
        check_type('tallies', tallies, Iterable, openmc.Tally)
        if isinstance(tallies, openmc.Tallies):
            self._tallies = tallies
        else:
            # Same copy-in-place strategy as the materials setter.
            del self._tallies[:]
            for tally in tallies:
                self._tallies.append(tally)

    @plots.setter
    def plots(self, plots):
        check_type('plots', plots, Iterable, openmc.Plot)
        if isinstance(plots, openmc.Plots):
            self._plots = plots
        else:
            del self._plots[:]
            for plot in plots:
                self._plots.append(plot)

    def deplete(self, timesteps, chain_file=None, method='cecm',
                fission_q=None, **kwargs):
        """Deplete model using specified timesteps/power

        Parameters
        ----------
        timesteps : iterable of float
            Array of timesteps in units of [s]. Note that values are not
            cumulative.
        chain_file : str, optional
            Path to the depletion chain XML file. Defaults to the chain
            found under the ``depletion_chain`` in the
            :envvar:`OPENMC_CROSS_SECTIONS` environment variable if it exists.
        method : str
            Integration method used for depletion (e.g., 'cecm', 'predictor')
        fission_q : dict, optional
            Dictionary of nuclides and their fission Q values [eV].
            If not given, values will be pulled from the ``chain_file``.
        **kwargs
            Keyword arguments passed to integration function (e.g.,
            :func:`openmc.deplete.integrator.cecm`)
        """
        # Import the depletion module. This is done here rather than the module
        # header to delay importing openmc.lib (through openmc.deplete) which
        # can be tough to install properly.
        import openmc.deplete as dep

        # Create OpenMC transport operator
        op = dep.Operator(
            self.geometry, self.settings, chain_file,
            fission_q=fission_q,
        )

        # Perform depletion; validate the method name before dispatching
        # to the matching integrator function by attribute lookup.
        check_value('method', method, ('cecm', 'predictor', 'cf4', 'epc_rk4',
                                       'si_celi', 'si_leqi', 'celi', 'leqi'))
        getattr(dep.integrator, method)(op, timesteps, **kwargs)

    def export_to_xml(self, directory='.'):
        """Export model to XML files.

        Parameters
        ----------
        directory : str
            Directory to write XML files to. If it doesn't exist already, it
            will be created.
        """
        # Create directory if required
        d = Path(directory)
        if not d.is_dir():
            d.mkdir(parents=True)

        self.settings.export_to_xml(d)
        # DAGMC models carry their own geometry, so geometry.xml is skipped.
        if not self.settings.dagmc:
            self.geometry.export_to_xml(d)

        # If a materials collection was specified, export it. Otherwise, look
        # for all materials in the geometry and use that to automatically build
        # a collection.
        if self.materials:
            self.materials.export_to_xml(d)
        else:
            materials = openmc.Materials(self.geometry.get_all_materials()
                                         .values())
            materials.export_to_xml(d)

        if self.tallies:
            self.tallies.export_to_xml(d)
        if self.plots:
            self.plots.export_to_xml(d)

    def run(self, **kwargs):
        """Creates the XML files, runs OpenMC, and returns the path to the last
        statepoint file generated.

        .. versionchanged:: 0.12
            Instead of returning the final k-effective value, this function now
            returns the path to the final statepoint written.

        Parameters
        ----------
        **kwargs
            Keyword arguments passed to :func:`openmc.run`

        Returns
        -------
        Path
            Path to the last statepoint written by this run
            (None if no statepoint was written)
        """
        self.export_to_xml()

        # Setting tstart here ensures we don't pick up any pre-existing statepoint
        # files in the output directory
        tstart = time.time()
        last_statepoint = None

        openmc.run(**kwargs)

        # Get output directory and return the last statepoint written by this run
        if self.settings.output and 'path' in self.settings.output:
            output_dir = Path(self.settings.output['path'])
        else:
            output_dir = Path.cwd()
        for sp in output_dir.glob('statepoint.*.h5'):
            mtime = sp.stat().st_mtime
            if mtime >= tstart:  # >= allows for poor clock resolution
                tstart = mtime
                last_statepoint = sp
        return last_statepoint
| mit |
sinbazhou/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/lib/tiny_socket.py | 386 | 3270 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import socket
import cPickle
import cStringIO
import marshal
class Myexception(Exception):
    """Exception mirroring an XML-RPC style fault.

    Carries a numeric/str fault code and a human-readable fault string,
    and exposes both through the standard ``args`` tuple.
    """
    def __init__(self, faultCode, faultString):
        # Keep the individual attributes as well as the args tuple so
        # callers can use either access style.
        self.faultCode, self.faultString = faultCode, faultString
        self.args = (faultCode, faultString)
class mysocket:
    # Thin framed-message transport over a TCP socket (Python 2 code).
    # Wire format per message: an 8-byte ASCII right-aligned decimal
    # length, one flag byte ("1" = remote exception, "0" = normal),
    # then a cPickle payload of [message, traceback].
    def __init__(self, sock=None):
        # Wrap an existing socket if given, else create a fresh TCP socket.
        if sock is None:
            self.sock = socket.socket(
                socket.AF_INET, socket.SOCK_STREAM)
        else:
            self.sock = sock
        # Avoid hanging forever on a dead or silent peer.
        self.sock.settimeout(120)
    def connect(self, host, port=False):
        # Accepts either (host, port) or a single "proto://host:port" URL
        # in `host` with port omitted.
        if not port:
            protocol, buf = host.split('//')
            host, port = buf.split(':')
        self.sock.connect((host, int(port)))
    def disconnect(self):
        # Orderly shutdown of both directions before closing.
        self.sock.shutdown(socket.SHUT_RDWR)
        self.sock.close()
    def mysend(self, msg, exception=False, traceback=None):
        # Serialize [msg, traceback] and transmit it with the
        # length/flag header described on the class.
        msg = cPickle.dumps([msg,traceback])
        size = len(msg)
        # 8-character right-aligned decimal length, then the exception flag.
        self.sock.send('%8d' % size)
        self.sock.send(exception and "1" or "0")
        totalsent = 0
        # send() may transmit only part of the buffer; loop until all sent.
        while totalsent < size:
            sent = self.sock.send(msg[totalsent:])
            if sent == 0:
                raise RuntimeError, "Socket connection broken."
            totalsent = totalsent + sent
    def myreceive(self):
        # Read the 8-byte length header; recv() may return short reads.
        buf=''
        while len(buf) < 8:
            chunk = self.sock.recv(8 - len(buf))
            if chunk == '':
                raise RuntimeError, "Socket connection broken."
            buf += chunk
        size = int(buf)
        # One flag byte: anything but "0" marks a remote exception.
        buf = self.sock.recv(1)
        if buf != "0":
            exception = buf
        else:
            exception = False
        msg = ''
        while len(msg) < size:
            chunk = self.sock.recv(size-len(msg))
            if chunk == '':
                raise RuntimeError, "Socket connection broken."
            msg = msg + chunk
        msgio = cStringIO.StringIO(msg)
        unpickler = cPickle.Unpickler(msgio)
        # Forbid resolving global names while unpickling; this limits what
        # a malicious peer's pickle can instantiate (NOTE: pickle is still
        # not fully safe on untrusted input).
        unpickler.find_global = None
        res = unpickler.load()
        if isinstance(res[0],Exception):
            # Remote side shipped an exception: re-raise it locally,
            # wrapped as Myexception when the wire flag was set.
            if exception:
                raise Myexception(str(res[0]), str(res[1]))
            raise res[0]
        else:
            return res[0]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/test/test_generators.py | 49 | 50158 | tutorial_tests = """
Let's try a simple generator:
>>> def f():
... yield 1
... yield 2
>>> for i in f():
... print(i)
1
2
>>> g = f()
>>> next(g)
1
>>> next(g)
2
"Falling off the end" stops the generator:
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
StopIteration
"return" also stops the generator:
>>> def f():
... yield 1
... return
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in f
StopIteration
>>> next(g) # once stopped, can't be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
"raise StopIteration" stops the generator too:
>>> def f():
... yield 1
... raise StopIteration
... yield 2 # never reached
...
>>> g = f()
>>> next(g)
1
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>> next(g)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
However, they are not exactly equivalent:
>>> def g1():
... try:
... return
... except:
... yield 1
...
>>> list(g1())
[]
>>> def g2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(g2()))
[42]
This may be surprising at first:
>>> def g3():
... try:
... return
... finally:
... yield 1
...
>>> list(g3())
[1]
Let's create an alternate range() function implemented as a generator:
>>> def yrange(n):
... for i in range(n):
... yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print("creator", next(r))
... return r
...
>>> def caller():
... r = creator()
... for i in r:
... print("caller", i)
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]
"""
# The examples from PEP 255.
pep_tests = """
Specification: Yield
Restriction: A generator cannot be resumed while it is actively
running:
>>> def g():
... i = next(me)
... yield i
>>> me = g()
>>> next(me)
Traceback (most recent call last):
...
File "<string>", line 2, in g
ValueError: generator already executing
Specification: Return
Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,
>>> def f1():
... try:
... return
... except:
... yield 1
>>> print(list(f1()))
[]
because, as in any function, return simply exits, but
>>> def f2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print(list(f2()))
[42]
because StopIteration is captured by a bare "except", as is any
exception.
Specification: Generators and Exception Propagation
>>> def f():
... return 1//0
>>> def g():
... yield f() # the zero division exception propagates
... yield 42 # and we'll never get here
>>> k = g()
>>> next(k)
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> next(k) # and the generator cannot be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>>
Specification: Try/Except/Finally
>>> def f():
... try:
... yield 1
... try:
... yield 2
... 1//0
... yield 3 # never get here
... except ZeroDivisionError:
... yield 4
... yield 5
... raise
... except:
... yield 6
... yield 7 # the "raise" above stops this
... except:
... yield 8
... yield 9
... try:
... x = 12
... finally:
... yield 10
... yield 11
>>> print(list(f()))
[1, 2, 4, 5, 8, 9, 10, 11]
>>>
Guido's binary tree example.
>>> # A binary tree class.
>>> class Tree:
...
... def __init__(self, label, left=None, right=None):
... self.label = label
... self.left = left
... self.right = right
...
... def __repr__(self, level=0, indent=" "):
... s = level*indent + repr(self.label)
... if self.left:
... s = s + "\\n" + self.left.__repr__(level+1, indent)
... if self.right:
... s = s + "\\n" + self.right.__repr__(level+1, indent)
... return s
...
... def __iter__(self):
... return inorder(self)
>>> # Create a Tree from a list.
>>> def tree(list):
... n = len(list)
... if n == 0:
... return []
... i = n // 2
... return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
>>> # A non-recursive generator.
>>> def inorder(node):
... stack = []
... while node:
... while node.left:
... stack.append(node)
... node = node.left
... yield node.label
... while not node.right:
... try:
... node = stack.pop()
... except IndexError:
... return
... yield node.label
... node = node.right
>>> # Exercise the non-recursive generator.
>>> for x in t:
... print(' '+x, end='')
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
"""
# Examples from Iterator-List and Python-Dev and c.l.py.
email_tests = """
The difference between yielding None and returning it.
>>> def g():
... for i in range(3):
... yield None
... yield None
... return
>>> list(g())
[None, None, None, None]
Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.
>>> def g():
... yield 1
... try:
... raise StopIteration
... except:
... yield 2
... yield 3
>>> list(g())
[1, 2, 3]
Next one was posted to c.l.py.
>>> def gcomb(x, k):
... "Generate all combinations of k elements from list x."
...
... if k > len(x):
... return
... if k == 0:
... yield []
... else:
... first, rest = x[0], x[1:]
... # A combination does or doesn't contain first.
... # If it does, the remainder is a k-1 comb of rest.
... for c in gcomb(rest, k-1):
... c.insert(0, first)
... yield c
... # If it doesn't contain first, it's a k comb of rest.
... for c in gcomb(rest, k):
... yield c
>>> seq = list(range(1, 5))
>>> for k in range(len(seq) + 2):
... print("%d-combs of %s:" % (k, seq))
... for c in gcomb(seq, k):
... print(" ", c)
0-combs of [1, 2, 3, 4]:
[]
1-combs of [1, 2, 3, 4]:
[1]
[2]
[3]
[4]
2-combs of [1, 2, 3, 4]:
[1, 2]
[1, 3]
[1, 4]
[2, 3]
[2, 4]
[3, 4]
3-combs of [1, 2, 3, 4]:
[1, 2, 3]
[1, 2, 4]
[1, 3, 4]
[2, 3, 4]
4-combs of [1, 2, 3, 4]:
[1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:
From the Iterators list, about the types of these things.
>>> def g():
... yield 1
...
>>> type(g)
<class 'function'>
>>> i = g()
>>> type(i)
<class 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_code', 'gi_frame', 'gi_running', 'send', 'throw']
>>> print(i.__next__.__doc__)
x.__next__() <==> next(x)
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True
And more, added later.
>>> i.gi_running
0
>>> type(i.gi_frame)
<class 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
...
AttributeError: readonly attribute
>>> def g():
... yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> next(me)
1
>>> me.gi_running
0
A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: python-list@python.org
Subject: Re: PEP 255: Simple Generators
>>> class disjointSet:
... def __init__(self, name):
... self.name = name
... self.parent = None
... self.generator = self.generate()
...
... def generate(self):
... while not self.parent:
... yield self
... for x in self.parent.generator:
... yield x
...
... def find(self):
... return next(self.generator)
...
... def union(self, parent):
... if self.parent:
... raise ValueError("Sorry, I'm not a root!")
... self.parent = parent
...
... def __str__(self):
... return self.name
>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]
>>> import random
>>> gen = random.Random(42)
>>> while 1:
... for s in sets:
... print(" %s->%s" % (s, s.find()), end='')
... print()
... if len(roots) > 1:
... s1 = gen.choice(roots)
... roots.remove(s1)
... s2 = gen.choice(roots)
... s1.union(s2)
... print("merged", s1, "into", s2)
... else:
... break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged K into B
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged A into F
A->F B->B C->C D->D E->E F->F G->G H->H I->I J->J K->B L->L M->M
merged E into F
A->F B->B C->C D->D E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged D into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->M
merged M into C
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->J K->B L->L M->C
merged J into B
A->F B->B C->C D->C E->F F->F G->G H->H I->I J->B K->B L->L M->C
merged B into C
A->F B->C C->C D->C E->F F->F G->G H->H I->I J->C K->C L->L M->C
merged F into G
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->L M->C
merged L into C
A->G B->C C->C D->C E->G F->G G->G H->H I->I J->C K->C L->C M->C
merged G into I
A->I B->C C->C D->C E->I F->I G->I H->H I->I J->C K->C L->C M->C
merged I into H
A->H B->C C->C D->C E->H F->H G->H H->H I->H J->C K->C L->C M->C
merged C into H
A->H B->H C->H D->H E->H F->H G->H H->H I->H J->H K->H L->H M->H
"""
# Emacs turd '
# Fun tests (for sufficiently warped notions of "fun").
fun_tests = """
Build up to a recursive Sieve of Eratosthenes generator.
>>> def firstn(g, n):
... return [next(g) for i in range(n)]
>>> def intsfrom(i):
... while 1:
... yield i
... i += 1
>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]
>>> def exclude_multiples(n, ints):
... for i in ints:
... if i % n:
... yield i
>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]
>>> def sieve(ints):
... prime = next(ints)
... yield prime
... not_divisible_by_prime = exclude_multiples(prime, ints)
... for p in sieve(not_divisible_by_prime):
... yield p
>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
Another famous problem: generate all integers of the form
2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0. Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.
>>> def times(n, g):
... for i in g:
... yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
>>> def merge(g, h):
... ng = next(g)
... nh = next(h)
... while 1:
... if ng < nh:
... yield ng
... ng = next(g)
... elif ng > nh:
... yield nh
... nh = next(h)
... else:
... yield ng
... ng = next(g)
... nh = next(h)
The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator. Note that me_times2 (etc) each need to see every element in the
result sequence. So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).
>>> def m235():
... yield 1
... me_times2 = times(2, m235())
... me_times3 = times(3, m235())
... me_times5 = times(5, m235())
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts. Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.
>>> result = m235()
>>> for i in range(3):
... print(firstn(result, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
Heh. Here's one way to get a shared list, complete with an excruciating
namespace renaming trick. The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().
>>> class LazyList:
... def __init__(self, g):
... self.sofar = []
... self.fetch = g.__next__
...
... def __getitem__(self, i):
... sofar, fetch = self.sofar, self.fetch
... while i >= len(sofar):
... sofar.append(fetch())
... return sofar[i]
>>> def m235():
... yield 1
... # Gack: m235 below actually refers to a LazyList.
... me_times2 = times(2, m235)
... me_times3 = times(3, m235)
... me_times5 = times(5, m235)
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Print as many of these as you like -- *this* implementation is memory-
efficient.
>>> m235 = LazyList(m235())
>>> for i in range(5):
... print([m235[j] for j in range(15*i, 15*(i+1))])
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
Ye olde Fibonacci generator, LazyList style.
>>> def fibgen(a, b):
...
... def sum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def tail(g):
... next(g) # throw first away
... for x in g:
... yield x
...
... yield a
... yield b
... for s in sum(iter(fib),
... tail(iter(fib))):
... yield s
>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
Running after your tail with itertools.tee (new in version 2.4)
The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
suppose the list as already produced by recursively calling itself.
For these algorithms to work, they must:
- produce at least a first element without presupposing the existence of
the rest of the list
- produce their elements in a lazy manner
To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In python, we have to explicitly maintain a list of already computed results
and abandon genuine recursivity.
This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.
Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".
>>> from itertools import tee
>>> def m235():
... def _m235():
... yield 1
... for n in merge(times(2, m2),
... merge(times(3, m3),
... times(5, m5))):
... yield n
... m1 = _m235()
... m2, m3, m5, mRes = tee(m1, 4)
... return mRes
>>> it = m235()
>>> for i in range(5):
... print(firstn(it, 15))
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the hamming
sequence during hours without increasing memory usage, or very little.
The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.
Ye olde Fibonacci generator, tee style.
>>> def fib():
...
... def _isum(g, h):
... while 1:
... yield next(g) + next(h)
...
... def _fib():
... yield 1
... yield 2
... next(fibTail) # throw first away
... for res in _isum(fibHead, fibTail):
... yield res
...
... realfib = _fib()
... fibHead, fibTail, fibRes = tee(realfib, 3)
... return fibRes
>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
"""
# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
# hackery.
syntax_tests = """
>>> def f():
... return 22
... yield 1
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
>>> def f():
... yield 1
... return 22
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
"return None" is not the same as "return" in a generator:
>>> def f():
... yield 1
... return None
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator
These are fine:
>>> def f():
... yield 1
... return
>>> def f():
... try:
... yield 1
... finally:
... pass
>>> def f():
... try:
... try:
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... pass
... finally:
... pass
>>> def f():
... try:
... try:
... yield 12
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... try:
... x = 12
... finally:
... yield 12
... except:
... return
>>> list(f())
[12, 666]
>>> def f():
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... yield 1
>>> type(f())
<class 'generator'>
>>> def f():
... if "":
... yield None
>>> type(f())
<class 'generator'>
>>> def f():
... return
... try:
... if x==4:
... pass
... elif 0:
... try:
... 1//0
... except SyntaxError:
... pass
... else:
... if 0:
... while 12:
... x += 1
... yield 2 # don't blink
... f(a, b, c, d, e)
... else:
... pass
... except:
... x = 1
... return
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... def g():
... yield 1
...
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... class C:
... def __init__(self):
... yield 1
... def f(self):
... yield 2
>>> type(f())
<class 'NoneType'>
>>> def f():
... if 0:
... return
... if 0:
... yield 2
>>> type(f())
<class 'generator'>
>>> def f():
... if 0:
... lambda x: x # shouldn't trigger here
... return # or here
... def f(i):
... return 2*i # or here
... if 0:
... return 3 # but *this* sucks (line 8)
... if 0:
... yield 2 # because it's a generator (line 10)
Traceback (most recent call last):
SyntaxError: 'return' with argument inside generator
This one caused a crash (see SF bug 567538):
>>> def f():
... for i in range(3):
... try:
... continue
... finally:
... yield i
...
>>> g = f()
>>> print(next(g))
0
>>> print(next(g))
1
>>> print(next(g))
2
>>> print(next(g))
Traceback (most recent call last):
StopIteration
Test the gi_code attribute
>>> def f():
... yield 5
...
>>> g = f()
>>> g.gi_code is f.__code__
True
>>> next(g)
5
>>> next(g)
Traceback (most recent call last):
StopIteration
>>> g.gi_code is f.__code__
True
Test the __name__ attribute and the repr()
>>> def f():
... yield 5
...
>>> g = f()
>>> g.__name__
'f'
>>> repr(g) # doctest: +ELLIPSIS
'<generator object f at ...>'
Lambdas shouldn't have their usual return behavior.
>>> x = lambda: (yield 1)
>>> list(x())
[1]
>>> x = lambda: ((yield 1), (yield 2))
>>> list(x())
[1, 2]
"""
# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure. Pass a list of no-argument functions
# that return iterable objects. Easiest to explain by example: assume the
# function list [x, y, z] is passed. Then conjoin acts like:
#
# def g():
# values = [None] * 3
# for values[0] in x():
# for values[1] in y():
# for values[2] in z():
# yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop. If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator). Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
def simple_conjoin(gs):
    """Backtracking cross-product over the no-argument generator
    factories in gs.

    Yields one shared list per combination, where slot i holds the
    current value from gs[i]().  Because the *same* list object is
    yielded every time, callers who want to keep results must copy them.
    """
    slots = [None] * len(gs)

    def descend(depth):
        # Past the last factory: a complete assignment is ready.
        if depth == len(gs):
            yield slots
            return
        # Otherwise iterate this slot, recursing for the remaining ones.
        for slots[depth] in gs[depth]():
            for out in descend(depth + 1):
                yield out

    for out in descend(0):
        yield out
# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient. By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.
def conjoin(gs):
    """Backtracking cross-product over the no-argument generator
    factories in gs, yielding the same shared values list for each
    combination (copy it if you need to keep it).

    Behaves like simple_conjoin(), but manually unrolls three loop
    nests at a time across generator boundaries to cut the per-item
    recursion overhead.
    """
    n = len(gs)
    values = [None] * n
    # Do one loop nest at a time recursively, until the # of loop nests
    # remaining is divisible by 3.
    def gen(i):
        if i >= n:
            # All slots filled: a complete combination is ready.
            yield values
        elif (n-i) % 3:
            # Peel off a single level until the remainder is a multiple
            # of 3, then hand off to the unrolled helper.
            ip1 = i+1
            for values[i] in gs[i]():
                for x in gen(ip1):
                    yield x
        else:
            for x in _gen3(i):
                yield x
    # Do three loop nests at a time, recursing only if at least three more
    # remain.  Don't call directly: this is an internal optimization for
    # gen's use.
    def _gen3(i):
        assert i < n and (n-i) % 3 == 0
        ip1, ip2, ip3 = i+1, i+2, i+3
        g, g1, g2 = gs[i : ip3]
        if ip3 >= n:
            # These are the last three, so we can yield values directly.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        yield values
        else:
            # At least 6 loop nests remain; peel off 3 and recurse for the
            # rest.
            for values[i] in g():
                for values[ip1] in g1():
                    for values[ip2] in g2():
                        for x in _gen3(ip3):
                            yield x
    for x in gen(0):
        yield x
# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels). In such cases Python is likely to run out of
# stack space due to recursion. So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space. Without explicitly resumable generators, this is
# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.
def flat_conjoin(gs): # rename to conjoin to run tests with this instead
    """Iterative (recursion-free) version of conjoin().

    Same contract: yields one shared values list per combination of the
    no-argument generator factories in gs.  Uses an explicit stack of
    iterators so backtracking depth is not limited by Python's call
    stack — important for problems with thousands of levels.
    """
    n = len(gs)
    values = [None] * n
    iters = [None] * n
    _StopIteration = StopIteration # make local because caught a *lot*
    i = 0
    while 1:
        # Descend: pump a fresh iterator into every remaining slot.
        try:
            while i < n:
                it = iters[i] = gs[i]().__next__
                values[i] = it()
                i += 1
        except _StopIteration:
            # Some slot was exhausted immediately; fall through to
            # backtracking without yielding.
            pass
        else:
            assert i == n
            yield values
        # Backtrack until an older iterator can be resumed.
        i -= 1
        while i >= 0:
            try:
                values[i] = iters[i]()
                # Success!  Start fresh at next level.
                i += 1
                break
            except _StopIteration:
                # Continue backtracking.
                i -= 1
        else:
            # Even the outermost iterator is exhausted: all done.
            assert i < 0
            break
# A conjoin-based N-Queens solver.
class Queens:
    """Conjoin-based N-Queens solver.

    solve() yields each solution as a row->column list; printsolution()
    renders one solution as an ASCII board.
    """
    def __init__(self, n):
        self.n = n
        rangen = range(n)
        # Assign a unique int to each column and diagonal.
        # columns: n of those, range(n).
        # NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
        # each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
        # based.
        # NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
        # each, smallest i+j is 0, largest is 2n-2.
        # For each square, compute a bit vector of the columns and
        # diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
        self.rowgenerators = []
        for i in rangen:
            rowuses = [(1 << j) | # column ordinal
                       (1 << (n + i-j + n-1)) | # NW-SE ordinal
                       (1 << (n + 2*n-1 + i+j)) # NE-SW ordinal
                       for j in rangen]
            # Closure over this row's bit vectors; yields each legal
            # column, marking it used across the yield and unmarking on
            # resume (backtracking undoes the move automatically).
            def rowgen(rowuses=rowuses):
                for j in rangen:
                    uses = rowuses[j]
                    if uses & self.used == 0:
                        self.used |= uses
                        yield j
                        self.used &= ~uses
            self.rowgenerators.append(rowgen)
    # Generate solutions.
    def solve(self):
        self.used = 0
        for row2col in conjoin(self.rowgenerators):
            yield row2col
    def printsolution(self, row2col):
        # Render one solution (row -> column list) as an ASCII board.
        n = self.n
        assert n == len(row2col)
        sep = "+" + "-+" * n
        print(sep)
        for i in range(n):
            squares = [" " for j in range(n)]
            squares[row2col[i]] = "Q"
            print("|" + "|".join(squares) + "|")
        print(sep)
# A conjoin-based Knight's Tour solver. This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
def __init__(self, m, n, hard=0):
self.m, self.n = m, n
# solve() will set up succs[i] to be a list of square #i's
# successors.
succs = self.succs = []
# Remove i0 from each of its successor's successor lists, i.e.
# successors can't go back to i0 again. Return 0 if we can
# detect this makes a solution impossible, else return 1.
def remove_from_successors(i0, len=len):
# If we remove all exits from a free square, we're dead:
# even if we move to it next, we can't leave it again.
# If we create a square with one exit, we must visit it next;
# else somebody else will have to visit it, and since there's
# only one adjacent, there won't be a way to leave it again.
# Finelly, if we create more than one free square with a
# single exit, we can only move to one of them next, leaving
# the other one a dead end.
ne0 = ne1 = 0
for i in succs[i0]:
s = succs[i]
s.remove(i0)
e = len(s)
if e == 0:
ne0 += 1
elif e == 1:
ne1 += 1
return ne0 == 0 and ne1 < 2
# Put i0 back in each of its successor's successor lists.
def add_to_successors(i0):
for i in succs[i0]:
succs[i].append(i0)
# Generate the first move.
def first():
if m < 1 or n < 1:
return
# Since we're looking for a cycle, it doesn't matter where we
# start. Starting in a corner makes the 2nd move easy.
corner = self.coords2index(0, 0)
remove_from_successors(corner)
self.lastij = corner
yield corner
add_to_successors(corner)
# Generate the second moves.
def second():
corner = self.coords2index(0, 0)
assert self.lastij == corner # i.e., we started in the corner
if m < 3 or n < 3:
return
assert len(succs[corner]) == 2
assert self.coords2index(1, 2) in succs[corner]
assert self.coords2index(2, 1) in succs[corner]
# Only two choices. Whichever we pick, the other must be the
# square picked on move m*n, as it's the only way to get back
# to (0, 0). Save its index in self.final so that moves before
# the last know it must be kept free.
for i, j in (1, 2), (2, 1):
this = self.coords2index(i, j)
final = self.coords2index(3-i, 3-j)
self.final = final
remove_from_successors(this)
succs[final].append(corner)
self.lastij = this
yield this
succs[final].remove(corner)
add_to_successors(this)
# Generate moves 3 thru m*n-1.
def advance(len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, i)]
break
candidates.append((e, i))
else:
candidates.sort()
for e, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate moves 3 thru m*n-1. Alternative version using a
# stronger (but more expensive) heuristic to order successors.
# Since the # of backtracking levels is m*n, a poor move early on
# can take eons to undo. Smallest square board for which this
# matters a lot is 52x52.
def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
# Break ties via max distance from board centerpoint (favor
# corners and edges whenever possible).
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, 0, i)]
break
i1, j1 = self.index2coords(i)
d = (i1 - vmid)**2 + (j1 - hmid)**2
candidates.append((e, -d, i))
else:
candidates.sort()
for e, d, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate the last move.
def last():
assert self.final in succs[self.lastij]
yield self.final
if m*n < 4:
self.squaregenerators = [first]
else:
self.squaregenerators = [first, second] + \
[hard and advance_hard or advance] * (m*n - 3) + \
[last]
def coords2index(self, i, j):
assert 0 <= i < self.m
assert 0 <= j < self.n
return i * self.n + j
def index2coords(self, index):
assert 0 <= index < self.m * self.n
return divmod(index, self.n)
def _init_board(self):
succs = self.succs
del succs[:]
m, n = self.m, self.n
c2i = self.coords2index
offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2),
(-1, -2), (-2, -1), (-2, 1), (-1, 2)]
rangen = range(n)
for i in range(m):
for j in rangen:
s = [c2i(i+io, j+jo) for io, jo in offsets
if 0 <= i+io < m and
0 <= j+jo < n]
succs.append(s)
# Generate solutions.
def solve(self):
self._init_board()
for x in conjoin(self.squaregenerators):
yield x
def printsolution(self, x):
    """Pretty-print tour x (a sequence of m*n square indices) as an
    ASCII grid, numbering each square by the step it was visited on."""
    m, n = self.m, self.n
    assert len(x) == m * n
    width = len(str(m * n))
    cell = "%" + str(width) + "d"  # fixed-width cell (avoids shadowing `format`)
    grid = [[None] * n for _ in range(m)]
    for step, square in enumerate(x, 1):
        row, col = self.index2coords(square)
        grid[row][col] = cell % step
    rule = "+" + ("-" * width + "+") * n
    print(rule)
    for cells in grid:
        print("|" + "|".join(cells) + "|")
        print(rule)
conjoin_tests = """
Generate the 3-bit binary numbers in order. This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.
>>> for c in conjoin([lambda: iter((0, 1))] * 3):
... print(c)
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]
For efficiency in typical backtracking apps, conjoin() yields the same list
object each time. So if you want to save away a full account of its
generated sequence, you need to copy its results.
>>> def gencopy(iterator):
... for x in iterator:
... yield x[:]
>>> for n in range(10):
... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
... print(n, len(all), all[0] == [0] * n, all[-1] == [1] * n)
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True
And run an 8-queens solver.
>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
>>> print(count, "solutions in all.")
92 solutions in all.
And run a Knight's Tour on a 10x10 board. Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
... count += 1
... if count <= LIMIT:
... print("Solution", count)
... k.printsolution(x)
... else:
... break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
weakref_tests = """\
Generators are weakly referencable:
>>> import weakref
>>> def gen():
... yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)
Generator-iterators are weakly referencable as well:
>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']
"""
coroutine_tests = """\
Sending a value into a started generator:
>>> def f():
... print((yield 1))
... yield 2
>>> g = f()
>>> next(g)
1
>>> g.send(42)
42
2
Sending a value into a new generator produces a TypeError:
>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator
Yield by itself yields None:
>>> def f(): yield
>>> list(f())
[None]
An obscene abuse of a yield expression within a generator expression:
>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]
And a more sane, but still weird usage:
>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<class 'generator'>
A yield expression with augmented assignment.
>>> def coroutine(seq):
... count = 0
... while count < 200:
... count += yield
... seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> next(c)
>>> print(seq)
[]
>>> c.send(10)
>>> print(seq)
[10]
>>> c.send(10)
>>> print(seq)
[10, 20]
>>> c.send(10)
>>> print(seq)
[10, 20, 30]
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2)
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function
>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
...
SyntaxError: 'return' with argument inside generator
>>> def f(): x = yield = y
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible
>>> def f(): (yield bar) = y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
>>> def f(): (yield bar) += y
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression
Now check some throw() conditions:
>>> def f():
... while True:
... try:
... print((yield))
... except ValueError as v:
... print("caught ValueError (%s)" % (v))
>>> import sys
>>> g = f()
>>> next(g)
>>> g.throw(ValueError) # type only
caught ValueError ()
>>> g.throw(ValueError("xyz")) # value only
caught ValueError (xyz)
>>> g.throw(ValueError, ValueError(1)) # value+matching type
caught ValueError (1)
>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
caught ValueError (1)
>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
caught ValueError (1)
>>> g.throw(ValueError(1), "foo") # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value
>>> g.throw(ValueError, "foo", 23) # bad args
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object
>>> g.throw("abc")
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not str
>>> g.throw(0)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not int
>>> g.throw(list)
Traceback (most recent call last):
...
TypeError: exceptions must be classes or instances deriving from BaseException, not type
>>> def throw(g,exc):
... try:
... raise exc
... except:
... g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()
>>> g.send(1)
1
>>> throw(g,TypeError) # terminate the generator
Traceback (most recent call last):
...
TypeError
>>> print(g.gi_frame)
None
>>> g.send(2)
Traceback (most recent call last):
...
StopIteration
>>> g.throw(ValueError,6) # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6
>>> f().throw(ValueError,7) # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7
Now let's try closing a generator:
>>> def f():
... try: yield
... except GeneratorExit:
... print("exiting")
>>> g = f()
>>> next(g)
>>> g.close()
exiting
>>> g.close() # should be no-op now
>>> f().close() # close on just-opened generator should be fine
>>> def f(): yield # an even simpler generator
>>> f().close() # close before opening
>>> g = f()
>>> next(g)
>>> g.close() # close normally
And finalization:
>>> def f():
... try: yield
... finally:
... print("exiting")
>>> g = f()
>>> next(g)
>>> del g
exiting
GeneratorExit is not caught by except Exception:
>>> def f():
... try: yield
... except Exception:
... print('except')
... finally:
... print('finally')
>>> g = f()
>>> next(g)
>>> del g
finally
Now let's try some ill-behaved generators:
>>> def f():
... try: yield
... except GeneratorExit:
... yield "foo!"
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()
Our ill-behaved code should be invoked during GC:
>>> import sys, io
>>> old, sys.stderr = sys.stderr, io.StringIO()
>>> g = f()
>>> next(g)
>>> del g
>>> sys.stderr.getvalue().startswith(
... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
... )
True
>>> sys.stderr = old
And errors thrown during closing should propagate:
>>> def f():
... try: yield
... except GeneratorExit:
... raise TypeError("fie!")
>>> g = f()
>>> next(g)
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!
Ensure that various yield expression constructs make their
enclosing function a generator:
>>> def f(): x += yield
>>> type(f())
<class 'generator'>
>>> def f(): x = yield
>>> type(f())
<class 'generator'>
>>> def f(): lambda x=(yield): 1
>>> type(f())
<class 'generator'>
>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<class 'generator'>
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<class 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]
"""
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
>>> import itertools
>>> def leak():
... class gen:
... def __iter__(self):
... return self
... def __next__(self):
... return self.item
... g = gen()
... head, tail = itertools.tee(g)
... g.item = head
... return head
>>> it = leak()
Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.
>>> item = next(it)
This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.
>>> def leak():
... def gen():
... while True:
... yield g
... g = gen()
>>> leak()
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.
>>> import sys, io
>>> old = sys.stderr
>>> try:
... sys.stderr = io.StringIO()
... class Leaker:
... def __del__(self):
... raise RuntimeError
...
... l = Leaker()
... del l
... err = sys.stderr.getvalue().strip()
... err.startswith(
... "Exception RuntimeError: RuntimeError() in <"
... )
... err.endswith("> ignored")
... len(err.splitlines())
... finally:
... sys.stderr = old
True
True
1
These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.
"""
# Map doctest-suite names to their doctest strings; the doctest driver
# discovers this module-level dict automatically.
__test__ = {"tut": tutorial_tests,
            "pep": pep_tests,
            "email": email_tests,
            "fun": fun_tests,
            "syntax": syntax_tests,
            "conjoin": conjoin_tests,
            "weakref": weakref_tests,
            "coroutine": coroutine_tests,
            "refleaks": refleaks_tests,
            }
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
    """Run every doctest suite registered in __test__ (regrtest hook)."""
    from test import support, test_generators
    support.run_doctest(test_generators, verbose)
# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
    test_main(1)
| apache-2.0 |
Sorsly/subtle | google-cloud-sdk/platform/bq/third_party/oauth2client/keyring_storage.py | 8 | 3242 | #!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A keyring based Storage.
A Storage for Credentials that uses the keyring module.
"""
import threading
import keyring
from oauth2client.client import Credentials
from oauth2client.client import Storage as BaseStorage
class Storage(BaseStorage):
    """Store and retrieve a single credential to and from the keyring.

    To use this module you must have the keyring module installed. See
    <http://pypi.python.org/pypi/keyring/>. This is an optional module and is
    not installed with oauth2client by default because it does not work on all
    the platforms that oauth2client supports, such as Google App Engine.

    The keyring module <http://pypi.python.org/pypi/keyring/> is a
    cross-platform library for access the keyring capabilities of the local
    system. The user will be prompted for their keyring password when this
    module is used, and the manner in which the user is prompted will vary per
    platform.

    Usage:
        from oauth2client.keyring_storage import Storage

        s = Storage('name_of_application', 'user1')
        credentials = s.get()
    """

    def __init__(self, service_name, user_name):
        """Constructor.

        Args:
            service_name: string, The name of the service under which the
                credentials are stored.
            user_name: string, The name of the user to store credentials for.
        """
        self._service_name = service_name
        self._user_name = user_name
        # Serializes the locked_* methods; keyring backends are not
        # guaranteed to be thread-safe.
        self._lock = threading.Lock()

    def acquire_lock(self):
        """Acquires any lock necessary to access this Storage.

        This lock is not reentrant.
        """
        self._lock.acquire()

    def release_lock(self):
        """Release the Storage lock.

        Trying to release a lock that isn't held will result in a
        RuntimeError.
        """
        self._lock.release()

    def locked_get(self):
        """Retrieve Credential from the keyring.

        Returns:
            oauth2client.client.Credentials, or None if nothing is stored
            under (service_name, user_name) or the stored content is not
            valid credentials JSON.
        """
        credentials = None
        content = keyring.get_password(self._service_name, self._user_name)
        if content is not None:
            try:
                credentials = Credentials.new_from_json(content)
                credentials.set_store(self)
            except ValueError:
                # Stored blob is not parseable as credentials; treat as absent.
                pass
        return credentials

    def locked_put(self, credentials):
        """Write Credentials to the keyring as JSON.

        Args:
            credentials: Credentials, the credentials to store.
        """
        keyring.set_password(self._service_name, self._user_name,
                             credentials.to_json())

    def locked_delete(self):
        """Delete Credentials from the keyring.

        NOTE: this overwrites the stored entry with an empty string rather
        than removing the keyring entry itself.
        """
        keyring.set_password(self._service_name, self._user_name, '')
| mit |
jiemohuishou/virt-manager-0.9.3 | src/virtManagerTui/changehost.py | 3 | 2331 | # changehost.py - Copyright (C) 2009 Red Hat, Inc.
# Written by Darryl L. Pierce <dpierce@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
import snack
import logging
from hostlistconfigscreen import HostListConfigScreen
# Wizard page indices for the change-host flow: first pick a connection,
# then confirm the connected state.
CONNECTION_LIST_PAGE = 1
CONNECTED_PAGE = 2
class ChangeHostConfigScreen(HostListConfigScreen):
    """Two-page config screen that lets the user switch the active
    libvirt connection: a connection list page, then a confirmation page."""

    def __init__(self):
        HostListConfigScreen.__init__(self, "")

    def get_title(self):
        """Show the currently open libvirt URL in the screen title."""
        return "Currently: %s" % self.get_libvirt().get_url()

    def get_elements_for_page(self, screen, page):
        """Return the snack widgets for the requested page.

        BUG FIX: page numbers are compared with `==` instead of `is`;
        `is` tests object identity and only happened to work because
        CPython caches small ints.
        """
        if page == CONNECTION_LIST_PAGE:
            return self.get_connection_list_page(screen)
        elif page == CONNECTED_PAGE:
            return self.get_connected_page(screen)

    def process_input(self, page):
        """Open the selected connection, or finish on the last page."""
        if page == CONNECTION_LIST_PAGE:
            logging.info("Changing libvirt connection to %s",
                         self.get_selected_connection())
            self.get_libvirt().open_connection(self.get_selected_connection())
        elif page == CONNECTED_PAGE:
            self.set_finished()

    def page_has_next(self, page):
        if page == CONNECTION_LIST_PAGE:
            return self.has_selectable_connections()
        return False

    def page_has_back(self, page):
        return page > CONNECTION_LIST_PAGE

    def page_has_finish(self, page):
        return page == CONNECTED_PAGE

    def get_connected_page(self, screen):
        # screen is unused here; kept to satisfy the page-builder interface.
        ignore = screen
        return [snack.Label("Connected to %s" % self.get_selected_connection())]
def ChangeHost():
    """Entry point: build the change-host screen and run it."""
    ChangeHostConfigScreen().start()
| gpl-2.0 |
auready/django | tests/sites_tests/tests.py | 40 | 13012 | from django.apps import apps
from django.apps.registry import Apps
from django.conf import settings
from django.contrib.sites import models
from django.contrib.sites.management import create_default_site
from django.contrib.sites.middleware import CurrentSiteMiddleware
from django.contrib.sites.models import Site, clear_site_cache
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.signals import post_migrate
from django.http import HttpRequest, HttpResponse
from django.test import TestCase, modify_settings, override_settings
from django.test.utils import captured_stdout
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class SitesFrameworkTests(TestCase):
    """Tests for django.contrib.sites: the Site model, the SITE_CACHE, and
    get_current_site() resolution with and without SITE_ID."""
    multi_db = True

    def setUp(self):
        # Ensure a Site row exists whose pk matches settings.SITE_ID.
        self.site = Site(
            id=settings.SITE_ID,
            domain="example.com",
            name="example.com",
        )
        self.site.save()

    def tearDown(self):
        # Drop SITE_CACHE entries so tests stay independent of each other.
        Site.objects.clear_cache()

    def test_site_manager(self):
        # Make sure that get_current() does not return a deleted Site object.
        s = Site.objects.get_current()
        self.assertIsInstance(s, Site)
        s.delete()
        with self.assertRaises(ObjectDoesNotExist):
            Site.objects.get_current()

    def test_site_cache(self):
        # After updating a Site object (e.g. via the admin), we shouldn't return a
        # bogus value from the SITE_CACHE.
        site = Site.objects.get_current()
        self.assertEqual("example.com", site.name)
        s2 = Site.objects.get(id=settings.SITE_ID)
        s2.name = "Example site"
        s2.save()
        site = Site.objects.get_current()
        self.assertEqual("Example site", site.name)

    def test_delete_all_sites_clears_cache(self):
        # When all site objects are deleted the cache should also
        # be cleared and get_current() should raise a DoesNotExist.
        self.assertIsInstance(Site.objects.get_current(), Site)
        Site.objects.all().delete()
        with self.assertRaises(Site.DoesNotExist):
            Site.objects.get_current()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_get_current_site(self):
        # The correct Site object is returned
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        site = get_current_site(request)
        self.assertIsInstance(site, Site)
        self.assertEqual(site.id, settings.SITE_ID)
        # An exception is raised if the sites framework is installed
        # but there is no matching Site
        site.delete()
        with self.assertRaises(ObjectDoesNotExist):
            get_current_site(request)
        # A RequestSite is returned if the sites framework is not installed
        with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
            site = get_current_site(request)
            self.assertIsInstance(site, RequestSite)
            self.assertEqual(site.name, "example.com")

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com'])
    def test_get_current_site_no_site_id(self):
        # Without SITE_ID, get_current_site() falls back to host matching.
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        del settings.SITE_ID
        site = get_current_site(request)
        self.assertEqual(site.name, "example.com")

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com'])
    def test_get_current_site_host_with_trailing_dot(self):
        """
        The site is matched if the name in the request has a trailing dot.
        """
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'example.com.',
            'SERVER_PORT': '80',
        }
        site = get_current_site(request)
        self.assertEqual(site.name, 'example.com')

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com', 'example.net'])
    def test_get_current_site_no_site_id_and_handle_port_fallback(self):
        request = HttpRequest()
        s1 = self.site
        s2 = Site.objects.create(domain='example.com:80', name='example.com:80')
        # Host header without port
        request.META = {'HTTP_HOST': 'example.com'}
        site = get_current_site(request)
        self.assertEqual(site, s1)
        # Host header with port - match, no fallback without port
        request.META = {'HTTP_HOST': 'example.com:80'}
        site = get_current_site(request)
        self.assertEqual(site, s2)
        # Host header with port - no match, fallback without port
        request.META = {'HTTP_HOST': 'example.com:81'}
        site = get_current_site(request)
        self.assertEqual(site, s1)
        # Host header with non-matching domain
        request.META = {'HTTP_HOST': 'example.net'}
        with self.assertRaises(ObjectDoesNotExist):
            get_current_site(request)
        # Ensure domain for RequestSite always matches host header
        with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
            request.META = {'HTTP_HOST': 'example.com'}
            site = get_current_site(request)
            self.assertEqual(site.name, 'example.com')
            request.META = {'HTTP_HOST': 'example.com:80'}
            site = get_current_site(request)
            self.assertEqual(site.name, 'example.com:80')

    def test_domain_name_with_whitespaces(self):
        # Regression for #17320
        # Domain names are not allowed contain whitespace characters
        site = Site(name="test name", domain="test test")
        with self.assertRaises(ValidationError):
            site.full_clean()
        site.domain = "test\ttest"
        with self.assertRaises(ValidationError):
            site.full_clean()
        site.domain = "test\ntest"
        with self.assertRaises(ValidationError):
            site.full_clean()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_clear_site_cache(self):
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        self.assertEqual(models.SITE_CACHE, {})
        get_current_site(request)
        expected_cache = {self.site.id: self.site}
        self.assertEqual(models.SITE_CACHE, expected_cache)
        with self.settings(SITE_ID=''):
            get_current_site(request)
            expected_cache.update({self.site.domain: self.site})
            self.assertEqual(models.SITE_CACHE, expected_cache)
        clear_site_cache(Site, instance=self.site, using='default')
        self.assertEqual(models.SITE_CACHE, {})

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example2.com'])
    def test_clear_site_cache_domain(self):
        site = Site.objects.create(name='example2.com', domain='example2.com')
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example2.com",
            "SERVER_PORT": "80",
        }
        get_current_site(request)  # prime the models.SITE_CACHE
        expected_cache = {site.domain: site}
        self.assertEqual(models.SITE_CACHE, expected_cache)
        # Site exists in 'default' database so using='other' shouldn't clear.
        clear_site_cache(Site, instance=site, using='other')
        self.assertEqual(models.SITE_CACHE, expected_cache)
        # using='default' should clear.
        clear_site_cache(Site, instance=site, using='default')
        self.assertEqual(models.SITE_CACHE, {})

    def test_unique_domain(self):
        site = Site(domain=self.site.domain)
        msg = 'Site with this Domain name already exists.'
        with self.assertRaisesMessage(ValidationError, msg):
            site.validate_unique()

    def test_site_natural_key(self):
        self.assertEqual(Site.objects.get_by_natural_key(self.site.domain), self.site)
        self.assertEqual(self.site.natural_key(), (self.site.domain,))

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_requestsite_save_notimplemented_msg(self):
        # Test response msg for RequestSite.save NotImplementedError
        request = HttpRequest()
        request.META = {
            "HTTP_HOST": "example.com",
        }
        msg = 'RequestSite cannot be saved.'
        with self.assertRaisesMessage(NotImplementedError, msg):
            RequestSite(request).save()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_requestsite_delete_notimplemented_msg(self):
        # Test response msg for RequestSite.delete NotImplementedError
        request = HttpRequest()
        request.META = {
            "HTTP_HOST": "example.com",
        }
        msg = 'RequestSite cannot be deleted.'
        with self.assertRaisesMessage(NotImplementedError, msg):
            RequestSite(request).delete()
class JustOtherRouter:
    """Database router that permits migrations only on the 'other' alias."""

    def allow_migrate(self, db, app_label, **hints):
        # Only the 'other' database accepts migrations.
        allowed = (db == 'other')
        return allowed
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class CreateDefaultSiteTests(TestCase):
    """Tests for create_default_site(), the post_migrate handler that makes
    the 'example.com' Site row."""
    multi_db = True

    def setUp(self):
        self.app_config = apps.get_app_config('sites')
        # Delete the site created as part of the default migration process.
        Site.objects.all().delete()

    def test_basic(self):
        """
        #15346, #15573 - create_default_site() creates an example site only if
        none exist.
        """
        with captured_stdout() as stdout:
            create_default_site(self.app_config)
        self.assertEqual(Site.objects.count(), 1)
        self.assertIn("Creating example.com", stdout.getvalue())
        # A second call is a no-op and prints nothing.
        with captured_stdout() as stdout:
            create_default_site(self.app_config)
        self.assertEqual(Site.objects.count(), 1)
        self.assertEqual("", stdout.getvalue())

    @override_settings(DATABASE_ROUTERS=[JustOtherRouter()])
    def test_multi_db_with_router(self):
        """
        #16353, #16828 - The default site creation should respect db routing.
        """
        create_default_site(self.app_config, using='default', verbosity=0)
        create_default_site(self.app_config, using='other', verbosity=0)
        self.assertFalse(Site.objects.using('default').exists())
        self.assertTrue(Site.objects.using('other').exists())

    def test_multi_db(self):
        # Without a router, both databases get the default site.
        create_default_site(self.app_config, using='default', verbosity=0)
        create_default_site(self.app_config, using='other', verbosity=0)
        self.assertTrue(Site.objects.using('default').exists())
        self.assertTrue(Site.objects.using('other').exists())

    def test_save_another(self):
        """
        #17415 - Another site can be created right after the default one.
        On some backends the sequence needs to be reset after saving with an
        explicit ID. There shouldn't be a sequence collisions by saving another
        site. This test is only meaningful with databases that use sequences
        for automatic primary keys such as PostgreSQL and Oracle.
        """
        create_default_site(self.app_config, verbosity=0)
        Site(domain='example2.com', name='example2.com').save()

    def test_signal(self):
        """
        #23641 - Sending the ``post_migrate`` signal triggers creation of the
        default site.
        """
        post_migrate.send(sender=self.app_config, app_config=self.app_config, verbosity=0)
        self.assertTrue(Site.objects.exists())

    @override_settings(SITE_ID=35696)
    def test_custom_site_id(self):
        """
        #23945 - The configured ``SITE_ID`` should be respected.
        """
        create_default_site(self.app_config, verbosity=0)
        self.assertEqual(Site.objects.get().pk, 35696)

    @override_settings()  # Restore original ``SITE_ID`` afterwards.
    def test_no_site_id(self):
        """
        #24488 - The pk should default to 1 if no ``SITE_ID`` is configured.
        """
        del settings.SITE_ID
        create_default_site(self.app_config, verbosity=0)
        self.assertEqual(Site.objects.get().pk, 1)

    def test_unavailable_site_model(self):
        """
        #24075 - A Site shouldn't be created if the model isn't available.
        """
        # A fresh, empty app registry has no Site model registered.
        apps = Apps()
        create_default_site(self.app_config, verbosity=0, apps=apps)
        self.assertFalse(Site.objects.exists())
class MiddlewareTest(TestCase):
    """Tests for CurrentSiteMiddleware attaching `request.site`."""

    def test_old_style_request(self):
        """The request has correct `site` attribute."""
        middleware = CurrentSiteMiddleware()
        request = HttpRequest()
        middleware.process_request(request)
        self.assertEqual(request.site.id, settings.SITE_ID)

    def test_request(self):
        # New-style middleware call: instance wraps get_response directly.
        def get_response(request):
            return HttpResponse(str(request.site.id))
        response = CurrentSiteMiddleware(get_response)(HttpRequest())
        self.assertContains(response, settings.SITE_ID)
| bsd-3-clause |
t794104/ansible | lib/ansible/modules/network/exos/exos_config.py | 32 | 17323 | #!/usr/bin/python
# Copyright: (c) 2018, Extreme Networks Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
# Standard Ansible module metadata: schema version, module maturity,
# and which group supports it.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = """
---
module: exos_config
version_added: "2.7"
author: "Lance Richardson (@hlrichardson)"
short_description: Manage Extreme Networks EXOS configuration sections
description:
- Extreme EXOS configurations use a simple flat text file syntax.
This module provides an implementation for working with EXOS
configuration lines in a deterministic way.
notes:
- Tested against EXOS version 22.6.0b19
options:
lines:
description:
- The ordered set of commands that should be configured in the
section. The commands must be the exact same commands as found
in the device running-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
aliases: ['commands']
src:
description:
- Specifies the source path to the file that contains the configuration
or configuration template to load. The path to the source file can
either be the full path on the Ansible control host or a relative
path from the playbook or role root directory. This argument is mutually
exclusive with I(lines).
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system.
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line. If
match is set to I(strict), command lines are matched with respect
to position. If match is set to I(exact), command lines
must be an equal match. Finally, if match is set to I(none), the
module will not attempt to compare the source configuration with
the running configuration on the remote device.
default: line
choices: ['line', 'strict', 'exact', 'none']
replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the replace argument is set to I(line) then
the modified lines are pushed to the device in configuration
mode. If the replace argument is set to I(block) then the entire
command block is pushed to the device in configuration mode if any
line is not correct.
default: line
choices: ['line', 'block']
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. If the C(backup_options) value is not given,
the backup file is written to the C(backup) folder in the playbook
root directory. If the directory does not exist, it is created.
type: bool
default: 'no'
running_config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(running_config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
aliases: ['config']
defaults:
description:
- This argument specifies whether or not to collect all defaults
when getting the remote device running config. When enabled,
the module will get the current config by issuing the command
C(show running-config all).
type: bool
default: 'no'
save_when:
description:
- When changes are made to the device running-configuration, the
changes are not copied to non-volatile storage by default. Using
this argument will change that behavior. If the argument is set to
I(always), then the running-config will always be copied to the
startup-config and the I(modified) flag will always be set to
True. If the argument is set to I(modified), then the running-config
will only be copied to the startup-config if it has changed since
the last save to startup-config. If the argument is set to
I(never), the running-config will never be copied to the
startup-config. If the argument is set to I(changed), then the running-config
will only be copied to the startup-config if the task has made a change.
default: never
choices: ['always', 'never', 'modified', 'changed']
diff_against:
description:
- When using the C(ansible-playbook --diff) command line argument
the module can generate diffs against different sources.
- When this option is configured as I(startup), the module will return
the diff of the running-config against the startup-config.
- When this option is configured as I(intended), the module will
return the diff of the running-config against the configuration
provided in the C(intended_config) argument.
- When this option is configured as I(running), the module will
return the before and after diff of the running-config with respect
to any changes made to the device configuration.
default: running
choices: ['running', 'startup', 'intended']
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be
ignored during the diff. This is used for lines in the configuration
that are automatically updated by the system. This argument takes
a list of regular expressions or exact line matches.
intended_config:
description:
- The C(intended_config) provides the master configuration that
the node should conform to and is used to check the final
running-config against. This argument will not modify any settings
on the remote device and is strictly used to check the compliance
of the current device's configuration against. When specifying this
argument, the task should also modify the C(diff_against) value and
set it to I(intended).
backup_options:
description:
- This is a dict object containing configurable options related to backup file path.
The value of this option is read only when C(backup) is set to I(yes), if C(backup) is set
to I(no) this option will be silently ignored.
suboptions:
filename:
description:
- The filename to be used to store the backup configuration. If the filename
is not given it will be generated based on the hostname, current time and date
in format defined by <hostname>_config.<current-date>@<current-time>
dir_path:
description:
- This option provides the path ending with directory name in which the backup
configuration file will be stored. If the directory does not exist it will be first
created and the filename is either the value of C(filename) or default filename
as described in C(filename) options description. If the path value is not given
in that case a I(backup) directory will be created in the current working directory
and backup configuration will be copied in C(filename) within I(backup) directory.
type: path
type: dict
version_added: "2.8"
"""
EXAMPLES = """
- name: configure SNMP system name
exos_config:
lines: configure snmp sysName "{{ inventory_hostname }}"
- name: configure interface settings
exos_config:
lines:
- configure ports 2 description-string "Master Uplink"
backup: yes
- name: check the running-config against master config
exos_config:
diff_against: intended
intended_config: "{{ lookup('file', 'master.cfg') }}"
- name: check the startup-config against the running-config
exos_config:
diff_against: startup
diff_ignore_lines:
- ntp clock .*
- name: save running to startup when modified
exos_config:
save_when: modified
- name: configurable backup path
exos_config:
lines:
- configure ports 2 description-string "Master Uplink"
backup: yes
backup_options:
filename: backup.cfg
dir_path: /home/user
"""
RETURN = """
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['switch-attributes hostname foo', 'router ospf', 'area 0']
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['create vlan "foo"', 'configure snmp sysName "x620-red"']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/x870_config.2018-08-08@15:00:21
"""
import re
from ansible.module_utils.network.exos.exos import run_commands, get_config, load_config, get_diff
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import NetworkConfig, dumps
from ansible.module_utils._text import to_text
from ansible.module_utils.network.common.utils import to_list
__metaclass__ = type
def get_running_config(module, current_config=None, flags=None):
    """Return the base running-config text used for comparison.

    Resolution order: the user-supplied ``running_config`` task argument,
    then the text of an already-fetched *current_config* object, and
    finally a fresh ``get_config`` call against the device.

    :param module: AnsibleModule instance (``params`` is read).
    :param current_config: optional NetworkConfig already fetched by the caller.
    :param flags: extra flags forwarded to ``get_config`` when fetching.
    :returns: configuration text.
    """
    supplied = module.params['running_config']
    if supplied:
        return supplied
    if current_config:
        return current_config.config_text
    return get_config(module, flags=flags)
def get_startup_config(module, flags=None):
    """Return the text of the currently-selected startup configuration file.

    Parses ``show switch`` output for the ``Config Selected`` file name and
    dumps that file via ``debug cfgmgr show configuration file``.

    :param module: AnsibleModule instance used to run commands on the device.
    :param flags: optional extra flags appended to the dump command.
    :returns: the configuration text, or an empty string when no
        configuration file is selected.
    """
    reply = run_commands(module, {'command': 'show switch', 'output': 'text'})
    match = re.search(r'Config Selected: +(\S+)\.cfg',
                      to_text(reply, errors='surrogate_or_strict').strip(),
                      re.MULTILINE)
    if not match:
        return ''
    cfgname = match.group(1).strip()
    command = ' '.join(['debug cfgmgr show configuration file', cfgname])
    if flags:
        # BUG FIX: the original appended the flags with no separating space,
        # producing commands like "...file primarydetail".
        command += ' ' + ' '.join(to_list(flags)).strip()
    reply = run_commands(module, {'command': command, 'output': 'text'})
    return reply[0]
def get_candidate(module):
    """Build the candidate configuration text from ``src`` or ``lines``.

    ``src`` (a file/template) takes precedence over ``lines``; the loaded
    configuration is serialized back to raw text for diffing.
    """
    candidate = NetworkConfig(indent=1)
    src = module.params['src']
    lines = module.params['lines']
    if src:
        candidate.load(src)
    elif lines:
        candidate.add(lines)
    return dumps(candidate, 'raw')
def save_config(module, result):
    """Copy the running-config to non-volatile storage.

    Always marks the task as changed; the actual ``save configuration``
    command is skipped (with a warning) in check mode.
    """
    result['changed'] = True
    if module.check_mode:
        module.warn('Skipping command `save configuration` '
                    'due to check_mode. Configuration not copied to '
                    'non-volatile storage')
        return
    run_commands(module, {"command": "save configuration",
                          "prompt": "Do you want to save configuration",
                          "answer": "y"})
def main():
    """Entry point for module execution.

    Builds the argument spec, computes the diff between the candidate and
    the running configuration, pushes any required commands, optionally
    saves to startup-config, and reports diffs per ``diff_against``.
    """
    # Sub-spec for the backup_options dict argument.
    backup_spec = dict(
        filename=dict(),
        dir_path=dict(type='path')
    )
    argument_spec = dict(
        src=dict(type='path'),
        lines=dict(aliases=['commands'], type='list'),
        before=dict(type='list'),
        after=dict(type='list'),
        match=dict(default='line', choices=['line', 'strict', 'exact', 'none']),
        replace=dict(default='line', choices=['line', 'block']),
        running_config=dict(aliases=['config']),
        intended_config=dict(),
        defaults=dict(type='bool', default=False),
        backup=dict(type='bool', default=False),
        backup_options=dict(type='dict', options=backup_spec),
        save_when=dict(choices=['always', 'never', 'modified', 'changed'], default='never'),
        diff_against=dict(choices=['startup', 'intended', 'running'], default='running'),
        diff_ignore_lines=dict(type='list'),
    )
    mutually_exclusive = [('lines', 'src')]
    required_if = [('match', 'strict', ['lines']),
                   ('match', 'exact', ['lines']),
                   ('replace', 'block', ['lines']),
                   ('diff_against', 'intended', ['intended_config'])]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           required_if=required_if,
                           supports_check_mode=True)
    result = {'changed': False}
    # NOTE(review): nothing ever appends to this list here, so the branch
    # below never fires; it looks like vestigial scaffolding.
    warnings = list()
    if warnings:
        result['warnings'] = warnings
    config = None
    # 'detail' requests defaults in the device output (see the C(defaults)
    # option documentation).
    flags = ['detail'] if module.params['defaults'] else []
    diff_ignore_lines = module.params['diff_ignore_lines']
    # Fetch the device config up front only when needed: for a backup, or
    # to diff against the pre-change running-config.
    if module.params['backup'] or (module._diff and module.params['diff_against'] == 'running'):
        contents = get_config(module, flags=flags)
        config = NetworkConfig(indent=1, contents=contents)
        if module.params['backup']:
            result['__backup__'] = contents
    # Only compute/push changes when the user supplied lines or src.
    if any((module.params['lines'], module.params['src'])):
        match = module.params['match']
        replace = module.params['replace']
        candidate = get_candidate(module)
        running = get_running_config(module, config)
        try:
            response = get_diff(module, candidate=candidate, running=running, diff_match=match, diff_ignore_lines=diff_ignore_lines, diff_replace=replace)
        except ConnectionError as exc:
            module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
        config_diff = response.get('config_diff')
        if config_diff:
            commands = config_diff.split('\n')
            # before/after wrap the computed command set without affecting
            # how the diff itself was matched.
            if module.params['before']:
                commands[:0] = module.params['before']
            if module.params['after']:
                commands.extend(module.params['after'])
            result['commands'] = commands
            result['updates'] = commands
            # send the configuration commands to the device and merge
            # them with the current running config
            if not module.check_mode:
                if commands:
                    load_config(module, commands)
            result['changed'] = True
    running_config = None
    startup_config = None
    # Handle save_when: always / modified (sha1 compare) / changed.
    if module.params['save_when'] == 'always':
        save_config(module, result)
    elif module.params['save_when'] == 'modified':
        running = get_running_config(module)
        startup = get_startup_config(module)
        running_config = NetworkConfig(indent=1, contents=running, ignore_lines=diff_ignore_lines)
        startup_config = NetworkConfig(indent=1, contents=startup, ignore_lines=diff_ignore_lines)
        if running_config.sha1 != startup_config.sha1:
            save_config(module, result)
    elif module.params['save_when'] == 'changed' and result['changed']:
        save_config(module, result)
    # Diff reporting, only when ansible-playbook was run with --diff.
    if module._diff:
        if not running_config:
            contents = get_running_config(module)
        else:
            contents = running_config.config_text
        # recreate the object in order to process diff_ignore_lines
        running_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
        if module.params['diff_against'] == 'running':
            if module.check_mode:
                module.warn("unable to perform diff against running-config due to check mode")
                contents = None
            else:
                # 'config' holds the pre-change running-config fetched above.
                contents = config.config_text
        elif module.params['diff_against'] == 'startup':
            if not startup_config:
                contents = get_startup_config(module)
            else:
                contents = startup_config.config_text
        elif module.params['diff_against'] == 'intended':
            contents = module.params['intended_config']
        if contents is not None:
            base_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
            if running_config.sha1 != base_config.sha1:
                # For 'intended' the running-config is the "before" side;
                # for 'startup'/'running' it is the "after" side.
                if module.params['diff_against'] == 'intended':
                    before = running_config
                    after = base_config
                elif module.params['diff_against'] in ('startup', 'running'):
                    before = base_config
                    after = running_config
                result.update({
                    'changed': True,
                    'diff': {'before': str(before), 'after': str(after)}
                })
    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
wkoathp/glance | glance/tests/unit/base.py | 5 | 2680 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import glance_store as store
from glance_store import location
from oslo.serialization import jsonutils
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_db import options
from glance.tests import stubs
from glance.tests import utils as test_utils
CONF = cfg.CONF
class StoreClearingUnitTest(test_utils.BaseTestCase):
    """Base test case that resets glance_store scheme state around each test."""

    def setUp(self):
        super(StoreClearingUnitTest, self).setUp()
        # Ensure stores + locations cleared
        location.SCHEME_TO_CLS_MAP = {}
        self._create_stores()
        # Restore an empty scheme map when the test finishes so state does
        # not leak into the next test.
        self.addCleanup(setattr, location, 'SCHEME_TO_CLS_MAP', dict())

    def _create_stores(self, passing_config=True):
        """Register glance_store options on CONF and create the known stores.

        :param passing_config: unused in this implementation; presumably kept
            for compatibility with overriding subclasses -- TODO confirm
            against callers outside this file.
        """
        store.register_opts(CONF)
        store.create_stores(CONF)
class IsolatedUnitTest(StoreClearingUnitTest):
    """
    Unit test case that establishes a mock environment within
    a testing directory (in isolation)
    """
    # Registry server handed to the stubbed-out registry; subclasses may
    # override it before setUp runs.
    registry = None

    def setUp(self):
        """Configure an in-memory sqlite DB, a filesystem store rooted at
        the test directory, and stub out the registry/store servers."""
        super(IsolatedUnitTest, self).setUp()
        options.set_defaults(CONF, connection='sqlite://',
                             sqlite_db='glance.sqlite')
        lockutils.set_defaults(os.path.join(self.test_dir))
        self.config(verbose=False,
                    debug=False)
        self.config(default_store='filesystem',
                    filesystem_store_datadir=os.path.join(self.test_dir),
                    group="glance_store")
        store.create_stores()
        stubs.stub_out_registry_and_store_server(self.stubs,
                                                 self.test_dir,
                                                 registry=self.registry)

    def set_policy_rules(self, rules):
        """Serialize *rules* as JSON into the configured policy file.

        Uses a context manager so the file handle is closed even if
        serialization raises (the original open/write/close sequence
        leaked the handle on error).
        """
        with open(CONF.oslo_policy.policy_file, 'w') as fap:
            fap.write(jsonutils.dumps(rules))
| apache-2.0 |
Svolcano/python_exercise | dianhua/worker/crawler/china_mobile/chongqing/des_js.py | 1 | 23915 | # -*- coding: utf-8 -*-
import execjs
def des_encode(encode_key, encode_data):
    """Encode *encode_data* with the bundled JavaScript triple-DES routine.

    Three overlapping 8-character slices of *encode_key* (offsets 0, 1 and 3)
    are used as the three DES keys passed to the JS ``strEnc`` function.
    """
    keys = [str(encode_key[start:start + 8]) for start in (0, 1, 3)]
    runtime = execjs.compile(des_js_code)
    return runtime.call("strEnc", str(encode_data), *keys)
des_js_code = """
function strEnc(data,firstKey,secondKey,thirdKey){
var leng = data.length;
var encData = "";
var firstKeyBt,secondKeyBt,thirdKeyBt,firstLength,secondLength,thirdLength;
if(firstKey != null && firstKey != ""){
firstKeyBt = getKeyBytes(firstKey);
firstLength = firstKeyBt.length;
}
if(secondKey != null && secondKey != ""){
secondKeyBt = getKeyBytes(secondKey);
secondLength = secondKeyBt.length;
}
if(thirdKey != null && thirdKey != ""){
thirdKeyBt = getKeyBytes(thirdKey);
thirdLength = thirdKeyBt.length;
}
if(leng > 0){
if(leng < 4){
var bt = strToBt(data);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x = 0;
tempBt = bt;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData = bt64ToHex(encByte);
}else{
var iterator = parseInt(leng/4);
var remainder = leng%4;
var i=0;
for(i = 0;i < iterator;i++){
var tempData = data.substring(i*4+0,i*4+4);
var tempByte = strToBt(tempData);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData += bt64ToHex(encByte);
}
if(remainder > 0){
var remainderData = data.substring(iterator*4+0,leng);
var tempByte = strToBt(remainderData);
var encByte ;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
for(z = 0;z < thirdLength ;z ++){
tempBt = enc(tempBt,thirdKeyBt[z]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
for(y = 0;y < secondLength ;y ++){
tempBt = enc(tempBt,secondKeyBt[y]);
}
encByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x;
tempBt = tempByte;
for(x = 0;x < firstLength ;x ++){
tempBt = enc(tempBt,firstKeyBt[x]);
}
encByte = tempBt;
}
}
}
encData += bt64ToHex(encByte);
}
}
}
return encData;
}
/*
* decrypt the encrypted string to the original string
*
* return the original string
*/
function strDec(data,firstKey,secondKey,thirdKey){
var leng = data.length;
var decStr = "";
var firstKeyBt,secondKeyBt,thirdKeyBt,firstLength,secondLength,thirdLength;
if(firstKey != null && firstKey != ""){
firstKeyBt = getKeyBytes(firstKey);
firstLength = firstKeyBt.length;
}
if(secondKey != null && secondKey != ""){
secondKeyBt = getKeyBytes(secondKey);
secondLength = secondKeyBt.length;
}
if(thirdKey != null && thirdKey != ""){
thirdKeyBt = getKeyBytes(thirdKey);
thirdLength = thirdKeyBt.length;
}
var iterator = parseInt(leng/16);
var i=0;
for(i = 0;i < iterator;i++){
var tempData = data.substring(i*16+0,i*16+16);
var strByte = hexToBt64(tempData);
var intByte = new Array(64);
var j = 0;
for(j = 0;j < 64; j++){
intByte[j] = parseInt(strByte.substring(j,j+1));
}
var decByte;
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != "" && thirdKey != null && thirdKey != ""){
var tempBt;
var x,y,z;
tempBt = intByte;
for(x = thirdLength - 1;x >= 0;x --){
tempBt = dec(tempBt,thirdKeyBt[x]);
}
for(y = secondLength - 1;y >= 0;y --){
tempBt = dec(tempBt,secondKeyBt[y]);
}
for(z = firstLength - 1;z >= 0 ;z --){
tempBt = dec(tempBt,firstKeyBt[z]);
}
decByte = tempBt;
}else{
if(firstKey != null && firstKey !="" && secondKey != null && secondKey != ""){
var tempBt;
var x,y,z;
tempBt = intByte;
for(x = secondLength - 1;x >= 0 ;x --){
tempBt = dec(tempBt,secondKeyBt[x]);
}
for(y = firstLength - 1;y >= 0 ;y --){
tempBt = dec(tempBt,firstKeyBt[y]);
}
decByte = tempBt;
}else{
if(firstKey != null && firstKey !=""){
var tempBt;
var x,y,z;
tempBt = intByte;
for(x = firstLength - 1;x >= 0 ;x --){
tempBt = dec(tempBt,firstKeyBt[x]);
}
decByte = tempBt;
}
}
}
decStr += byteToString(decByte);
}
return decStr;
}
/*
* chang the string into the bit array
*
* return bit array(it's length % 64 = 0)
*/
function getKeyBytes(key){
var keyBytes = new Array();
var leng = key.length;
var iterator = parseInt(leng/4);
var remainder = leng%4;
var i = 0;
for(i = 0;i < iterator; i ++){
keyBytes[i] = strToBt(key.substring(i*4+0,i*4+4));
}
if(remainder > 0){
keyBytes[i] = strToBt(key.substring(i*4+0,leng));
}
return keyBytes;
}
/*
* chang the string(it's length <= 4) into the bit array
*
* return bit array(it's length = 64)
*/
function strToBt(str){
var leng = str.length;
var bt = new Array(64);
if(leng < 4){
var i=0,j=0,p=0,q=0;
for(i = 0;i<leng;i++){
var k = str.charCodeAt(i);
for(j=0;j<16;j++){
var pow=1,m=0;
for(m=15;m>j;m--){
pow *= 2;
}
bt[16*i+j]=parseInt(k/pow)%2;
}
}
for(p = leng;p<4;p++){
var k = 0;
for(q=0;q<16;q++){
var pow=1,m=0;
for(m=15;m>q;m--){
pow *= 2;
}
bt[16*p+q]=parseInt(k/pow)%2;
}
}
}else{
for(i = 0;i<4;i++){
var k = str.charCodeAt(i);
for(j=0;j<16;j++){
var pow=1;
for(m=15;m>j;m--){
pow *= 2;
}
bt[16*i+j]=parseInt(k/pow)%2;
}
}
}
return bt;
}
/*
* chang the bit(it's length = 4) into the hex
*
* return hex
*/
function bt4ToHex(binary) {
var hex;
switch (binary) {
case "0000" : hex = "0"; break;
case "0001" : hex = "1"; break;
case "0010" : hex = "2"; break;
case "0011" : hex = "3"; break;
case "0100" : hex = "4"; break;
case "0101" : hex = "5"; break;
case "0110" : hex = "6"; break;
case "0111" : hex = "7"; break;
case "1000" : hex = "8"; break;
case "1001" : hex = "9"; break;
case "1010" : hex = "A"; break;
case "1011" : hex = "B"; break;
case "1100" : hex = "C"; break;
case "1101" : hex = "D"; break;
case "1110" : hex = "E"; break;
case "1111" : hex = "F"; break;
}
return hex;
}
/*
* chang the hex into the bit(it's length = 4)
*
* return the bit(it's length = 4)
*/
function hexToBt4(hex) {
var binary;
switch (hex) {
case "0" : binary = "0000"; break;
case "1" : binary = "0001"; break;
case "2" : binary = "0010"; break;
case "3" : binary = "0011"; break;
case "4" : binary = "0100"; break;
case "5" : binary = "0101"; break;
case "6" : binary = "0110"; break;
case "7" : binary = "0111"; break;
case "8" : binary = "1000"; break;
case "9" : binary = "1001"; break;
case "A" : binary = "1010"; break;
case "B" : binary = "1011"; break;
case "C" : binary = "1100"; break;
case "D" : binary = "1101"; break;
case "E" : binary = "1110"; break;
case "F" : binary = "1111"; break;
}
return binary;
}
/*
* chang the bit(it's length = 64) into the string
*
* return string
*/
function byteToString(byteData){
var str="";
for(i = 0;i<4;i++){
var count=0;
for(j=0;j<16;j++){
var pow=1;
for(m=15;m>j;m--){
pow*=2;
}
count+=byteData[16*i+j]*pow;
}
if(count != 0){
str+=String.fromCharCode(count);
}
}
return str;
}
function bt64ToHex(byteData){
var hex = "";
for(i = 0;i<16;i++){
var bt = "";
for(j=0;j<4;j++){
bt += byteData[i*4+j];
}
hex+=bt4ToHex(bt);
}
return hex;
}
function hexToBt64(hex){
var binary = "";
for(i = 0;i<16;i++){
binary+=hexToBt4(hex.substring(i,i+1));
}
return binary;
}
/*
* the 64 bit des core arithmetic
*/
function enc(dataByte,keyByte){
var keys = generateKeys(keyByte);
var ipByte = initPermute(dataByte);
var ipLeft = new Array(32);
var ipRight = new Array(32);
var tempLeft = new Array(32);
var i = 0,j = 0,k = 0,m = 0, n = 0;
for(k = 0;k < 32;k ++){
ipLeft[k] = ipByte[k];
ipRight[k] = ipByte[32+k];
}
for(i = 0;i < 16;i ++){
for(j = 0;j < 32;j ++){
tempLeft[j] = ipLeft[j];
ipLeft[j] = ipRight[j];
}
var key = new Array(48);
for(m = 0;m < 48;m ++){
key[m] = keys[i][m];
}
var tempRight = xor(pPermute(sBoxPermute(xor(expandPermute(ipRight),key))), tempLeft);
for(n = 0;n < 32;n ++){
ipRight[n] = tempRight[n];
}
}
var finalData =new Array(64);
for(i = 0;i < 32;i ++){
finalData[i] = ipRight[i];
finalData[32+i] = ipLeft[i];
}
return finallyPermute(finalData);
}
function dec(dataByte,keyByte){
var keys = generateKeys(keyByte);
var ipByte = initPermute(dataByte);
var ipLeft = new Array(32);
var ipRight = new Array(32);
var tempLeft = new Array(32);
var i = 0,j = 0,k = 0,m = 0, n = 0;
for(k = 0;k < 32;k ++){
ipLeft[k] = ipByte[k];
ipRight[k] = ipByte[32+k];
}
for(i = 15;i >= 0;i --){
for(j = 0;j < 32;j ++){
tempLeft[j] = ipLeft[j];
ipLeft[j] = ipRight[j];
}
var key = new Array(48);
for(m = 0;m < 48;m ++){
key[m] = keys[i][m];
}
var tempRight = xor(pPermute(sBoxPermute(xor(expandPermute(ipRight),key))), tempLeft);
for(n = 0;n < 32;n ++){
ipRight[n] = tempRight[n];
}
}
var finalData =new Array(64);
for(i = 0;i < 32;i ++){
finalData[i] = ipRight[i];
finalData[32+i] = ipLeft[i];
}
return finallyPermute(finalData);
}
function initPermute(originalData){
var ipByte = new Array(64);
for (i = 0, m = 1, n = 0; i < 4; i++, m += 2, n += 2) {
for (j = 7, k = 0; j >= 0; j--, k++) {
ipByte[i * 8 + k] = originalData[j * 8 + m];
ipByte[i * 8 + k + 32] = originalData[j * 8 + n];
}
}
return ipByte;
}
function expandPermute(rightData){
var epByte = new Array(48);
for (i = 0; i < 8; i++) {
if (i == 0) {
epByte[i * 6 + 0] = rightData[31];
} else {
epByte[i * 6 + 0] = rightData[i * 4 - 1];
}
epByte[i * 6 + 1] = rightData[i * 4 + 0];
epByte[i * 6 + 2] = rightData[i * 4 + 1];
epByte[i * 6 + 3] = rightData[i * 4 + 2];
epByte[i * 6 + 4] = rightData[i * 4 + 3];
if (i == 7) {
epByte[i * 6 + 5] = rightData[0];
} else {
epByte[i * 6 + 5] = rightData[i * 4 + 4];
}
}
return epByte;
}
function xor(byteOne,byteTwo){
var xorByte = new Array(byteOne.length);
for(i = 0;i < byteOne.length; i ++){
xorByte[i] = byteOne[i] ^ byteTwo[i];
}
return xorByte;
}
function sBoxPermute(expandByte){
var sBoxByte = new Array(32);
var binary = "";
var s1 = [
[14, 4, 13, 1, 2, 15, 11, 8, 3, 10, 6, 12, 5, 9, 0, 7],
[0, 15, 7, 4, 14, 2, 13, 1, 10, 6, 12, 11, 9, 5, 3, 8],
[4, 1, 14, 8, 13, 6, 2, 11, 15, 12, 9, 7, 3, 10, 5, 0],
[15, 12, 8, 2, 4, 9, 1, 7, 5, 11, 3, 14, 10, 0, 6, 13 ]];
/* Table - s2 */
var s2 = [
[15, 1, 8, 14, 6, 11, 3, 4, 9, 7, 2, 13, 12, 0, 5, 10],
[3, 13, 4, 7, 15, 2, 8, 14, 12, 0, 1, 10, 6, 9, 11, 5],
[0, 14, 7, 11, 10, 4, 13, 1, 5, 8, 12, 6, 9, 3, 2, 15],
[13, 8, 10, 1, 3, 15, 4, 2, 11, 6, 7, 12, 0, 5, 14, 9 ]];
/* Table - s3 */
var s3= [
[10, 0, 9, 14, 6, 3, 15, 5, 1, 13, 12, 7, 11, 4, 2, 8],
[13, 7, 0, 9, 3, 4, 6, 10, 2, 8, 5, 14, 12, 11, 15, 1],
[13, 6, 4, 9, 8, 15, 3, 0, 11, 1, 2, 12, 5, 10, 14, 7],
[1, 10, 13, 0, 6, 9, 8, 7, 4, 15, 14, 3, 11, 5, 2, 12 ]];
/* Table - s4 */
var s4 = [
[7, 13, 14, 3, 0, 6, 9, 10, 1, 2, 8, 5, 11, 12, 4, 15],
[13, 8, 11, 5, 6, 15, 0, 3, 4, 7, 2, 12, 1, 10, 14, 9],
[10, 6, 9, 0, 12, 11, 7, 13, 15, 1, 3, 14, 5, 2, 8, 4],
[3, 15, 0, 6, 10, 1, 13, 8, 9, 4, 5, 11, 12, 7, 2, 14 ]];
/* Table - s5 */
var s5 = [
[2, 12, 4, 1, 7, 10, 11, 6, 8, 5, 3, 15, 13, 0, 14, 9],
[14, 11, 2, 12, 4, 7, 13, 1, 5, 0, 15, 10, 3, 9, 8, 6],
[4, 2, 1, 11, 10, 13, 7, 8, 15, 9, 12, 5, 6, 3, 0, 14],
[11, 8, 12, 7, 1, 14, 2, 13, 6, 15, 0, 9, 10, 4, 5, 3 ]];
/* Table - s6 */
var s6 = [
[12, 1, 10, 15, 9, 2, 6, 8, 0, 13, 3, 4, 14, 7, 5, 11],
[10, 15, 4, 2, 7, 12, 9, 5, 6, 1, 13, 14, 0, 11, 3, 8],
[9, 14, 15, 5, 2, 8, 12, 3, 7, 0, 4, 10, 1, 13, 11, 6],
[4, 3, 2, 12, 9, 5, 15, 10, 11, 14, 1, 7, 6, 0, 8, 13 ]];
/* Table - s7 */
var s7 = [
[4, 11, 2, 14, 15, 0, 8, 13, 3, 12, 9, 7, 5, 10, 6, 1],
[13, 0, 11, 7, 4, 9, 1, 10, 14, 3, 5, 12, 2, 15, 8, 6],
[1, 4, 11, 13, 12, 3, 7, 14, 10, 15, 6, 8, 0, 5, 9, 2],
[6, 11, 13, 8, 1, 4, 10, 7, 9, 5, 0, 15, 14, 2, 3, 12]];
/* Table - s8 */
var s8 = [
[13, 2, 8, 4, 6, 15, 11, 1, 10, 9, 3, 14, 5, 0, 12, 7],
[1, 15, 13, 8, 10, 3, 7, 4, 12, 5, 6, 11, 0, 14, 9, 2],
[7, 11, 4, 1, 9, 12, 14, 2, 0, 6, 10, 13, 15, 3, 5, 8],
[2, 1, 14, 7, 4, 10, 8, 13, 15, 12, 9, 0, 3, 5, 6, 11]];
for(m=0;m<8;m++){
var i=0,j=0;
i = expandByte[m*6+0]*2+expandByte[m*6+5];
j = expandByte[m * 6 + 1] * 2 * 2 * 2
+ expandByte[m * 6 + 2] * 2* 2
+ expandByte[m * 6 + 3] * 2
+ expandByte[m * 6 + 4];
switch (m) {
case 0 :
binary = getBoxBinary(s1[i][j]);
break;
case 1 :
binary = getBoxBinary(s2[i][j]);
break;
case 2 :
binary = getBoxBinary(s3[i][j]);
break;
case 3 :
binary = getBoxBinary(s4[i][j]);
break;
case 4 :
binary = getBoxBinary(s5[i][j]);
break;
case 5 :
binary = getBoxBinary(s6[i][j]);
break;
case 6 :
binary = getBoxBinary(s7[i][j]);
break;
case 7 :
binary = getBoxBinary(s8[i][j]);
break;
}
sBoxByte[m*4+0] = parseInt(binary.substring(0,1));
sBoxByte[m*4+1] = parseInt(binary.substring(1,2));
sBoxByte[m*4+2] = parseInt(binary.substring(2,3));
sBoxByte[m*4+3] = parseInt(binary.substring(3,4));
}
return sBoxByte;
}
function pPermute(sBoxByte){
var pBoxPermute = new Array(32);
pBoxPermute[ 0] = sBoxByte[15];
pBoxPermute[ 1] = sBoxByte[ 6];
pBoxPermute[ 2] = sBoxByte[19];
pBoxPermute[ 3] = sBoxByte[20];
pBoxPermute[ 4] = sBoxByte[28];
pBoxPermute[ 5] = sBoxByte[11];
pBoxPermute[ 6] = sBoxByte[27];
pBoxPermute[ 7] = sBoxByte[16];
pBoxPermute[ 8] = sBoxByte[ 0];
pBoxPermute[ 9] = sBoxByte[14];
pBoxPermute[10] = sBoxByte[22];
pBoxPermute[11] = sBoxByte[25];
pBoxPermute[12] = sBoxByte[ 4];
pBoxPermute[13] = sBoxByte[17];
pBoxPermute[14] = sBoxByte[30];
pBoxPermute[15] = sBoxByte[ 9];
pBoxPermute[16] = sBoxByte[ 1];
pBoxPermute[17] = sBoxByte[ 7];
pBoxPermute[18] = sBoxByte[23];
pBoxPermute[19] = sBoxByte[13];
pBoxPermute[20] = sBoxByte[31];
pBoxPermute[21] = sBoxByte[26];
pBoxPermute[22] = sBoxByte[ 2];
pBoxPermute[23] = sBoxByte[ 8];
pBoxPermute[24] = sBoxByte[18];
pBoxPermute[25] = sBoxByte[12];
pBoxPermute[26] = sBoxByte[29];
pBoxPermute[27] = sBoxByte[ 5];
pBoxPermute[28] = sBoxByte[21];
pBoxPermute[29] = sBoxByte[10];
pBoxPermute[30] = sBoxByte[ 3];
pBoxPermute[31] = sBoxByte[24];
return pBoxPermute;
}
function finallyPermute(endByte){
var fpByte = new Array(64);
fpByte[ 0] = endByte[39];
fpByte[ 1] = endByte[ 7];
fpByte[ 2] = endByte[47];
fpByte[ 3] = endByte[15];
fpByte[ 4] = endByte[55];
fpByte[ 5] = endByte[23];
fpByte[ 6] = endByte[63];
fpByte[ 7] = endByte[31];
fpByte[ 8] = endByte[38];
fpByte[ 9] = endByte[ 6];
fpByte[10] = endByte[46];
fpByte[11] = endByte[14];
fpByte[12] = endByte[54];
fpByte[13] = endByte[22];
fpByte[14] = endByte[62];
fpByte[15] = endByte[30];
fpByte[16] = endByte[37];
fpByte[17] = endByte[ 5];
fpByte[18] = endByte[45];
fpByte[19] = endByte[13];
fpByte[20] = endByte[53];
fpByte[21] = endByte[21];
fpByte[22] = endByte[61];
fpByte[23] = endByte[29];
fpByte[24] = endByte[36];
fpByte[25] = endByte[ 4];
fpByte[26] = endByte[44];
fpByte[27] = endByte[12];
fpByte[28] = endByte[52];
fpByte[29] = endByte[20];
fpByte[30] = endByte[60];
fpByte[31] = endByte[28];
fpByte[32] = endByte[35];
fpByte[33] = endByte[ 3];
fpByte[34] = endByte[43];
fpByte[35] = endByte[11];
fpByte[36] = endByte[51];
fpByte[37] = endByte[19];
fpByte[38] = endByte[59];
fpByte[39] = endByte[27];
fpByte[40] = endByte[34];
fpByte[41] = endByte[ 2];
fpByte[42] = endByte[42];
fpByte[43] = endByte[10];
fpByte[44] = endByte[50];
fpByte[45] = endByte[18];
fpByte[46] = endByte[58];
fpByte[47] = endByte[26];
fpByte[48] = endByte[33];
fpByte[49] = endByte[ 1];
fpByte[50] = endByte[41];
fpByte[51] = endByte[ 9];
fpByte[52] = endByte[49];
fpByte[53] = endByte[17];
fpByte[54] = endByte[57];
fpByte[55] = endByte[25];
fpByte[56] = endByte[32];
fpByte[57] = endByte[ 0];
fpByte[58] = endByte[40];
fpByte[59] = endByte[ 8];
fpByte[60] = endByte[48];
fpByte[61] = endByte[16];
fpByte[62] = endByte[56];
fpByte[63] = endByte[24];
return fpByte;
}
function getBoxBinary(i) {
var binary = "";
switch (i) {
case 0 :binary = "0000";break;
case 1 :binary = "0001";break;
case 2 :binary = "0010";break;
case 3 :binary = "0011";break;
case 4 :binary = "0100";break;
case 5 :binary = "0101";break;
case 6 :binary = "0110";break;
case 7 :binary = "0111";break;
case 8 :binary = "1000";break;
case 9 :binary = "1001";break;
case 10 :binary = "1010";break;
case 11 :binary = "1011";break;
case 12 :binary = "1100";break;
case 13 :binary = "1101";break;
case 14 :binary = "1110";break;
case 15 :binary = "1111";break;
}
return binary;
}
/*
* generate 16 keys for xor
*
*/
function generateKeys(keyByte){
var key = new Array(56);
var keys = new Array();
keys[ 0] = new Array();
keys[ 1] = new Array();
keys[ 2] = new Array();
keys[ 3] = new Array();
keys[ 4] = new Array();
keys[ 5] = new Array();
keys[ 6] = new Array();
keys[ 7] = new Array();
keys[ 8] = new Array();
keys[ 9] = new Array();
keys[10] = new Array();
keys[11] = new Array();
keys[12] = new Array();
keys[13] = new Array();
keys[14] = new Array();
keys[15] = new Array();
var loop = [1,1,2,2,2,2,2,2,1,2,2,2,2,2,2,1];
for(i=0;i<7;i++){
for(j=0,k=7;j<8;j++,k--){
key[i*8+j]=keyByte[8*k+i];
}
}
var i = 0;
for(i = 0;i < 16;i ++){
var tempLeft=0;
var tempRight=0;
for(j = 0; j < loop[i];j ++){
tempLeft = key[0];
tempRight = key[28];
for(k = 0;k < 27 ;k ++){
key[k] = key[k+1];
key[28+k] = key[29+k];
}
key[27]=tempLeft;
key[55]=tempRight;
}
var tempKey = new Array(48);
tempKey[ 0] = key[13];
tempKey[ 1] = key[16];
tempKey[ 2] = key[10];
tempKey[ 3] = key[23];
tempKey[ 4] = key[ 0];
tempKey[ 5] = key[ 4];
tempKey[ 6] = key[ 2];
tempKey[ 7] = key[27];
tempKey[ 8] = key[14];
tempKey[ 9] = key[ 5];
tempKey[10] = key[20];
tempKey[11] = key[ 9];
tempKey[12] = key[22];
tempKey[13] = key[18];
tempKey[14] = key[11];
tempKey[15] = key[ 3];
tempKey[16] = key[25];
tempKey[17] = key[ 7];
tempKey[18] = key[15];
tempKey[19] = key[ 6];
tempKey[20] = key[26];
tempKey[21] = key[19];
tempKey[22] = key[12];
tempKey[23] = key[ 1];
tempKey[24] = key[40];
tempKey[25] = key[51];
tempKey[26] = key[30];
tempKey[27] = key[36];
tempKey[28] = key[46];
tempKey[29] = key[54];
tempKey[30] = key[29];
tempKey[31] = key[39];
tempKey[32] = key[50];
tempKey[33] = key[44];
tempKey[34] = key[32];
tempKey[35] = key[47];
tempKey[36] = key[43];
tempKey[37] = key[48];
tempKey[38] = key[38];
tempKey[39] = key[55];
tempKey[40] = key[33];
tempKey[41] = key[52];
tempKey[42] = key[45];
tempKey[43] = key[41];
tempKey[44] = key[49];
tempKey[45] = key[35];
tempKey[46] = key[28];
tempKey[47] = key[31];
switch(i){
case 0: for(m=0;m < 48 ;m++){ keys[ 0][m] = tempKey[m]; } break;
case 1: for(m=0;m < 48 ;m++){ keys[ 1][m] = tempKey[m]; } break;
case 2: for(m=0;m < 48 ;m++){ keys[ 2][m] = tempKey[m]; } break;
case 3: for(m=0;m < 48 ;m++){ keys[ 3][m] = tempKey[m]; } break;
case 4: for(m=0;m < 48 ;m++){ keys[ 4][m] = tempKey[m]; } break;
case 5: for(m=0;m < 48 ;m++){ keys[ 5][m] = tempKey[m]; } break;
case 6: for(m=0;m < 48 ;m++){ keys[ 6][m] = tempKey[m]; } break;
case 7: for(m=0;m < 48 ;m++){ keys[ 7][m] = tempKey[m]; } break;
case 8: for(m=0;m < 48 ;m++){ keys[ 8][m] = tempKey[m]; } break;
case 9: for(m=0;m < 48 ;m++){ keys[ 9][m] = tempKey[m]; } break;
case 10: for(m=0;m < 48 ;m++){ keys[10][m] = tempKey[m]; } break;
case 11: for(m=0;m < 48 ;m++){ keys[11][m] = tempKey[m]; } break;
case 12: for(m=0;m < 48 ;m++){ keys[12][m] = tempKey[m]; } break;
case 13: for(m=0;m < 48 ;m++){ keys[13][m] = tempKey[m]; } break;
case 14: for(m=0;m < 48 ;m++){ keys[14][m] = tempKey[m]; } break;
case 15: for(m=0;m < 48 ;m++){ keys[15][m] = tempKey[m]; } break;
}
}
return keys;
}
"""
| mit |
efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/python2.7/encodings/cp1258.py | 593 | 13620 | """ Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1258 codec backed by the module-level charmap tables."""

    def encode(self, input, errors='strict'):
        """Encode a text string to cp1258 bytes; *errors* picks the error policy."""
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        """Decode cp1258 bytes to text; *errors* picks the error policy."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp1258 encoder; charmap encoding keeps no state between chunks."""

    def encode(self, input, final=False):
        # charmap_encode returns (bytes, chars_consumed); only the bytes matter here.
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp1258 decoder; charmap decoding keeps no state between chunks."""

    def decode(self, input, final=False):
        # charmap_decode returns (text, bytes_consumed); only the text matters here.
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec, codecs.StreamWriter):
    """Writable stream wrapper; all cp1258 behaviour is inherited from Codec."""
class StreamReader(Codec, codecs.StreamReader):
    """Readable stream wrapper; all cp1258 behaviour is inherited from Codec."""
### encodings module API
def getregentry():
    """Return the CodecInfo entry used to register the cp1258 codec."""
    # One stateless Codec instance is enough for both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp1258',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\u02dc' # 0x98 -> SMALL TILDE
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u0300' # 0xCC -> COMBINING GRAVE ACCENT
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\u0309' # 0xD2 -> COMBINING HOOK ABOVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u01a0' # 0xD5 -> LATIN CAPITAL LETTER O WITH HORN
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u01af' # 0xDD -> LATIN CAPITAL LETTER U WITH HORN
u'\u0303' # 0xDE -> COMBINING TILDE
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0301' # 0xEC -> COMBINING ACUTE ACCENT
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\u0323' # 0xF2 -> COMBINING DOT BELOW
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\u01a1' # 0xF5 -> LATIN SMALL LETTER O WITH HORN
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u01b0' # 0xFD -> LATIN SMALL LETTER U WITH HORN
u'\u20ab' # 0xFE -> DONG SIGN
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
# Inverse mapping (character -> byte) built once at import time from decoding_table.
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
TheWardoctor/Wardoctors-repo | script.module.schism.common/lib/bs4/tests/test_docs.py | 607 | 1067 | "Test harness for doctests."
# pylint: disable-msg=E0611,W0142
# Force new-style classes throughout this Python 2 module.
__metaclass__ = type
# Declared public API; additional_tests itself is currently commented out below.
__all__ = [
    'additional_tests',
    ]
import atexit
import doctest
import os
#from pkg_resources import (
# resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest
# Doctest option flags for the doc tests: allow "..." wildcards, ignore
# whitespace differences, and report failures in ndiff format.
DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)
# def additional_tests():
# "Run the doc tests (README.txt and docs/*, if any exist)"
# doctest_files = [
# os.path.abspath(resource_filename('bs4', 'README.txt'))]
# if resource_exists('bs4', 'docs'):
# for name in resource_listdir('bs4', 'docs'):
# if name.endswith('.txt'):
# doctest_files.append(
# os.path.abspath(
# resource_filename('bs4', 'docs/%s' % name)))
# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
# atexit.register(cleanup_resources)
# return unittest.TestSuite((
# doctest.DocFileSuite(*doctest_files, **kwargs)))
| apache-2.0 |
tersmitten/ansible | lib/ansible/modules/network/panos/_panos_nat_rule.py | 41 | 16688 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: panos_nat_rule
short_description: create a policy NAT rule
description: >
- Create a policy nat rule. Keep in mind that we can either end up configuring source NAT, destination NAT, or
both. Instead of splitting it into two we will make a fair attempt to determine which one the user wants.
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer), Robert Hagen (@rnh556)"
version_added: "2.4"
requirements:
- pan-python can be obtained from PyPI U(https://pypi.org/project/pan-python/)
- pandevice can be obtained from PyPI U(https://pypi.org/project/pandevice/)
deprecated:
alternative: Use U(https://galaxy.ansible.com/PaloAltoNetworks/paloaltonetworks) instead.
removed_in: "2.12"
why: Consolidating code base.
notes:
- Checkmode is not supported.
- Panorama is supported.
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device being configured.
required: true
username:
description:
- Username credentials to use for auth unless I(api_key) is set.
default: "admin"
password:
description:
- Password credentials to use for auth unless I(api_key) is set.
required: true
api_key:
description:
- API key that can be used instead of I(username)/I(password) credentials.
operation:
description:
- The action to be taken. Supported values are I(add)/I(update)/I(find)/I(delete).
required: true
choices:
- add
- update
- delete
- find
devicegroup:
description:
- If Panorama, the device group to put this rule in.
rule_name:
description:
- name of the SNAT rule
required: true
description:
description:
- The description
source_zone:
description:
- list of source zones
required: true
destination_zone:
description:
- destination zone
required: true
source_ip:
description:
- list of source addresses
default: ["any"]
destination_ip:
description:
- list of destination addresses
default: ["any"]
service:
description:
- service
default: "any"
snat_type:
description:
- type of source translation
choices:
- static-ip
- dynamic-ip-and-port
- dynamic-ip
snat_address_type:
description:
- type of source translation. Supported values are I(translated-address)/I(translated-address).
default: 'interface-address'
choices:
- interface-address
- translated-address
snat_static_address:
description:
- Source NAT translated address. Used with Static-IP translation.
snat_dynamic_address:
description:
- Source NAT translated address. Used with Dynamic-IP and Dynamic-IP-and-Port.
snat_interface:
description:
- snat interface
snat_interface_address:
description:
- snat interface address
snat_bidirectional:
description:
- bidirectional flag
type: bool
default: 'no'
dnat_address:
description:
- dnat translated address
dnat_port:
description:
- dnat translated port
tag_name:
description:
- Tag for the NAT rule.
to_interface:
description:
- Destination interface.
default: 'any'
commit:
description:
- Commit configuration if changed.
type: bool
default: 'yes'
'''
EXAMPLES = '''
# Create a source and destination nat rule
- name: Create NAT SSH rule for 10.0.1.101
panos_nat_rule:
ip_address: '{{ ip_address }}'
username: '{{ username }}'
password: '{{ password }}'
rule_name: "Web SSH"
source_zone: ["external"]
destination_zone: "external"
source: ["any"]
destination: ["10.0.0.100"]
service: "service-tcp-221"
snat_type: "dynamic-ip-and-port"
snat_interface: "ethernet1/2"
dnat_address: "10.0.1.101"
dnat_port: "22"
'''
RETURN = '''
# Default return values
'''
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
# import pydevd
# pydevd.settrace('localhost', port=60374, stdoutToServer=True, stderrToServer=True)
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
# Optional third-party dependencies (pan-python, pandevice, xmltodict).
# Import failures are recorded in HAS_LIB so main() can emit a clean Ansible
# failure message instead of a traceback.
try:
    import pan.xapi
    from pan.xapi import PanXapiError
    import pandevice
    from pandevice import base
    from pandevice import firewall
    from pandevice import panorama
    from pandevice import objects
    from pandevice import policies
    import xmltodict
    import json
    HAS_LIB = True
except ImportError:
    HAS_LIB = False
def get_devicegroup(device, devicegroup):
    """Return the Panorama DeviceGroup named *devicegroup*, or False if absent."""
    for candidate in device.refresh_devices():
        if not isinstance(candidate, pandevice.panorama.DeviceGroup):
            continue
        if candidate.name == devicegroup:
            return candidate
    return False
def get_rulebase(device, devicegroup):
    """Build and refresh the NAT rulebase for *device*.

    A Firewall gets its local Rulebase; a Panorama gets the pre-rulebase of
    *devicegroup*. Returns False for any other device type.
    """
    if isinstance(device, pandevice.firewall.Firewall):
        rulebase = pandevice.policies.Rulebase()
        device.add(rulebase)
    elif isinstance(device, pandevice.panorama.Panorama):
        device_group = panorama.DeviceGroup(devicegroup)
        device.add(device_group)
        rulebase = policies.PreRulebase()
        device_group.add(rulebase)
    else:
        return False
    # Populate the rulebase with the NAT rules currently on the device.
    policies.NatRule.refreshall(rulebase)
    return rulebase
def find_rule(rulebase, rule_name):
    """Return the rule named *rule_name* from *rulebase*, or False if absent."""
    match = rulebase.find(rule_name)
    return match if match else False
def create_nat_rule(**kwargs):
    """Build a pandevice NatRule object from the module parameters.

    Configures source translation according to ``snat_type`` (static-ip,
    dynamic-ip-and-port, or dynamic-ip) and destination translation from
    ``dnat_address``/``dnat_port``. Returns the NatRule, or False when
    snat_type is 'dynamic-ip' but no snat_dynamic_address was given.
    """
    nat_rule = policies.NatRule(
        name=kwargs['rule_name'],
        description=kwargs['description'],
        fromzone=kwargs['source_zone'],
        source=kwargs['source_ip'],
        tozone=kwargs['destination_zone'],
        destination=kwargs['destination_ip'],
        service=kwargs['service'],
        to_interface=kwargs['to_interface'],
        nat_type=kwargs['nat_type']
    )
    # Source translation: Static IP
    # NOTE(review): if snat_type is 'static-ip' but snat_static_address is
    # empty, no source translation is configured (silently skipped), unlike
    # the 'dynamic-ip' branch below which returns False.
    if kwargs['snat_type'] in ['static-ip'] and kwargs['snat_static_address']:
        nat_rule.source_translation_type = kwargs['snat_type']
        nat_rule.source_translation_static_translated_address = kwargs['snat_static_address']
        # Bi-directional flag set?
        if kwargs['snat_bidirectional']:
            nat_rule.source_translation_static_bi_directional = kwargs['snat_bidirectional']
    # Source translation: Dynamic IP and port
    elif kwargs['snat_type'] in ['dynamic-ip-and-port']:
        nat_rule.source_translation_type = kwargs['snat_type']
        nat_rule.source_translation_address_type = kwargs['snat_address_type']
        # Interface address?
        # NOTE(review): a provided snat_interface takes precedence regardless
        # of snat_address_type; translated addresses are used only otherwise.
        if kwargs['snat_interface']:
            nat_rule.source_translation_interface = kwargs['snat_interface']
            # Interface IP?
            if kwargs['snat_interface_address']:
                nat_rule.source_translation_ip_address = kwargs['snat_interface_address']
        else:
            nat_rule.source_translation_translated_addresses = kwargs['snat_dynamic_address']
    # Source translation: Dynamic IP
    elif kwargs['snat_type'] in ['dynamic-ip']:
        if kwargs['snat_dynamic_address']:
            nat_rule.source_translation_type = kwargs['snat_type']
            nat_rule.source_translation_translated_addresses = kwargs['snat_dynamic_address']
        else:
            # dynamic-ip requires a translated address pool.
            return False
    # Destination translation (port only applies when an address is set)
    if kwargs['dnat_address']:
        nat_rule.destination_translated_address = kwargs['dnat_address']
        if kwargs['dnat_port']:
            nat_rule.destination_translated_port = kwargs['dnat_port']
    # Any tags?
    if 'tag_name' in kwargs:
        nat_rule.tag = kwargs['tag_name']
    return nat_rule
def add_rule(rulebase, nat_rule):
    """Attach *nat_rule* to *rulebase* and create it on the device.

    Returns True on success, False when no rulebase was supplied.
    """
    if not rulebase:
        return False
    rulebase.add(nat_rule)
    nat_rule.create()
    return True
def update_rule(rulebase, nat_rule):
    """Attach *nat_rule* to *rulebase* and apply it over the existing rule.

    Returns True on success, False when no rulebase was supplied.
    """
    if not rulebase:
        return False
    rulebase.add(nat_rule)
    nat_rule.apply()
    return True
def main():
    """Module entry point: parse arguments and dispatch on *operation*.

    Operations (each exits via module.exit_json / module.fail_json):
      - find:   look up a NAT rule by name and return its XML rendered as JSON
      - delete: remove the named rule, optionally committing the change
      - add:    create a new rule; fails if the name already exists
      - update: replace an existing rule; fails if the name is missing
    """
    argument_spec = dict(
        ip_address=dict(required=True),
        username=dict(default='admin'),
        password=dict(required=True, no_log=True),
        api_key=dict(no_log=True),
        operation=dict(required=True, choices=['add', 'update', 'delete', 'find']),
        rule_name=dict(required=True),
        description=dict(),
        tag_name=dict(),
        source_zone=dict(type='list'),
        source_ip=dict(type='list', default=['any']),
        destination_zone=dict(),
        destination_ip=dict(type='list', default=['any']),
        service=dict(default='any'),
        to_interface=dict(default='any'),
        snat_type=dict(choices=['static-ip', 'dynamic-ip-and-port', 'dynamic-ip']),
        snat_address_type=dict(choices=['interface-address', 'translated-address'], default='interface-address'),
        snat_static_address=dict(),
        snat_dynamic_address=dict(type='list'),
        snat_interface=dict(),
        snat_interface_address=dict(),
        snat_bidirectional=dict(type='bool', default=False),
        dnat_address=dict(),
        dnat_port=dict(),
        devicegroup=dict(),
        commit=dict(type='bool', default=True)
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False,
                           required_one_of=[['api_key', 'password']])
    if not HAS_LIB:
        module.fail_json(msg='Missing required libraries.')
    ip_address = module.params["ip_address"]
    password = module.params["password"]
    username = module.params['username']
    api_key = module.params['api_key']
    operation = module.params['operation']
    rule_name = module.params['rule_name']
    description = module.params['description']
    tag_name = module.params['tag_name']
    source_zone = module.params['source_zone']
    source_ip = module.params['source_ip']
    destination_zone = module.params['destination_zone']
    destination_ip = module.params['destination_ip']
    service = module.params['service']
    to_interface = module.params['to_interface']
    # Only IPv4 NAT is supported by this module; not user-configurable.
    nat_type = 'ipv4'
    snat_type = module.params['snat_type']
    snat_address_type = module.params['snat_address_type']
    snat_static_address = module.params['snat_static_address']
    snat_dynamic_address = module.params['snat_dynamic_address']
    snat_interface = module.params['snat_interface']
    snat_interface_address = module.params['snat_interface_address']
    snat_bidirectional = module.params['snat_bidirectional']
    dnat_address = module.params['dnat_address']
    dnat_port = module.params['dnat_port']
    devicegroup = module.params['devicegroup']
    commit = module.params['commit']
    # Create the device with the appropriate pandevice type
    device = base.PanDevice.create_from_device(ip_address, username, password, api_key=api_key)
    # If Panorama, validate the devicegroup
    dev_group = None
    if devicegroup and isinstance(device, panorama.Panorama):
        dev_group = get_devicegroup(device, devicegroup)
        if dev_group:
            device.add(dev_group)
        else:
            module.fail_json(msg='\'%s\' device group not found in Panorama. Is the name correct?' % devicegroup)
    # Get the rulebase
    rulebase = get_rulebase(device, dev_group)
    # Which action shall we take on the object?
    if operation == "find":
        # Search for the rule
        match = find_rule(rulebase, rule_name)
        # If found, format and return the result
        if match:
            match_dict = xmltodict.parse(match.element_str())
            module.exit_json(
                stdout_lines=json.dumps(match_dict, indent=2),
                msg='Rule matched'
            )
        else:
            module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
    elif operation == "delete":
        # Search for the object
        match = find_rule(rulebase, rule_name)
        # If found, delete it
        if match:
            try:
                match.delete()
                if commit:
                    # Synchronous commit: wait for the device to finish.
                    device.commit(sync=True)
            except PanXapiError as exc:
                module.fail_json(msg=to_native(exc))
            module.exit_json(changed=True, msg='Rule \'%s\' successfully deleted.' % rule_name)
        else:
            module.fail_json(msg='Rule \'%s\' not found. Is the name correct?' % rule_name)
    elif operation == "add":
        # Look for required parameters
        if source_zone and destination_zone and nat_type:
            pass
        else:
            module.fail_json(msg='Missing parameter. Required: source_zone, destination_zone, nat_type')
        # Search for the rule. Fail if found.
        match = find_rule(rulebase, rule_name)
        if match:
            module.fail_json(msg='Rule \'%s\' already exists. Use operation: \'update\' to change it.' % rule_name)
        else:
            try:
                new_rule = create_nat_rule(
                    rule_name=rule_name,
                    description=description,
                    tag_name=tag_name,
                    source_zone=source_zone,
                    destination_zone=destination_zone,
                    source_ip=source_ip,
                    destination_ip=destination_ip,
                    service=service,
                    to_interface=to_interface,
                    nat_type=nat_type,
                    snat_type=snat_type,
                    snat_address_type=snat_address_type,
                    snat_static_address=snat_static_address,
                    snat_dynamic_address=snat_dynamic_address,
                    snat_interface=snat_interface,
                    snat_interface_address=snat_interface_address,
                    snat_bidirectional=snat_bidirectional,
                    dnat_address=dnat_address,
                    dnat_port=dnat_port
                )
                changed = add_rule(rulebase, new_rule)
                if changed and commit:
                    device.commit(sync=True)
            except PanXapiError as exc:
                module.fail_json(msg=to_native(exc))
            module.exit_json(changed=changed, msg='Rule \'%s\' successfully added.' % rule_name)
    elif operation == 'update':
        # Search for the rule. Update if found.
        match = find_rule(rulebase, rule_name)
        if match:
            try:
                new_rule = create_nat_rule(
                    rule_name=rule_name,
                    description=description,
                    tag_name=tag_name,
                    source_zone=source_zone,
                    destination_zone=destination_zone,
                    source_ip=source_ip,
                    destination_ip=destination_ip,
                    service=service,
                    to_interface=to_interface,
                    nat_type=nat_type,
                    snat_type=snat_type,
                    snat_address_type=snat_address_type,
                    snat_static_address=snat_static_address,
                    snat_dynamic_address=snat_dynamic_address,
                    snat_interface=snat_interface,
                    snat_interface_address=snat_interface_address,
                    snat_bidirectional=snat_bidirectional,
                    dnat_address=dnat_address,
                    dnat_port=dnat_port
                )
                changed = update_rule(rulebase, new_rule)
                if changed and commit:
                    device.commit(sync=True)
            except PanXapiError as exc:
                module.fail_json(msg=to_native(exc))
            module.exit_json(changed=changed, msg='Rule \'%s\' successfully updated.' % rule_name)
        else:
            module.fail_json(msg='Rule \'%s\' does not exist. Use operation: \'add\' to add it.' % rule_name)
if __name__ == '__main__':
main()
| gpl-3.0 |
falbassini/googleads-dfa-reporting-samples | python/v2.2/target_ad_to_remarketing_list.py | 3 | 2651 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example targets an ad to a remarketing list.
The first targetable remarketing list, either owned by or shared to the ad's
advertiser, will be used. To create a remarketing list, see
create_remarketing_list.py. To share a remarketing list with the ad's
advertiser, see share_remarketing_list_to_advertiser.py.
"""
import argparse
import sys
from apiclient import sample_tools
from oauth2client import client
# Declare command-line flags. This parser is passed as a parent to
# sample_tools.init() in main(), which merges in the common OAuth flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
    'profile_id', type=int,
    help='The ID of the profile to use for targeting')
argparser.add_argument('ad_id', type=int, help='The ID of the ad to target')
def main(argv):
  """Target the given ad to the first targetable remarketing list.

  Fixes over the original: the selected list no longer shadows the builtin
  ``list``, and a response that omits 'targetableRemarketingLists' is treated
  as an empty result instead of raising KeyError.
  """
  # Authenticate and construct service.
  service, flags = sample_tools.init(
      argv, 'dfareporting', 'v2.2', __doc__, __file__, parents=[argparser],
      scope=['https://www.googleapis.com/auth/dfareporting',
             'https://www.googleapis.com/auth/dfatrafficking'])
  profile_id = flags.profile_id
  ad_id = flags.ad_id
  try:
    # Retrieve the ad.
    ad = service.ads().get(profileId=profile_id, id=ad_id).execute()
    # Retrieve a single targetable remarketing list for the ad.
    lists = service.targetableRemarketingLists().list(
        profileId=profile_id, advertiserId=ad['advertiserId'],
        maxResults=1).execute()
    # .get() tolerates a response without the key (no lists available).
    targetable_lists = lists.get('targetableRemarketingLists')
    if targetable_lists:
      remarketing_list = targetable_lists[0]
      # Update the ad with a list targeting expression.
      ad['remarketing_list_expression'] = {'expression': remarketing_list['id']}
      response = service.ads().update(profileId=profile_id, body=ad).execute()
      print ('Ad %s updated to use remarketing list expression: "%s".'
             % (response['id'],
                response['remarketing_list_expression']['expression']))
  except client.AccessTokenRefreshError:
    print ('The credentials have been revoked or expired, please re-run the '
           'application to re-authorize')
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 |
CasparLi/calibre | src/calibre/gui2/viewer/image_popup.py | 14 | 5379 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
from PyQt5.Qt import (QDialog, QPixmap, QUrl, QScrollArea, QLabel, QSizePolicy,
QDialogButtonBox, QVBoxLayout, QPalette, QApplication, QSize, QIcon,
Qt, QTransform)
from calibre.gui2 import choose_save_file, gprefs
class ImageView(QDialog):

    """Dialog that displays a single image with zoom, rotate and save-as
    controls. The pixmap lives in a QLabel inside a QScrollArea so oversized
    images can be panned. Window geometry is persisted in gprefs under
    ``geom_name``.

    NOTE(review): this file is Python 2 era code — it uses ``unicode`` and
    the PyQt enum spelled ``QUrl.None`` (renamed ``QUrl.None_`` in later
    sip versions); the ``_()`` and ``I()`` helpers are calibre builtins.
    """

    def __init__(self, parent, current_img, current_url, geom_name='viewer_image_popup_geometry'):
        # NOTE(review): parent is deliberately NOT passed to QDialog.__init__,
        # so the dialog is top-level; parent is only consulted for the
        # available screen geometry below.
        QDialog.__init__(self)
        dw = QApplication.instance().desktop()
        self.avail_geom = dw.availableGeometry(parent if parent is not None else self)
        self.current_img = current_img   # QPixmap being shown
        self.current_url = current_url   # QUrl the image came from
        self.factor = 1.0                # cumulative zoom factor
        self.geom_name = geom_name       # gprefs key for saved geometry
        self.label = l = QLabel()
        l.setBackgroundRole(QPalette.Base)
        l.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
        # Scaled contents: resizing the label (adjust_image) rescales the pixmap.
        l.setScaledContents(True)
        self.scrollarea = sa = QScrollArea()
        sa.setBackgroundRole(QPalette.Dark)
        sa.setWidget(l)
        self.bb = bb = QDialogButtonBox(QDialogButtonBox.Close)
        bb.accepted.connect(self.accept)
        bb.rejected.connect(self.reject)
        self.zi_button = zi = bb.addButton(_('Zoom &in'), bb.ActionRole)
        self.zo_button = zo = bb.addButton(_('Zoom &out'), bb.ActionRole)
        self.save_button = so = bb.addButton(_('&Save as'), bb.ActionRole)
        self.rotate_button = ro = bb.addButton(_('&Rotate'), bb.ActionRole)
        zi.setIcon(QIcon(I('plus.png')))
        zo.setIcon(QIcon(I('minus.png')))
        so.setIcon(QIcon(I('save.png')))
        ro.setIcon(QIcon(I('rotate-right.png')))
        zi.clicked.connect(self.zoom_in)
        zo.clicked.connect(self.zoom_out)
        so.clicked.connect(self.save_image)
        ro.clicked.connect(self.rotate_image)
        self.l = l = QVBoxLayout()
        self.setLayout(l)
        l.addWidget(sa)
        l.addWidget(bb)

    def zoom_in(self):
        """Enlarge the displayed image by 25%."""
        self.factor *= 1.25
        self.adjust_image(1.25)

    def zoom_out(self):
        """Shrink the displayed image by 20% (inverse of zoom_in)."""
        self.factor *= 0.8
        self.adjust_image(0.8)

    def save_image(self):
        """Prompt for a destination and save the current pixmap to disk."""
        filters=[('Images', ['png', 'jpeg', 'jpg'])]
        f = choose_save_file(self, 'viewer image view save dialog',
                _('Choose a file to save to'), filters=filters,
                all_files=False)
        if f:
            self.current_img.save(f)

    def adjust_image(self, factor):
        """Resize the label to the current zoom factor and fix up scrollbars.

        Zoom buttons are disabled beyond 3x magnification or below 1/3x.
        """
        self.label.resize(self.factor * self.current_img.size())
        self.zi_button.setEnabled(self.factor <= 3)
        self.zo_button.setEnabled(self.factor >= 0.3333)
        self.adjust_scrollbars(factor)

    def adjust_scrollbars(self, factor):
        # Keep the viewport centred on the same image point after zooming.
        for sb in (self.scrollarea.horizontalScrollBar(),
                self.scrollarea.verticalScrollBar()):
            sb.setValue(int(factor*sb.value()) + ((factor - 1) * sb.pageStep()/2))

    def rotate_image(self):
        """Rotate the image 90 degrees clockwise and reset zoom/scroll state."""
        pm = self.label.pixmap()
        t = QTransform()
        t.rotate(90)
        pm = self.current_img = pm.transformed(t)
        self.label.setPixmap(pm)
        self.label.adjustSize()
        self.factor = 1
        for sb in (self.scrollarea.horizontalScrollBar(),
                self.scrollarea.verticalScrollBar()):
            sb.setValue(0)

    def __call__(self, use_exec=False):
        """Show the dialog (modally if use_exec), restoring saved geometry."""
        geom = self.avail_geom
        self.label.setPixmap(self.current_img)
        self.label.adjustSize()
        # Default size: 40% of screen width, nearly full screen height.
        self.resize(QSize(int(geom.width()/2.5), geom.height()-50))
        geom = gprefs.get(self.geom_name, None)
        if geom is not None:
            self.restoreGeometry(geom)
        try:
            # QUrl.None is the PyQt4/5-on-Python-2 spelling of the
            # no-formatting enum value; current_url may also be a plain string.
            self.current_image_name = unicode(self.current_url.toString(QUrl.None)).rpartition('/')[-1]
        except AttributeError:
            self.current_image_name = self.current_url
        title = _('View Image: %s')%self.current_image_name
        self.setWindowTitle(title)
        if use_exec:
            self.exec_()
        else:
            self.show()

    def done(self, e):
        # Persist window geometry whenever the dialog closes.
        gprefs[self.geom_name] = bytearray(self.saveGeometry())
        return QDialog.done(self, e)

    def wheelEvent(self, event):
        # If there is nothing to scroll, use the wheel to zoom instead.
        d = event.angleDelta().y()
        if abs(d) > 0 and not self.scrollarea.verticalScrollBar().isVisible():
            event.accept()
            (self.zoom_out if d < 0 else self.zoom_in)()
class ImagePopup(object):

    """Factory/registry for ImageView dialogs belonging to one parent widget.

    Callers set ``current_img``/``current_url`` and then invoke the instance
    to pop up a viewer; closed dialogs are pruned lazily via ``cleanup``.
    """

    def __init__(self, parent):
        self.current_img = QPixmap()
        self.current_url = QUrl()
        self.parent = parent
        self.dialogs = []

    def __call__(self):
        # Nothing to show for a null pixmap.
        if self.current_img.isNull():
            return
        dialog = ImageView(self.parent, self.current_img, self.current_url)
        self.dialogs.append(dialog)
        # Queued so cleanup runs after the dialog has fully finished closing.
        dialog.finished.connect(self.cleanup, type=Qt.QueuedConnection)
        dialog()

    def cleanup(self):
        # Drop references to dialogs that are no longer visible.
        self.dialogs[:] = [d for d in self.dialogs if d.isVisible()]
if __name__ == '__main__':
    # Manual test harness: display the image file given as the last
    # command-line argument in an ImageView dialog.
    import sys
    app = QApplication([])
    p = QPixmap()
    p.load(sys.argv[-1])
    u = QUrl.fromLocalFile(sys.argv[-1])
    d = ImageView(None, p, u)
    d()
    app.exec_()
| gpl-3.0 |
perkinslr/pypyjs | addedLibraries/twisted/internet/kqreactor.py | 28 | 9720 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A kqueue()/kevent() based implementation of the Twisted main loop.
To use this reactor, start your application specifying the kqueue reactor::
twistd --reactor kqueue ...
To install the event loop from code (and you should do this before any
connections, listeners or connectors are added)::
from twisted.internet import kqreactor
kqreactor.install()
This implementation depends on Python 2.6 or higher which has kqueue support
built in the select module.
Note, that you should use Python 2.6.5 or higher, since previous implementations
of select.kqueue had U{http://bugs.python.org/issue5910} not yet fixed.
"""
import errno
from zope.interface import implements
from select import kqueue, kevent
from select import KQ_FILTER_READ, KQ_FILTER_WRITE
from select import KQ_EV_DELETE, KQ_EV_ADD, KQ_EV_EOF
from twisted.internet.interfaces import IReactorFDSet, IReactorDaemonize
from twisted.python import log, failure
from twisted.internet import main, posixbase
class KQueueReactor(posixbase.PosixReactorBase):
    """
    A reactor that uses kqueue(2)/kevent(2) and relies on Python 2.6 or higher
    which has built in support for kqueue in the select module.

    NOTE(review): Python 2 only — uses zope.interface's ``implements``
    class advice and ``except OSError, e`` syntax.

    @ivar _kq: A L{kqueue} which will be used to check for I/O readiness.

    @ivar _selectables: A dictionary mapping integer file descriptors to
        instances of L{FileDescriptor} which have been registered with the
        reactor. All L{FileDescriptors} which are currently receiving read or
        write readiness notifications will be present as values in this
        dictionary.

    @ivar _reads: A set containing integer file descriptors. Values in this
        set will be registered with C{_kq} for read readiness notifications
        which will be dispatched to the corresponding L{FileDescriptor}
        instances in C{_selectables}.

    @ivar _writes: A set containing integer file descriptors. Values in this
        set will be registered with C{_kq} for write readiness notifications
        which will be dispatched to the corresponding L{FileDescriptor}
        instances in C{_selectables}.
    """
    implements(IReactorFDSet, IReactorDaemonize)

    def __init__(self):
        """
        Initialize kqueue object, file descriptor tracking dictionaries, and the
        base class.

        See:
            - http://docs.python.org/library/select.html
            - www.freebsd.org/cgi/man.cgi?query=kqueue
            - people.freebsd.org/~jlemon/papers/kqueue.pdf
        """
        self._kq = kqueue()
        self._reads = set()
        self._writes = set()
        self._selectables = {}
        posixbase.PosixReactorBase.__init__(self)

    def _updateRegistration(self, fd, filter, op):
        """
        Private method for changing kqueue registration on a given FD
        filtering for events given filter/op. This will never block and
        returns nothing.
        """
        # max_events=0 and timeout=0: submit the change list only, poll nothing.
        self._kq.control([kevent(fd, filter, op)], 0, 0)

    def beforeDaemonize(self):
        """
        Implement L{IReactorDaemonize.beforeDaemonize}.
        """
        # Twisted-internal method called during daemonization (when application
        # is started via twistd). This is called right before the magic double
        # forking done for daemonization. We cleanly close the kqueue() and later
        # recreate it. This is needed since a) kqueue() are not inherited across
        # forks and b) twistd will create the reactor already before daemonization
        # (and will also add at least 1 reader to the reactor, an instance of
        # twisted.internet.posixbase._UnixWaker).
        #
        # See: twisted.scripts._twistd_unix.daemonize()
        self._kq.close()
        self._kq = None

    def afterDaemonize(self):
        """
        Implement L{IReactorDaemonize.afterDaemonize}.
        """
        # Twisted-internal method called during daemonization. This is called right
        # after daemonization and recreates the kqueue() and any readers/writers
        # that were added before. Note that you MUST NOT call any reactor methods
        # in between beforeDaemonize() and afterDaemonize()!
        self._kq = kqueue()
        for fd in self._reads:
            self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_ADD)
        for fd in self._writes:
            self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_ADD)

    def addReader(self, reader):
        """
        Implement L{IReactorFDSet.addReader}.
        """
        fd = reader.fileno()
        if fd not in self._reads:
            try:
                self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_ADD)
            except OSError:
                # Registration can fail (e.g. fd type unsupported by kqueue);
                # the fd is still tracked so removeReader stays consistent.
                pass
            finally:
                self._selectables[fd] = reader
                self._reads.add(fd)

    def addWriter(self, writer):
        """
        Implement L{IReactorFDSet.addWriter}.
        """
        fd = writer.fileno()
        if fd not in self._writes:
            try:
                self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_ADD)
            except OSError:
                pass
            finally:
                self._selectables[fd] = writer
                self._writes.add(fd)

    def removeReader(self, reader):
        """
        Implement L{IReactorFDSet.removeReader}.
        """
        wasLost = False
        try:
            fd = reader.fileno()
        except:
            fd = -1
        if fd == -1:
            # The fd is already gone; find it by identity in _selectables so
            # the bookkeeping can still be cleaned up.
            for fd, fdes in self._selectables.items():
                if reader is fdes:
                    wasLost = True
                    break
            else:
                return
        if fd in self._reads:
            self._reads.remove(fd)
            # Keep the selectable if it is still registered for writing.
            if fd not in self._writes:
                del self._selectables[fd]
            if not wasLost:
                try:
                    self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_DELETE)
                except OSError:
                    pass

    def removeWriter(self, writer):
        """
        Implement L{IReactorFDSet.removeWriter}.
        """
        wasLost = False
        try:
            fd = writer.fileno()
        except:
            fd = -1
        if fd == -1:
            for fd, fdes in self._selectables.items():
                if writer is fdes:
                    wasLost = True
                    break
            else:
                return
        if fd in self._writes:
            self._writes.remove(fd)
            # Keep the selectable if it is still registered for reading.
            if fd not in self._reads:
                del self._selectables[fd]
            if not wasLost:
                try:
                    self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_DELETE)
                except OSError:
                    pass

    def removeAll(self):
        """
        Implement L{IReactorFDSet.removeAll}.
        """
        return self._removeAll(
            [self._selectables[fd] for fd in self._reads],
            [self._selectables[fd] for fd in self._writes])

    def getReaders(self):
        """
        Implement L{IReactorFDSet.getReaders}.
        """
        return [self._selectables[fd] for fd in self._reads]

    def getWriters(self):
        """
        Implement L{IReactorFDSet.getWriters}.
        """
        return [self._selectables[fd] for fd in self._writes]

    def doKEvent(self, timeout):
        """
        Poll the kqueue for new events.
        """
        if timeout is None:
            timeout = 1
        try:
            l = self._kq.control([], len(self._selectables), timeout)
        except OSError, e:
            # Python 2 exception syntax; EINTR simply means the poll was
            # interrupted by a signal — return and let the loop retry.
            if e[0] == errno.EINTR:
                return
            else:
                raise
        _drdw = self._doWriteOrRead
        for event in l:
            fd = event.ident
            try:
                selectable = self._selectables[fd]
            except KeyError:
                # Handles the infrequent case where one selectable's
                # handler disconnects another.
                continue
            else:
                log.callWithLogger(selectable, _drdw, selectable, fd, event)

    def _doWriteOrRead(self, selectable, fd, event):
        """
        Private method called when a FD is ready for reading, writing or was
        lost. Do the work and raise errors where necessary.
        """
        why = None
        inRead = False
        (filter, flags, data, fflags) = (
            event.filter, event.flags, event.data, event.fflags)
        if flags & KQ_EV_EOF and data and fflags:
            # EOF with pending error data: the connection is gone.
            why = main.CONNECTION_LOST
        else:
            try:
                if selectable.fileno() == -1:
                    inRead = False
                    why = posixbase._NO_FILEDESC
                else:
                    if filter == KQ_FILTER_READ:
                        inRead = True
                        why = selectable.doRead()
                    if filter == KQ_FILTER_WRITE:
                        inRead = False
                        why = selectable.doWrite()
            except:
                # Any exception from application code gets logged and will
                # cause us to disconnect the selectable.
                why = failure.Failure()
                log.err(why, "An exception was raised from application code" \
                             " while processing a reactor selectable")
        if why:
            self._disconnectSelectable(selectable, why, inRead)

    doIteration = doKEvent
def install():
    """
    Install the kqueue() reactor as the global Twisted reactor.
    """
    from twisted.internet.main import installReactor
    installReactor(KQueueReactor())


__all__ = ["KQueueReactor", "install"]
| mit |
alsrgv/tensorflow | tensorflow/contrib/framework/python/ops/variables_test.py | 23 | 59248 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""variables tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables as variables_lib2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.training import device_setter
from tensorflow.python.training import saver as saver_lib
class LocalVariableTest(test.TestCase):
  """Tests for local_variable(): collection membership, naming, init."""

  def test_local_variable(self):
    with self.cached_session() as sess:
      self.assertEquals([], variables_lib.local_variables())
      value0 = 42
      variables_lib2.local_variable(value0)
      value1 = 43
      variables_lib2.local_variable(value1)
      variables = variables_lib.local_variables()
      self.assertEquals(2, len(variables))
      # Local variables are not initialized until the initializer runs.
      self.assertRaises(errors_impl.OpError, sess.run, variables)
      variables_lib.variables_initializer(variables).run()
      self.assertAllEqual(set([value0, value1]), set(sess.run(variables)))

  def testLocalVariableNameAndShape(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable([1, 1, 1, 1, 1], name='a')
        self.assertEquals(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertListEqual([a], variables_lib2.get_local_variables())

  def testLocalVariableNotInAllVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
        self.assertFalse(a in variables_lib.global_variables())
        self.assertTrue(a in variables_lib.local_variables())

  def testLocalVariableNotInVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
        self.assertFalse(a in variables_lib2.get_variables_to_restore())
        self.assertTrue(a in variables_lib.local_variables())

  def testGetVariablesDontReturnsTransients(self):
    # Local (transient) variables must not appear in get_variables().
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        variables_lib2.local_variable(0)
      with variable_scope.variable_scope('B'):
        variables_lib2.local_variable(0)
      self.assertEquals([], variables_lib2.get_variables('A'))
      self.assertEquals([], variables_lib2.get_variables('B'))

  def testGetLocalVariablesReturnsTransients(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.local_variable(0)
      with variable_scope.variable_scope('B'):
        b = variables_lib2.local_variable(0)
      self.assertEquals([a], variables_lib2.get_local_variables('A'))
      self.assertEquals([b], variables_lib2.get_local_variables('B'))

  def testInitializedVariableValue(self):
    with self.cached_session() as sess:
      a = variables_lib2.local_variable([0, 0, 0, 0, 0], name='a')
      sess.run(variables_lib.local_variables_initializer())
      self.assertAllEqual(a.eval(), [0] * 5)

  def testResourceVariable(self):
    # use_resource=True must yield a ResourceVariable instance.
    a = variables_lib2.local_variable(0)
    b = variables_lib2.local_variable(0, use_resource=True)
    self.assertTrue(isinstance(a, variables_lib.Variable))
    self.assertFalse(isinstance(a, resource_variable_ops.ResourceVariable))
    self.assertTrue(isinstance(b, resource_variable_ops.ResourceVariable))
class GlobalVariableTest(test.TestCase):
  """Tests for global_variable(): collection membership, naming, init."""

  def test_global_variable(self):
    with self.cached_session() as sess:
      self.assertEquals([], variables_lib.global_variables())
      value0 = 42
      variables_lib2.global_variable(value0)
      value1 = 43
      variables_lib2.global_variable(value1)
      variables = variables_lib.global_variables()
      self.assertEquals(2, len(variables))
      # Reading before initialization must fail.
      with self.assertRaisesOpError(
          'Attempting to use uninitialized value Variable'):
        sess.run(variables)
      variables_lib.variables_initializer(variables).run()
      self.assertAllEqual(set([value0, value1]), set(sess.run(variables)))

  def testVariableNameAndShape(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable([1, 1, 1, 1, 1], name='a')
        self.assertEquals(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertListEqual([a], variables_lib.global_variables())

  def testGlobalVariableNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable(0)
        self.assertFalse(a in variables_lib.local_variables())
        self.assertTrue(a in variables_lib.global_variables())

  def testGlobalVariableInVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable(0)
        self.assertFalse(a in variables_lib.local_variables())
        self.assertTrue(a in variables_lib2.get_variables_to_restore())

  def testGetVariablesReturnsThem(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.global_variable(0)
      with variable_scope.variable_scope('B'):
        b = variables_lib2.global_variable(0)
      self.assertEquals([a], variables_lib2.get_variables('A'))
      self.assertEquals([b], variables_lib2.get_variables('B'))

  def testGetLocalVariablesDontReturnsThem(self):
    # Global variables must not appear in get_local_variables().
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        variables_lib2.global_variable(0)
      with variable_scope.variable_scope('B'):
        variables_lib2.global_variable(0)
      self.assertEquals([], variables_lib2.get_local_variables('A'))
      self.assertEquals([], variables_lib2.get_local_variables('B'))

  def testInitializedVariableValue(self):
    with self.cached_session() as sess:
      a = variables_lib2.global_variable([0, 0, 0, 0, 0], name='a')
      sess.run(variables_lib.global_variables_initializer())
      self.assertAllEqual(a.eval(), [0] * 5)

  def testResourceVariable(self):
    # use_resource=True must yield a ResourceVariable instance.
    a = variables_lib2.global_variable(0)
    b = variables_lib2.global_variable(0, use_resource=True)
    self.assertTrue(isinstance(a, variables_lib.Variable))
    self.assertFalse(isinstance(a, resource_variable_ops.ResourceVariable))
    self.assertTrue(isinstance(b, resource_variable_ops.ResourceVariable))
class GlobalStepTest(test.TestCase):
  """Tests for the global-step helpers (create/get/get_or_create)."""

  def _assert_global_step(self, global_step, expected_dtype=dtypes.int64):
    # A valid global step is a scalar integer tensor with the canonical name.
    self.assertEquals('%s:0' % ops.GraphKeys.GLOBAL_STEP, global_step.name)
    self.assertEquals(expected_dtype, global_step.dtype.base_dtype)
    self.assertEquals([], global_step.get_shape().as_list())

  def test_invalid_dtype(self):
    # A float variable under the global-step name must be rejected.
    with ops.Graph().as_default() as g:
      self.assertEquals(None, variables_lib2.get_global_step())
      variables_lib.VariableV1(
          0.0,
          trainable=False,
          dtype=dtypes.float32,
          name=ops.GraphKeys.GLOBAL_STEP)
      self.assertRaisesRegexp(TypeError, 'does not have integer type',
                              variables_lib2.get_global_step)
      self.assertRaisesRegexp(TypeError, 'does not have integer type',
                              variables_lib2.get_global_step, g)

  def test_invalid_shape(self):
    # A non-scalar variable under the global-step name must be rejected.
    with ops.Graph().as_default() as g:
      self.assertEquals(None, variables_lib2.get_global_step())
      variables_lib.VariableV1(
          [0],
          trainable=False,
          dtype=dtypes.int32,
          name=ops.GraphKeys.GLOBAL_STEP)
      self.assertRaisesRegexp(TypeError, 'not scalar',
                              variables_lib2.get_global_step)
      self.assertRaisesRegexp(TypeError, 'not scalar',
                              variables_lib2.get_global_step, g)

  def test_create_global_step(self):
    self.assertEquals(None, variables_lib2.get_global_step())
    with ops.Graph().as_default() as g:
      global_step = variables_lib2.create_global_step()
      self._assert_global_step(global_step)
      # Creating a second global step in the same graph must fail.
      self.assertRaisesRegexp(ValueError, 'already exists',
                              variables_lib2.create_global_step)
      self.assertRaisesRegexp(ValueError, 'already exists',
                              variables_lib2.create_global_step, g)
      # A fresh graph can get its own global step.
      self._assert_global_step(variables_lib2.create_global_step(ops.Graph()))

  def test_get_global_step(self):
    with ops.Graph().as_default() as g:
      self.assertEquals(None, variables_lib2.get_global_step())
      variables_lib.VariableV1(
          0,
          trainable=False,
          dtype=dtypes.int32,
          name=ops.GraphKeys.GLOBAL_STEP)
      self._assert_global_step(
          variables_lib2.get_global_step(), expected_dtype=dtypes.int32)
      self._assert_global_step(
          variables_lib2.get_global_step(g), expected_dtype=dtypes.int32)

  def test_get_or_create_global_step(self):
    with ops.Graph().as_default() as g:
      self.assertEquals(None, variables_lib2.get_global_step())
      self._assert_global_step(variables_lib2.get_or_create_global_step())
      self._assert_global_step(variables_lib2.get_or_create_global_step(g))
class VariablesTest(test.TestCase):
  """Tests for variable(): scoping, lookup, regularizers, device placement."""

  def testCreateVariable(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        self.assertEquals(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertTrue(a in ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
        self.assertFalse(a in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
        self.assertFalse(a in variables_lib.local_variables())

  def testGetVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.variable('a', [5])
      self.assertEquals([a, b], variables_lib2.get_variables())
      self.assertEquals([a], variables_lib2.get_variables('A'))
      self.assertEquals([b], variables_lib2.get_variables('B'))

  def testGetVariablesWithScope(self):
    # get_variables also accepts a VariableScope object instead of a name.
    with self.cached_session():
      with variable_scope.variable_scope('A') as var_scope:
        a = variables_lib2.variable('a', [5])
        b = variables_lib2.variable('b', [5])
      self.assertSetEqual(
          set([a, b]), set(variables_lib2.get_variables(var_scope)))

  def testGetVariablesSuffix(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('A'):
        b = variables_lib2.variable('b', [5])
      self.assertEquals([a], variables_lib2.get_variables(suffix='a'))
      self.assertEquals([b], variables_lib2.get_variables(suffix='b'))

  def testGetVariableWithSingleVar(self):
    with self.cached_session():
      with variable_scope.variable_scope('parent'):
        a = variables_lib2.variable('child', [5])
      self.assertEquals(a, variables_lib2.get_unique_variable('parent/child'))

  def testGetVariableWithDistractors(self):
    # The unique lookup must not be confused by variables in sub-scopes.
    with self.cached_session():
      with variable_scope.variable_scope('parent'):
        a = variables_lib2.variable('child', [5])
        with variable_scope.variable_scope('child'):
          variables_lib2.variable('grandchild1', [7])
          variables_lib2.variable('grandchild2', [9])
      self.assertEquals(a, variables_lib2.get_unique_variable('parent/child'))

  def testGetVariableThrowsExceptionWithNoMatch(self):
    var_name = 'cant_find_me'
    with self.cached_session():
      with self.assertRaises(ValueError):
        variables_lib2.get_unique_variable(var_name)

  def testGetThrowsExceptionWithChildrenButNoMatch(self):
    # A scope that only contains children is not itself a variable.
    var_name = 'parent/child'
    with self.cached_session():
      with variable_scope.variable_scope(var_name):
        variables_lib2.variable('grandchild1', [7])
        variables_lib2.variable('grandchild2', [9])
      with self.assertRaises(ValueError):
        variables_lib2.get_unique_variable(var_name)

  def testGetVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.variable('a', [5])
      self.assertEquals([a, b], variables_lib2.get_variables_to_restore())

  def testIncludeGetVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.variable('a', [5])
      self.assertEquals([a, b], variables_lib2.get_variables())
      self.assertEquals([a], variables_lib2.get_variables_to_restore(['A']))

  def testExcludeGetVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.variable('a', [5])
      self.assertEquals([a, b], variables_lib2.get_variables())
      self.assertEquals(
          [a], variables_lib2.get_variables_to_restore(exclude=['B']))

  def testWrongIncludeGetVariablesToRestore(self):
    # Include patterns are scope prefixes; 'a' matches no scope here.
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.variable('a', [5])
      self.assertEquals([a, b], variables_lib2.get_variables())
      self.assertEquals([], variables_lib2.get_variables_to_restore(['a']))

  def testGetMixedVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        b = variables_lib2.variable('b', [5])
      with variable_scope.variable_scope('B'):
        c = variables_lib2.variable('c', [5])
        d = variables_lib2.variable('d', [5])
      self.assertEquals([a, b, c, d], variables_lib2.get_variables())
      self.assertEquals(
          [a, c],
          variables_lib2.get_variables_to_restore(include=['A/a', 'B/c']))

  def testExcludeGetMixedVariablesToRestore(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        b = variables_lib2.variable('b', [5])
      with variable_scope.variable_scope('B'):
        c = variables_lib2.variable('c', [5])
        d = variables_lib2.variable('d', [5])
      self.assertEquals([a, b, c, d], variables_lib2.get_variables())
      self.assertEquals(
          [b, d],
          variables_lib2.get_variables_to_restore(exclude=['A/a', 'B/c']))

  def testReuseVariable(self):
    # Reusing a scope must return the same underlying variable object.
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [])
      with variable_scope.variable_scope('A', reuse=True):
        b = variables_lib2.variable('a', [])
      self.assertEquals(a, b)
      self.assertListEqual([a], variables_lib2.get_variables())

  def testVariableWithRegularizer(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [], regularizer=nn_ops.l2_loss)
      loss = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
      # The regularization loss op is colocated with the variable.
      self.assertDeviceEqual(loss.device, a.device)

  def testVariableWithRegularizerColocate(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable(
            'a', [], device='gpu:0', regularizer=nn_ops.l2_loss)
      loss = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
      self.assertDeviceEqual(loss.device, a.device)

  def testVariableWithDevice(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [], device='cpu:0')
        b = variables_lib2.variable('b', [], device='cpu:1')
      self.assertDeviceEqual(a.device, 'cpu:0')
      self.assertDeviceEqual(b.device, 'cpu:1')

  def testVariableWithDeviceFromScope(self):
    # An explicit device argument overrides the enclosing device scope.
    with self.cached_session():
      with ops.device('/cpu:0'):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [], device='cpu:1')
      self.assertDeviceEqual(a.device, 'cpu:0')
      self.assertDeviceEqual(b.device, 'cpu:1')

  def testVariableWithDeviceFunction(self):

    # Device callable that assigns each new op to the next cpu in sequence.
    class DevFn(object):

      def __init__(self):
        self.counter = -1

      def __call__(self, op):
        self.counter += 1
        return 'cpu:%d' % self.counter

    with self.cached_session():
      with arg_scope([variables_lib2.variable], device=DevFn()):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
        c = variables_lib2.variable('c', [], device='cpu:12')
        d = variables_lib2.variable('d', [])
        with ops.device('cpu:99'):
          e_init = constant_op.constant(12)
        e = variables_lib2.variable('e', initializer=e_init)
      # Explicit devices win over the device function ('c' on cpu:12), and
      # each variable's initial value is colocated with the variable itself.
      self.assertDeviceEqual(a.device, 'cpu:0')
      self.assertEqual(a.initial_value.op.colocation_groups(),
                       a.op.colocation_groups())
      self.assertDeviceEqual(b.device, 'cpu:1')
      self.assertEqual(b.initial_value.op.colocation_groups(),
                       b.op.colocation_groups())
      self.assertDeviceEqual(c.device, 'cpu:12')
      self.assertEqual(c.initial_value.op.colocation_groups(),
                       c.op.colocation_groups())
      self.assertDeviceEqual(d.device, 'cpu:2')
      self.assertEqual(d.initial_value.op.colocation_groups(),
                       d.op.colocation_groups())
      self.assertDeviceEqual(e.device, 'cpu:3')
      self.assertDeviceEqual(e.initial_value.device, 'cpu:99')

  def testVariableWithReplicaDeviceSetter(self):
    with self.cached_session():
      with ops.device(device_setter.replica_device_setter(ps_tasks=2)):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
        c = variables_lib2.variable('c', [], device='cpu:12')
        d = variables_lib2.variable('d', [])
        with ops.device('cpu:99'):
          e_init = constant_op.constant(12)
        e = variables_lib2.variable('e', initializer=e_init)
      # The values below highlight how the replica_device_setter puts initial
      # values on the worker job, and how it merges explicit devices.
      self.assertDeviceEqual(a.device, '/job:ps/task:0/cpu:0')
      self.assertEqual(a.initial_value.op.colocation_groups(),
                       a.op.colocation_groups())
      self.assertDeviceEqual(b.device, '/job:ps/task:1/cpu:0')
      self.assertEqual(b.initial_value.op.colocation_groups(),
                       b.op.colocation_groups())
      self.assertDeviceEqual(c.device, '/job:ps/task:0/cpu:12')
      self.assertEqual(c.initial_value.op.colocation_groups(),
                       c.op.colocation_groups())
      self.assertDeviceEqual(d.device, '/job:ps/task:1/cpu:0')
      self.assertEqual(d.initial_value.op.colocation_groups(),
                       d.op.colocation_groups())
      self.assertDeviceEqual(e.device, '/job:ps/task:0/cpu:0')
      self.assertDeviceEqual(e.initial_value.device, '/job:worker/cpu:99')

  def testVariableWithVariableDeviceChooser(self):
    with ops.Graph().as_default():
      device_fn = variables_lib2.VariableDeviceChooser(num_tasks=2)
      with arg_scope([variables_lib2.variable], device=device_fn):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
        c = variables_lib2.variable('c', [], device='cpu:12')
        d = variables_lib2.variable('d', [])
        with ops.device('cpu:99'):
          e_init = constant_op.constant(12)
        e = variables_lib2.variable('e', initializer=e_init)
      # The values below highlight how the VariableDeviceChooser puts initial
      # values on the same device as the variable job.
      self.assertDeviceEqual(a.device, '/job:ps/task:0/cpu:0')
      self.assertEqual(a.initial_value.op.colocation_groups(),
                       a.op.colocation_groups())
      self.assertDeviceEqual(b.device, '/job:ps/task:1/cpu:0')
      self.assertEqual(b.initial_value.op.colocation_groups(),
                       b.op.colocation_groups())
      self.assertDeviceEqual(c.device, '/cpu:12')
      self.assertEqual(c.initial_value.op.colocation_groups(),
                       c.op.colocation_groups())
      self.assertDeviceEqual(d.device, '/job:ps/task:0/cpu:0')
      self.assertEqual(d.initial_value.op.colocation_groups(),
                       d.op.colocation_groups())
      self.assertDeviceEqual(e.device, '/job:ps/task:1/cpu:0')
      self.assertDeviceEqual(e.initial_value.device, '/cpu:99')

  def testVariableWithVariableDeviceChooserWithReplica(self):
    with ops.Graph().as_default():
      device_fn = variables_lib2.VariableDeviceChooser(replica=3, num_tasks=2)
      with arg_scope([variables_lib2.variable], device=device_fn):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
        c = variables_lib2.variable('c', [], device='cpu:12')
        d = variables_lib2.variable('d', [])
        with ops.device('cpu:99'):
          e_init = constant_op.constant(12)
        e = variables_lib2.variable('e', initializer=e_init)
      # The values below highlight how the VariableDeviceChooser puts initial
      # values on the same device as the variable job.
      self.assertDeviceEqual(a.device, '/job:ps/replica:3/task:0/cpu:0')
      self.assertEqual(a.initial_value.op.colocation_groups(),
                       a.op.colocation_groups())
      self.assertDeviceEqual(b.device, '/job:ps/replica:3/task:1/cpu:0')
      self.assertEqual(b.initial_value.op.colocation_groups(),
                       b.op.colocation_groups())
      self.assertDeviceEqual(c.device, '/cpu:12')
      self.assertEqual(c.initial_value.op.colocation_groups(),
                       c.op.colocation_groups())
      self.assertDeviceEqual(d.device, '/job:ps/replica:3/task:0/cpu:0')
      self.assertEqual(d.initial_value.op.colocation_groups(),
                       d.op.colocation_groups())
      self.assertDeviceEqual(e.device, '/job:ps/replica:3/task:1/cpu:0')
      self.assertDeviceEqual(e.initial_value.device, '/cpu:99')

  def testVariableGPUPlacement(self):
    with ops.Graph().as_default():
      device_fn = variables_lib2.VariableDeviceChooser(device_type='GPU')
      with arg_scope([variables_lib2.variable], device=device_fn):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
        c = variables_lib2.variable('c', [], device='cpu:12')
        d = variables_lib2.variable('d', [])
        with ops.device('cpu:99'):
          e_init = constant_op.constant(12)
        e = variables_lib2.variable('e', initializer=e_init)
      # The values below highlight how the VariableDeviceChooser puts initial
      # values on the same device as the variable job.
      self.assertDeviceEqual(a.device, '/device:GPU:0')
      self.assertEqual(a.initial_value.op.colocation_groups(),
                       a.op.colocation_groups())
      self.assertDeviceEqual(b.device, '/device:GPU:0')
      self.assertEqual(b.initial_value.op.colocation_groups(),
                       b.op.colocation_groups())
      self.assertDeviceEqual(c.device, '/cpu:12')
      self.assertEqual(c.initial_value.op.colocation_groups(),
                       c.op.colocation_groups())
      self.assertDeviceEqual(d.device, '/device:GPU:0')
      self.assertEqual(d.initial_value.op.colocation_groups(),
                       d.op.colocation_groups())
      self.assertDeviceEqual(e.device, '/device:GPU:0')
      self.assertDeviceEqual(e.initial_value.device, '/cpu:99')
class ModelVariablesTest(test.TestCase):
  """Tests for model_variable() and the model-variable query helpers.

  Uses assertEqual throughout: assertEquals is a deprecated alias removed
  in Python 3.12. Also fixes a copy-paste bug in
  testVariableWithVariableDeviceChooser (it re-checked 'a' instead of 'b').
  """

  def testNameAndShape(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
        self.assertEqual(a.op.name, 'A/a')
        self.assertListEqual(a.get_shape().as_list(), [5])
        self.assertListEqual([a], variables_lib2.get_model_variables('A'))

  def testNotInLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
        # Model variables are globals tracked in MODEL_VARIABLES, not locals.
        self.assertTrue(a in variables_lib.global_variables())
        self.assertTrue(a in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES))
        self.assertFalse(a in variables_lib.local_variables())

  def testGetVariablesReturns(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.model_variable('a', [5])
      self.assertEqual([a], variables_lib2.get_variables('A'))
      self.assertEqual([b], variables_lib2.get_variables('B'))

  def testGetModelVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.model_variable('a', [5])
      with variable_scope.variable_scope('B'):
        b = variables_lib2.model_variable('a', [5])
      self.assertEqual([a], variables_lib2.get_model_variables('A'))
      self.assertEqual([b], variables_lib2.get_model_variables('B'))

  def testGetTrainableVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        # local_variable() results must not show up as trainable.
        variables_lib2.local_variable([5])
        a = variables_lib.VariableV1([5])
      with variable_scope.variable_scope('B'):
        variables_lib2.local_variable([5])
        b = variables_lib.VariableV1([5])
      self.assertEqual([a], variables_lib2.get_trainable_variables('A'))
      self.assertEqual([b], variables_lib2.get_trainable_variables('B'))

  def testGetLocalVariables(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        _ = variables_lib2.model_variable('a', [5])
      with variable_scope.variable_scope('B'):
        _ = variables_lib2.model_variable('a', [5])
      # Model variables never appear among local variables.
      self.assertEqual([], variables_lib2.get_local_variables('A'))
      self.assertEqual([], variables_lib2.get_local_variables('B'))

  def testInitializedVariableValue(self):
    with self.cached_session() as sess:
      a = variables_lib2.model_variable(
          'a', [5], initializer=init_ops.ones_initializer())
      sess.run(variables_lib.global_variables_initializer())
      self.assertAllEqual(a.eval(), [1] * 5)

  def testDeviceFn(self):
    class DevFn(object):
      """Returns a fresh /cpu:N device each time it is called."""

      def __init__(self):
        self.counter = -1

      def __call__(self, op):
        self.counter += 1
        return '/cpu:%d' % self.counter

    with ops.Graph().as_default():
      with arg_scope([variables_lib2.model_variable], device=DevFn()):
        a = variables_lib2.model_variable('a', [5])
        b = variables_lib2.model_variable('b', [20])
        self.assertDeviceEqual(a.device, '/cpu:0')
        self.assertEqual(a.initial_value.op.colocation_groups(),
                         a.op.colocation_groups())
        self.assertDeviceEqual(b.device, '/cpu:1')
        self.assertEqual(b.initial_value.op.colocation_groups(),
                         b.op.colocation_groups())

  def testVariableWithVariableDeviceChooser(self):
    with ops.Graph().as_default():
      device_fn = variables_lib2.VariableDeviceChooser()
      with arg_scope([variables_lib2.model_variable], device=device_fn):
        a = variables_lib2.model_variable('a', [5])
        b = variables_lib2.model_variable('b', [20])
        self.assertDeviceEqual(a.device, 'cpu:0')
        self.assertEqual(a.initial_value.op.colocation_groups(),
                         a.op.colocation_groups())
        self.assertDeviceEqual(b.device, 'cpu:0')
        # BUGFIX: verify b's (not a's, as before) initial value is colocated
        # with b.
        self.assertEqual(b.initial_value.op.colocation_groups(),
                         b.op.colocation_groups())
class GetVariablesCollections(test.TestCase):
  """Tests that variable() registers variables in the requested collections.

  Uses assertEqual: assertEquals is a deprecated alias removed in
  Python 3.12.
  """

  def testVariableCollection(self):
    with self.cached_session():
      a = variables_lib2.variable('a', [], collections='A')
      b = variables_lib2.variable('b', [], collections='B')
      self.assertEqual(a, ops.get_collection('A')[0])
      self.assertEqual(b, ops.get_collection('B')[0])

  def testVariableCollections(self):
    with self.cached_session():
      # A variable may be registered in several collections at once.
      a = variables_lib2.variable('a', [], collections=['A', 'C'])
      b = variables_lib2.variable('b', [], collections=['B', 'C'])
      self.assertEqual(a, ops.get_collection('A')[0])
      self.assertEqual(b, ops.get_collection('B')[0])
      self.assertListEqual([a, b], ops.get_collection('C'))

  def testVariableCollectionsWithArgScope(self):
    with self.cached_session():
      with arg_scope([variables_lib2.variable], collections='A'):
        a = variables_lib2.variable('a', [])
        b = variables_lib2.variable('b', [])
      self.assertListEqual([a, b], ops.get_collection('A'))

  def testVariableCollectionsWithArgScopeNested(self):
    with self.cached_session():
      with arg_scope([variables_lib2.variable], collections='A'):
        a = variables_lib2.variable('a', [])
        # The inner scope overrides the outer collections setting.
        with arg_scope([variables_lib2.variable], collections='B'):
          b = variables_lib2.variable('b', [])
      self.assertEqual(a, ops.get_collection('A')[0])
      self.assertEqual(b, ops.get_collection('B')[0])

  def testVariableCollectionsWithArgScopeNonNested(self):
    with self.cached_session():
      with arg_scope([variables_lib2.variable], collections='A'):
        a = variables_lib2.variable('a', [])
      with arg_scope([variables_lib2.variable], collections='B'):
        b = variables_lib2.variable('b', [])
      # 'c' is created outside any arg_scope and joins neither collection.
      variables_lib2.variable('c', [])
      self.assertListEqual([a], ops.get_collection('A'))
      self.assertListEqual([b], ops.get_collection('B'))

  def testVariableRestoreWithArgScopeNested(self):
    with self.cached_session():
      a = variables_lib2.variable('a', [])
      with arg_scope(
          [variables_lib2.variable], trainable=False, collections=['A', 'B']):
        b = variables_lib2.variable('b', [])
      c = variables_lib2.variable('c', [], trainable=False)
      # 'b' went to the custom collections and is excluded from the default
      # restore list and from trainable variables.
      self.assertEqual([a, c], variables_lib2.get_variables_to_restore())
      self.assertEqual([a], variables_lib.trainable_variables())
      self.assertEqual([b], ops.get_collection('A'))
      self.assertEqual([b], ops.get_collection('B'))
class GetVariablesBySuffixTest(test.TestCase):
  """Tests for get_variables_by_suffix().

  Uses assertEqual: assertEquals is a deprecated alias removed in
  Python 3.12.
  """

  def testGetVariableGivenNameScoped(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        b = variables_lib2.variable('b', [5])
        self.assertEqual([a], variables_lib2.get_variables_by_suffix('a'))
        self.assertEqual([b], variables_lib2.get_variables_by_suffix('b'))

  def testGetVariableWithScope(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        fooa = variables_lib2.variable('fooa', [5])
      with variable_scope.variable_scope('B'):
        a2 = variables_lib2.variable('a', [5])
      # A bare suffix matches any variable whose name ends with it.
      matched_variables = variables_lib2.get_variables_by_suffix('a')
      self.assertEqual([a, fooa, a2], matched_variables)
      # A '/'-prefixed suffix matches only a full final name component.
      matched_variables = variables_lib2.get_variables_by_suffix('/a')
      self.assertEqual([a, a2], matched_variables)
      matched_variables = variables_lib2.get_variables_by_suffix('a', scope='A')
      self.assertEqual([a, fooa], matched_variables)

  def testGetVariableWithoutScope(self):
    with self.cached_session():
      a = variables_lib2.variable('a', [5])
      fooa = variables_lib2.variable('fooa', [5])
      b_a = variables_lib2.variable('B/a', [5])
      matched_variables = variables_lib2.get_variables_by_suffix('a')
      self.assertEqual([a, fooa, b_a], matched_variables)
      matched_variables = variables_lib2.get_variables_by_suffix('fooa')
      self.assertEqual([fooa], matched_variables)
class GetVariablesByNameTest(test.TestCase):
  """Tests for get_variables_by_name().

  Uses assertEqual: assertEquals is a deprecated alias removed in
  Python 3.12.
  """

  def testGetVariableGivenNameScoped(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        b = variables_lib2.variable('b', [5])
        self.assertEqual([a], variables_lib2.get_variables_by_name('a'))
        self.assertEqual([b], variables_lib2.get_variables_by_name('b'))

  def testGetVariableWithScope(self):
    with self.cached_session():
      with variable_scope.variable_scope('A'):
        a = variables_lib2.variable('a', [5])
        fooa = variables_lib2.variable('fooa', [5])
      with variable_scope.variable_scope('B'):
        a2 = variables_lib2.variable('a', [5])
      # By-name lookup requires an exact final-component match, so 'fooa'
      # does not match 'a' and '/a' matches nothing.
      matched_variables = variables_lib2.get_variables_by_name('a')
      self.assertEqual([a, a2], matched_variables)
      matched_variables = variables_lib2.get_variables_by_name('fooa')
      self.assertEqual([fooa], matched_variables)
      matched_variables = variables_lib2.get_variables_by_name('/a')
      self.assertEqual([], matched_variables)
      matched_variables = variables_lib2.get_variables_by_name('a', scope='A')
      self.assertEqual([a], matched_variables)

  def testGetVariableWithoutScope(self):
    with self.cached_session():
      a = variables_lib2.variable('a', [5])
      fooa = variables_lib2.variable('fooa', [5])
      b_a = variables_lib2.variable('B/a', [5])
      matched_variables = variables_lib2.get_variables_by_name('a')
      self.assertEqual([a, b_a], matched_variables)
      matched_variables = variables_lib2.get_variables_by_name('fooa')
      self.assertEqual([fooa], matched_variables)
class GetVariableFullNameTest(test.TestCase):
  """Tests for get_variable_full_name().

  Uses assertEqual: assertEquals is a deprecated alias removed in
  Python 3.12.
  """

  def testVariable(self):
    my_var0 = variables_lib2.variable('my_var0', shape=[])
    full_name = variables_lib2.get_variable_full_name(my_var0)
    self.assertEqual(full_name, my_var0.op.name)

  def testPartitionedVariable(self):
    input_full_name = 'my_var0'
    partitioner = partitioned_variables.variable_axis_size_partitioner(2)
    my_var0 = variables_lib2.variable(
        'my_var0', shape=[2, 2], partitioner=partitioner)
    # Every partition reports the same logical (unpartitioned) full name.
    for part_var in list(my_var0):
      computed_full_name = variables_lib2.get_variable_full_name(part_var)
      self.assertEqual(input_full_name, computed_full_name)
class AssignFromValuesTest(test.TestCase):
  """Tests for assign_from_values(), which returns an assign op + feed dict."""

  def testNoScopes(self):
    value0 = np.asarray([1.0, 3.0, 9.0]).reshape((1, 3, 1))
    value1 = np.asarray([2.0, 4.0, 6.0, 8.0]).reshape((2, 1, 2))
    with self.cached_session() as sess:
      initializer = init_ops.truncated_normal_initializer(stddev=.1)
      var0 = variables_lib2.variable(
          'my_var0', shape=[1, 3, 1], initializer=initializer)
      var1 = variables_lib2.variable(
          'my_var1', shape=[2, 1, 2], initializer=initializer)
      assign_op, feed_dict = variables_lib2.assign_from_values(
          {'my_var0': value0, 'my_var1': value1})
      # Start from random values, then overwrite via the assign op.
      sess.run(variables_lib.global_variables_initializer())
      sess.run(assign_op, feed_dict)
      # Both variables must now hold exactly the requested values.
      out0, out1 = sess.run([var0, var1])
      self.assertAllEqual(value0, out0)
      self.assertAllEqual(value1, out1)

  def testWithScopes(self):
    value0 = np.asarray([1.0, 3.0, 9.0]).reshape((1, 3, 1))
    value1 = np.asarray([2.0, 4.0, 6.0, 8.0]).reshape((2, 1, 2))
    with self.cached_session() as sess:
      initializer = init_ops.truncated_normal_initializer(stddev=.1)
      with variable_scope.variable_scope('my_model/my_layer0'):
        var0 = variables_lib2.variable(
            'my_var0', shape=[1, 3, 1], initializer=initializer)
      with variable_scope.variable_scope('my_model/my_layer1'):
        var1 = variables_lib2.variable(
            'my_var1', shape=[2, 1, 2], initializer=initializer)
      # The mapping keys are the fully scoped variable names.
      assign_op, feed_dict = variables_lib2.assign_from_values({
          'my_model/my_layer0/my_var0': value0,
          'my_model/my_layer1/my_var1': value1,
      })
      sess.run(variables_lib.global_variables_initializer())
      sess.run(assign_op, feed_dict)
      out0, out1 = sess.run([var0, var1])
      self.assertAllEqual(value0, out0)
      self.assertAllEqual(value1, out1)
class AssignFromValuesFnTest(test.TestCase):
  """Tests for assign_from_values_fn(), the callable-returning variant."""

  def testNoScopes(self):
    value0 = np.asarray([1.0, 3.0, 9.0]).reshape((1, 3, 1))
    value1 = np.asarray([2.0, 4.0, 6.0, 8.0]).reshape((2, 1, 2))
    with self.cached_session() as sess:
      initializer = init_ops.truncated_normal_initializer(stddev=.1)
      var0 = variables_lib2.variable(
          'my_var0', shape=[1, 3, 1], initializer=initializer)
      var1 = variables_lib2.variable(
          'my_var1', shape=[2, 1, 2], initializer=initializer)
      init_fn = variables_lib2.assign_from_values_fn(
          {'my_var0': value0, 'my_var1': value1})
      # Start from random values, then overwrite through the returned fn.
      sess.run(variables_lib.global_variables_initializer())
      init_fn(sess)
      # Both variables must now hold exactly the requested values.
      out0, out1 = sess.run([var0, var1])
      self.assertAllEqual(value0, out0)
      self.assertAllEqual(value1, out1)

  def testWithScopes(self):
    value0 = np.asarray([1.0, 3.0, 9.0]).reshape((1, 3, 1))
    value1 = np.asarray([2.0, 4.0, 6.0, 8.0]).reshape((2, 1, 2))
    with self.cached_session() as sess:
      initializer = init_ops.truncated_normal_initializer(stddev=.1)
      with variable_scope.variable_scope('my_model/my_layer0'):
        var0 = variables_lib2.variable(
            'my_var0', shape=[1, 3, 1], initializer=initializer)
      with variable_scope.variable_scope('my_model/my_layer1'):
        var1 = variables_lib2.variable(
            'my_var1', shape=[2, 1, 2], initializer=initializer)
      # The mapping keys are the fully scoped variable names.
      init_fn = variables_lib2.assign_from_values_fn({
          'my_model/my_layer0/my_var0': value0,
          'my_model/my_layer1/my_var1': value1,
      })
      sess.run(variables_lib.global_variables_initializer())
      init_fn(sess)
      out0, out1 = sess.run([var0, var1])
      self.assertAllEqual(value0, out0)
      self.assertAllEqual(value1, out1)
class AssignFromCheckpointTest(test.TestCase):
  """Tests assign_from_checkpoint(): building an assign op plus feed dict
  that restores variable values from a checkpoint on disk."""

  def create_checkpoint_from_values(self,
                                    var_names_to_values,
                                    checkpoint_dir,
                                    global_step=None):
    """Creates a checkpoint from a mapping of name to values in model_dir.

    Args:
      var_names_to_values: a map from variable names to values.
      checkpoint_dir: the directory where the checkpoint will be saved.
      global_step: the global step used to save the checkpoint.

    Returns:
      the model_path to the checkpoint.
    """
    var_list = []
    # Build and save the variables in a throwaway graph/session so they do
    # not pollute the graph constructed by the test that calls this helper.
    with session.Session('', graph=ops.Graph()) as sess:
      # Create a set of variables to save in the checkpoint.
      for var_name in var_names_to_values:
        var_value = var_names_to_values[var_name]
        var_list.append(variables_lib.VariableV1(var_value, name=var_name))
      saver = saver_lib.Saver(var_list)
      init_op = variables_lib.variables_initializer(var_list)
      sess.run(init_op)
      # Save the initialized values in the file at 'checkpoint_dir'
      return saver.save(sess, checkpoint_dir, global_step=global_step)

  def testLoadExistingVariables(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'load_existing_variables'))
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      # In-graph names differ from checkpoint names; the mapping below
      # associates the two.
      var0 = variables_lib2.variable('my_var0', shape=[])
      var1 = variables_lib2.variable('my_var1', shape=[])
      vars_to_restore = {'v0': var0, 'v1': var1}
      op, feed_dict = variables_lib2.assign_from_checkpoint(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      sess.run(op, feed_dict)
      # Request and test the variable values:
      self.assertEqual(init_value0, var0.eval())
      self.assertEqual(init_value1, var1.eval())

  # Tests restoring PartitionedVariables and tests using a dictionary
  # of lists as the assign_from_checkpoint() var_list param.
  def testLoadPartitionedVariables(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'load_partitioned_variables'))
    init_value0 = np.array([[10.0, 11.0], [12.0, 13.0]])
    init_value1 = np.array([20.0])  # Partitioned into 1 part, edge case.
    var_names_to_values = {'var0': init_value0, 'var1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      # var0 and var1 are PartitionedVariables.
      partitioner = partitioned_variables.variable_axis_size_partitioner(2)
      var0 = variables_lib2.variable(
          'var0', shape=init_value0.shape, partitioner=partitioner)
      var0full = variables_lib2.variable('var0full', shape=init_value0.shape)
      var1 = variables_lib2.variable(
          'var1', shape=init_value1.shape, partitioner=partitioner)
      # Convert var0 and var1 into a list of underlying variables.
      vars_to_restore = {'var0': list(var0) + [var0full], 'var1': list(var1)}
      op, feed_dict = variables_lib2.assign_from_checkpoint(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      sess.run(op, feed_dict)
      # Request and test the variable values. PartitionedVariables can't
      # be evaled so we wrap them in an identity.
      self.assertTrue(
          np.array_equal(init_value0,
                         array_ops.identity(var0).eval()))
      self.assertTrue(np.array_equal(init_value0, var0full.eval()))
      self.assertTrue(
          np.array_equal(init_value1,
                         array_ops.identity(var1).eval()))

  def testRaisesValueErrorIfAVariableIsntFound(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(),
                            'raises_value_error_if_var_isnt_found'))
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session():
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[])
      var1 = variables_lib2.variable('my_var1', shape=[])
      # 'v0_fake' is absent from the checkpoint, so building the assign op
      # must fail eagerly with ValueError.
      vars_to_restore = {'v0_fake': var0, 'v1': var1}
      with self.assertRaises(ValueError):
        variables_lib2.assign_from_checkpoint(model_path, vars_to_restore)

  def testInitFromCheckpointWithScopes(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(),
                            'init_from_checkpoint_with_scopes'))
    init_value0 = np.asarray(
        [1.0, 3.0, 9.0], dtype=np.float32).reshape((1, 3, 1))
    init_value1 = np.asarray(
        [2.0, 4.0, 6.0, 8.0], dtype=np.float32).reshape((2, 1, 2))
    var_names_to_values = {'layer0/v0': init_value0, 'layer1/v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      # Checkpoint names need not match the scoped in-graph names.
      with variable_scope.variable_scope('my_model/my_layer0'):
        var0 = variables_lib2.variable('my_var0', shape=init_value0.shape)
      with variable_scope.variable_scope('my_model/my_layer1'):
        var1 = variables_lib2.variable('my_var1', shape=init_value1.shape)
      vars_to_restore = {'layer0/v0': var0, 'layer1/v1': var1}
      op, feed_dict = variables_lib2.assign_from_checkpoint(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      sess.run(op, feed_dict)
      # Request and test the variable values:
      self.assertAllEqual(init_value0, var0.eval())
      self.assertAllEqual(init_value1, var1.eval())
class AssignFromCheckpointFnTest(test.TestCase):
  """Tests assign_from_checkpoint_fn(): the callable-returning restore
  variant, including reshape and missing-variable handling."""

  def create_checkpoint_from_values(self,
                                    var_names_to_values,
                                    checkpoint_dir,
                                    global_step=None):
    """Creates a checkpoint from a mapping of name to values in model_dir.

    Args:
      var_names_to_values: a map from variable names to values.
      checkpoint_dir: the directory where the checkpoint will be saved.
      global_step: the global step used to save the checkpoint.

    Returns:
      the model_path to the checkpoint.
    """
    var_list = []
    # Build and save the variables in a throwaway graph/session so they do
    # not pollute the graph constructed by the test that calls this helper.
    with session.Session('', graph=ops.Graph()) as sess:
      # Create a set of variables to save in the checkpoint.
      for var_name in var_names_to_values:
        var_value = var_names_to_values[var_name]
        var_list.append(variables_lib.VariableV1(var_value, name=var_name))
      saver = saver_lib.Saver(var_list)
      init_op = variables_lib.variables_initializer(var_list)
      sess.run(init_op)
      # Save the initialized values in the file at 'checkpoint_dir'
      return saver.save(sess, checkpoint_dir, global_step=global_step)

  def testLoadExistingVariables(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'load_existing_variables'))
    # Start from an empty directory in case of a leftover from a prior run.
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[])
      var1 = variables_lib2.variable('my_var1', shape=[])
      vars_to_restore = {'v0': var0, 'v1': var1}
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      init_fn(sess)
      # Request and test the variable values:
      self.assertEqual(init_value0, var0.eval())
      self.assertEqual(init_value1, var1.eval())

  def testLoadExistingVariablesDifferentShapeDefaultDoesNotAllowReshape(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(),
                            'load_existing_vars_no_reshape'))
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    # Checkpoint shape is [1, 2]; the in-graph variable will be [2, 1].
    init_value0 = [[10.0, 11.0]]
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[2, 1])
      var1 = variables_lib2.variable('my_var1', shape=[])
      vars_to_restore = {'v0': var0, 'v1': var1}
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      # Without reshape_variables=True, a shape mismatch must fail at
      # restore time.
      with self.assertRaises(errors_impl.InvalidArgumentError):
        init_fn(sess)

  def testLoadExistingVariablesDifferentShapeAllowReshape(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(
            self.get_temp_dir(),
            'load_existing_variables_different_shape_allow_reshape'))
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    init_value0 = [[10.0, 11.0]]
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[2, 1])
      var1 = variables_lib2.variable('my_var1', shape=[])
      vars_to_restore = {'v0': var0, 'v1': var1}
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore, reshape_variables=True)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      init_fn(sess)
      # Request and test the variable values:
      # [1, 2] reshaped into [2, 1] equals the transpose of the original.
      self.assertAllEqual(np.transpose(np.array(init_value0)), var0.eval())
      self.assertEqual(init_value1, var1.eval())

  def testNotFoundError(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'not_found_error'))
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[])
      var1 = variables_lib2.variable('my_var1', shape=[])
      # 'v2' is not in the checkpoint; by default this raises at restore.
      var2 = variables_lib2.variable('my_var2', shape=[])
      vars_to_restore = {'v0': var0, 'v1': var1, 'v2': var2}
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      with self.assertRaises(errors_impl.NotFoundError):
        init_fn(sess)

  def testMissingVariablesList(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'missing_variables_list'))
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('v0', shape=[])
      var1 = variables_lib2.variable('v1', shape=[])
      var2 = variables_lib2.variable('v2', shape=[])
      # A plain list of variables is also accepted as var_list.
      vars_to_restore = [var0, var1, var2]
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore, ignore_missing_vars=True)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      init_fn(sess)
      # Request and test the variable values:
      self.assertEqual(init_value0, var0.eval())
      self.assertEqual(init_value1, var1.eval())

  def testMissingVariablesDict(self):
    model_dir = tempfile.mkdtemp(
        prefix=os.path.join(self.get_temp_dir(), 'missing_variables_dict'))
    if gfile.Exists(model_dir):
      gfile.DeleteRecursively(model_dir)
    init_value0 = 10.0
    init_value1 = 20.0
    var_names_to_values = {'v0': init_value0, 'v1': init_value1}
    with self.cached_session() as sess:
      model_path = self.create_checkpoint_from_values(var_names_to_values,
                                                      model_dir)
      var0 = variables_lib2.variable('my_var0', shape=[])
      var1 = variables_lib2.variable('my_var1', shape=[])
      var2 = variables_lib2.variable('my_var2', shape=[])
      # 'v2' is missing but ignore_missing_vars=True tolerates it.
      vars_to_restore = {'v0': var0, 'v1': var1, 'v2': var2}
      init_fn = variables_lib2.assign_from_checkpoint_fn(
          model_path, vars_to_restore, ignore_missing_vars=True)
      # Initialize the variables.
      sess.run(variables_lib.global_variables_initializer())
      # Perform the assignment.
      init_fn(sess)
      # Request and test the variable values:
      self.assertEqual(init_value0, var0.eval())
      self.assertEqual(init_value1, var1.eval())
class ZeroInitializerOpTest(test.TestCase):
  """Tests for zero_initializer() on ref (VariableV1) variables."""

  def _testZeroInitializer(self, shape, initializer, use_init):
    var = variables_lib.VariableV1(initializer)
    zero_op = variables_lib2.zero_initializer(var)
    with self.cached_session() as sess:
      # The variable must start out uninitialized.
      with self.assertRaisesOpError('Attempting to use uninitialized value'):
        var.eval()
      if use_init:
        # Once regularly initialized, zero_initializer must refuse to run
        # and the original (ones) value must survive.
        sess.run(var.initializer)
        with self.assertRaisesOpError('input is already initialized'):
          zero_op.eval()
        self.assertAllClose(np.ones(shape), var.eval())
      else:
        # Otherwise the op both initializes and fills with zeros.
        zero_op.eval()
        self.assertAllClose(np.zeros(shape), var.eval())

  def testZeroInitializer(self):
    for dtype in (dtypes.int32, dtypes.int64, dtypes.float32, dtypes.float64):
      for use_init in (False, True):
        self._testZeroInitializer(
            [10, 20], array_ops.ones([10, 20], dtype=dtype), use_init)
class ZeroVarInitializerOpTest(test.TestCase):
  """Tests for zero_initializer() on resource variables."""

  def _testZeroVarInitializer(self, shape, initializer, use_init):
    var = resource_variable_ops.ResourceVariable(initializer)
    zero_op = variables_lib2.zero_initializer(var)
    with self.cached_session() as sess:
      # The resource variable must start out uninitialized.
      with self.assertRaisesOpError('Error while reading resource variable'):
        var.eval()
      if use_init:
        # Once regularly initialized, zero_initializer must refuse to run
        # and the original (ones) value must survive.
        sess.run(var.initializer)
        with self.assertRaisesOpError('input is already initialized'):
          zero_op.eval()
        self.assertAllClose(np.ones(shape), var.eval())
      else:
        # Otherwise the op both initializes and fills with zeros.
        zero_op.eval()
        self.assertAllClose(np.zeros(shape), var.eval())

  def testZeroVarInitializer(self):
    for dtype in (dtypes.int32, dtypes.int64, dtypes.float32, dtypes.float64):
      for use_init in (False, True):
        self._testZeroVarInitializer(
            [10, 20], array_ops.ones([10, 20], dtype=dtype), use_init)
class FilterVariablesTest(test.TestCase):
  """Tests for filter_variables() include/exclude pattern handling."""

  def setUp(self):
    g = ops.Graph()
    with g.as_default():
      layer_names = [
          'conv1/weights', 'conv1/biases',
          'conv2/weights', 'conv2/biases',
          'clfs/weights', 'clfs/biases',
      ]
      self._var_list = [
          variables_lib.VariableV1(0, name=name) for name in layer_names
      ]

  def _test_filter_variables(self,
                             expected_var_names,
                             include_patterns=None,
                             exclude_patterns=None,
                             reg_search=True):
    """Filters self._var_list and checks the result matches expected names."""
    filtered = variables_lib2.filter_variables(
        self._var_list,
        include_patterns=include_patterns,
        exclude_patterns=exclude_patterns,
        reg_search=reg_search)
    filtered_names = [var.op.name for var in filtered]
    # Mutual containment plus equal length means the two name lists hold
    # exactly the same elements.
    for name in filtered_names:
      self.assertIn(name, expected_var_names)
    for name in expected_var_names:
      self.assertIn(name, filtered_names)
    self.assertEqual(len(filtered_names), len(expected_var_names))

  def testNoFiltering(self):
    self._test_filter_variables(expected_var_names=[
        'conv1/weights', 'conv1/biases', 'conv2/weights', 'conv2/biases',
        'clfs/weights', 'clfs/biases'
    ])

  def testIncludeBiases(self):
    self._test_filter_variables(
        expected_var_names=['conv1/biases', 'conv2/biases', 'clfs/biases'],
        include_patterns=['biases'])

  def testExcludeWeights(self):
    self._test_filter_variables(
        expected_var_names=['conv1/biases', 'conv2/biases', 'clfs/biases'],
        exclude_patterns=['weights'])

  def testExcludeWeightsAndConv1(self):
    self._test_filter_variables(
        expected_var_names=['conv2/biases', 'clfs/biases'],
        exclude_patterns=['weights', 'conv1'])

  def testTwoIncludePatternsEnsureNoVariablesTwiceInFilteredList(self):
    self._test_filter_variables(
        expected_var_names=[
            'conv1/weights', 'conv1/biases', 'conv2/weights', 'clfs/weights'
        ],
        include_patterns=['conv1', 'weights'])

  def testIncludeConv1ExcludeBiases(self):
    self._test_filter_variables(
        expected_var_names=['conv1/weights'],
        include_patterns=['conv1'],
        exclude_patterns=['biases'])

  def testRegMatchIncludeBiases(self):
    # With reg_search=False the pattern must match from the beginning,
    # so a leading wildcard is required to reach 'biases'.
    self._test_filter_variables(
        expected_var_names=['conv1/biases', 'conv2/biases', 'clfs/biases'],
        include_patterns=['.*biases'],
        reg_search=False)

  def testRegMatchIncludeBiasesWithIncompleteRegExpHasNoMatches(self):
    self._test_filter_variables(
        expected_var_names=[], include_patterns=['biases'], reg_search=False)
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 SINA Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
from tempest.openstack.common import gettextutils
from tempest.openstack.common import importutils
# Install the _() translation function into builtins for the modules we scan.
gettextutils.install('tempest')

# Names of the oslo.config option classes recognized in source text.
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
MULTISTROPT = "MultiStrOpt"

# Human-readable type descriptions emitted next to each option in the
# generated sample configuration.
OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    MULTISTROPT: 'multi valued',
}

# Matches any of the known option class names above.
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT,
                                              MULTISTROPT]))

PY_EXT = ".py"
# Repository root: four directory levels up from this file.
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       "../../../../"))
# Column at which generated option help text is word-wrapped.
WORDWRAP_WIDTH = 60
def generate(srcfiles):
    """Print a sample config file for the options found in *srcfiles*.

    Each source file path is mapped to an importable module string, every
    module is imported, and the options it defines are grouped and printed:
    the DEFAULT group first, then the remaining groups.

    :param srcfiles: list of source file paths relative to the repo root.
    :raises RuntimeError: if any referenced module cannot be imported.
    """
    mods_by_pkg = {}
    for filepath in srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, []).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages.
    # sorted() instead of filter(...).sort(): on Python 3, filter() returns
    # an iterator with no .sort() method, so the original broke there.
    pkg_names = sorted(name for name in mods_by_pkg if name.endswith(PY_EXT))
    ext_names = sorted(name for name in mods_by_pkg if name not in pkg_names)
    pkg_names.extend(ext_names)

    # opts_by_group maps a group name to a list of (module, options) tuples.
    opts_by_group = {'DEFAULT': []}

    # Extra modules may be injected through the environment.
    for module_name in os.getenv(
            "OSLO_CONFIG_GENERATOR_EXTRA_MODULES", "").split(','):
        module = _import_module(module_name)
        if module:
            for group, opts in _list_opts(module):
                opts_by_group.setdefault(group, []).append((module_name, opts))

    for pkg_name in pkg_names:
        mods = sorted(mods_by_pkg.get(pkg_name))
        for mod_str in mods:
            # Import a package by its package name, not its __init__ module.
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]
            mod_obj = _import_module(mod_str)
            if not mod_obj:
                raise RuntimeError("Unable to import module %s" % mod_str)
            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))

    # DEFAULT always comes first in the generated file.
    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group, opts in opts_by_group.items():
        print_group_opts(group, opts)
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except ImportError as ie:
sys.stderr.write("%s\n" % str(ie))
return None
except Exception:
return None
def _is_in_group(opt, group):
"Check if opt is in group."
for key, value in group._opts.items():
if value['opt'] == opt:
return True
return False
def _guess_groups(opt, mod_obj):
    """Return the name of the cfg.CONF group that *opt* is registered in.

    Checks the DEFAULT group first, then every named group attribute on
    the global config object.  Raises RuntimeError if no group claims the
    option.
    """
    # Options registered directly on cfg.CONF live in the DEFAULT group.
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'
    # Otherwise scan the named groups attached to the config object.
    for attr_name, attr in cfg.CONF.items():
        if isinstance(attr, cfg.CONF.GroupAttr) and _is_in_group(opt, attr._group):
            return attr._group.name
    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )
def _list_opts(obj):
    """Collect the cfg.Opt instances exposed on *obj*, grouped by config group.

    Scans every attribute of *obj* for option objects (or lists made up
    entirely of option objects) and returns (group_name, [opts]) pairs.
    """
    def _is_real_opt(candidate):
        # SubCommandOpt instances cannot appear in a sample config file.
        return (isinstance(candidate, cfg.Opt) and
                not isinstance(candidate, cfg.SubCommandOpt))

    discovered = []
    for attr_name in dir(obj):
        attr = getattr(obj, attr_name)
        if _is_real_opt(attr):
            discovered.append(attr)
        elif (isinstance(attr, list) and
              all(_is_real_opt(item) for item in attr)):
            discovered.extend(attr)

    grouped = {}
    for opt in discovered:
        grouped.setdefault(_guess_groups(opt, obj), []).append(opt)
    return grouped.items()
def print_group_opts(group, opts_by_module):
    """Print one "[group]" section of the sample config file.

    :param group: the config group name.
    :param opts_by_module: list of (module_name, options) tuples.
    """
    print("[%s]" % group)
    print('')
    for mod, opts in opts_by_module:
        # Banner naming the module the following options come from,
        # followed by one commented-out entry per option.
        for banner_line in ('#', '# Options defined in %s' % mod, '#', ''):
            print(banner_line)
        for opt in opts:
            _print_opt(opt)
        print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host."""
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first
# part.
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value == socket.gethostname() and 'host' in name:
return 'tempest'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
    """Print one commented-out '#name=value' sample entry for *opt*.

    Emits the wrapped help text, any deprecated group/name aliases, and a
    default value formatted according to the option's type.  Exits the
    process on an unrecognized option type or a default that fails its
    type assertion.
    """
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        # The option class name (e.g. "StrOpt") is embedded in str(type(opt)).
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        # AttributeError: regex did not match (unknown option type).
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help += ' (' + OPT_TYPES[opt_type] + ')'
    # Wrap the help text as a comment block above the sample entry.
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    if opt.deprecated_opts:
        for deprecated_opt in opt.deprecated_opts:
            if deprecated_opt.name:
                # A deprecated alias with no explicit group lived in DEFAULT.
                deprecated_group = (deprecated_opt.group if
                                    deprecated_opt.group else "DEFAULT")
                print('# Deprecated group/name - [%s]/%s' %
                      (deprecated_group,
                       deprecated_opt.name))
    try:
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            # NOTE: basestring makes this module Python-2-only.
            assert(isinstance(opt_default, basestring))
            print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
                                                          opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            # Config files use lowercase true/false.
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            # bool is a subclass of int, so exclude it explicitly.
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print('#%s=%s' % (opt_name, ','.join(opt_default)))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            if not opt_default:
                opt_default = ['']
            # Multi-valued options are printed one entry per line.
            for default in opt_default:
                print('#%s=%s' % (opt_name, default))
        print('')
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)
def main():
    # Every command line argument is treated as a source file path.
    generate(sys.argv[1:])


if __name__ == '__main__':
    main()
| apache-2.0 |
rchaber/publishbay | boilerplate/routes.py | 10 | 2696 | """
Using redirect route instead of simple routes since it supports strict_slash
Simple route: http://webapp-improved.appspot.com/guide/routing.html#simple-routes
RedirectRoute: http://webapp-improved.appspot.com/api/webapp2_extras/routes.html#webapp2_extras.routes.RedirectRoute
"""
from webapp2_extras.routes import RedirectRoute
import handlers
secure_scheme = 'https'
# Route table mapping URL patterns to handler classes.  RedirectRoute is
# used instead of plain routes because it supports strict_slash
# (trailing-slash normalization via redirect).
_routes = [
    RedirectRoute('/taskqueue-send-email/', handlers.SendEmailHandler, name='taskqueue-send-email', strict_slash=True),
    RedirectRoute('/_ah/login_required', handlers.LoginRequiredHandler),
    RedirectRoute('/login/', handlers.LoginHandler, name='login', strict_slash=True),
    RedirectRoute('/logout/', handlers.LogoutHandler, name='logout', strict_slash=True),
    RedirectRoute('/social_login/<provider_name>', handlers.SocialLoginHandler, name='social-login', strict_slash=True),
    RedirectRoute('/social_login/<provider_name>/complete', handlers.CallbackSocialLoginHandler, name='social-login-complete', strict_slash=True),
    RedirectRoute('/social_login/<provider_name>/delete', handlers.DeleteSocialProviderHandler, name='delete-social-provider', strict_slash=True),
    RedirectRoute('/register/', handlers.RegisterHandler, name='register', strict_slash=True),
    RedirectRoute('/activation/<user_id>/<token>', handlers.AccountActivationHandler, name='account-activation', strict_slash=True),
    RedirectRoute('/resend/<user_id>/<token>', handlers.ResendActivationEmailHandler, name='resend-account-activation', strict_slash=True),
    RedirectRoute('/contact/', handlers.ContactHandler, name='contact', strict_slash=True),
    RedirectRoute('/settings/profile', handlers.EditProfileHandler, name='edit-profile', strict_slash=True),
    RedirectRoute('/settings/password', handlers.EditPasswordHandler, name='edit-password', strict_slash=True),
    RedirectRoute('/settings/email', handlers.EditEmailHandler, name='edit-email', strict_slash=True),
    RedirectRoute('/password-reset/', handlers.PasswordResetHandler, name='password-reset', strict_slash=True),
    RedirectRoute('/password-reset/<user_id>/<token>', handlers.PasswordResetCompleteHandler, name='password-reset-check', strict_slash=True),
    RedirectRoute('/change-email/<user_id>/<encoded_email>/<token>', handlers.EmailChangedCompleteHandler, name='email-changed-check', strict_slash=True),
    RedirectRoute('/abtest/', handlers.AbTestHandler, name='abtest', strict_slash=True),
    RedirectRoute('/', handlers.HomeRequestHandler, name='home', strict_slash=True)
]
def get_routes():
    """Return the module-level list of RedirectRoute objects."""
    return _routes
def add_routes(app):
    """Register every route from _routes on the application's router.

    When the app runs in debug mode, the module-level secure_scheme is
    downgraded to plain 'http' so "secure" routes work locally.
    """
    # BUG FIX: the original assigned secure_scheme to a function-local
    # variable, which silently had no effect on the module-level setting.
    global secure_scheme
    if app.debug:
        secure_scheme = 'http'
    for r in _routes:
        app.router.add(r)
| lgpl-3.0 |
theanalyst/cinder | cinder/api/contrib/image_create.py | 44 | 1062 | # Copyright (c) 2012 NTT.
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Create Volume from Image extension."""
from cinder.api import extensions
class Image_create(extensions.ExtensionDescriptor):
    """Allow creating a volume from an image in the Create Volume v1 API."""

    # Extension metadata consumed by the cinder extension loader; this
    # descriptor carries no behavior of its own.
    name = "CreateVolumeExtension"
    alias = "os-image-create"
    namespace = "http://docs.openstack.org/volume/ext/image-create/api/v1"
    updated = "2012-08-13T00:00:00+00:00"
| apache-2.0 |
michaelkuty/django-oscar | src/oscar/apps/offer/migrations/0001_initial.py | 52 | 15207 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import oscar.models.fields.autoslugfield
from decimal import Decimal
import oscar.models.fields
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the offer app.

    Auto-generated Django migration: creates the concrete Benefit,
    Condition, ConditionalOffer, Range, RangeProduct and
    RangeProductFileUpload tables, then registers the proxy subclasses of
    Benefit/Condition.  Do not edit the operations by hand; generate a new
    migration instead.
    """

    dependencies = [
        ('catalogue', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Concrete models.
        migrations.CreateModel(
            name='Benefit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(verbose_name='Type', max_length=128, blank=True, choices=[('Percentage', "Discount is a percentage off of the product's value"), ('Absolute', "Discount is a fixed amount off of the product's value"), ('Multibuy', 'Discount is to give the cheapest product for free'), ('Fixed price', 'Get the products that meet the condition for a fixed price'), ('Shipping absolute', 'Discount is a fixed amount of the shipping cost'), ('Shipping fixed price', 'Get shipping for a fixed price'), ('Shipping percentage', 'Discount is a percentage off of the shipping cost')])),
                ('value', oscar.models.fields.PositiveDecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Value', null=True)),
                ('max_affected_items', models.PositiveIntegerField(verbose_name='Max Affected Items', blank=True, help_text='Set this to prevent the discount consuming all items within the range that are in the basket.', null=True)),
                ('proxy_class', oscar.models.fields.NullCharField(unique=True, verbose_name='Custom class', default=None, max_length=255)),
            ],
            options={
                'verbose_name_plural': 'Benefits',
                'verbose_name': 'Benefit',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Condition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(verbose_name='Type', max_length=128, blank=True, choices=[('Count', 'Depends on number of items in basket that are in condition range'), ('Value', 'Depends on value of items in basket that are in condition range'), ('Coverage', 'Needs to contain a set number of DISTINCT items from the condition range')])),
                ('value', oscar.models.fields.PositiveDecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Value', null=True)),
                ('proxy_class', oscar.models.fields.NullCharField(unique=True, verbose_name='Custom class', default=None, max_length=255)),
            ],
            options={
                'verbose_name_plural': 'Conditions',
                'verbose_name': 'Condition',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ConditionalOffer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(verbose_name='Name', unique=True, max_length=128, help_text="This is displayed within the customer's basket")),
                ('slug', oscar.models.fields.autoslugfield.AutoSlugField(populate_from='name', unique=True, verbose_name='Slug', max_length=128, editable=False, blank=True)),
                ('description', models.TextField(verbose_name='Description', help_text='This is displayed on the offer browsing page', blank=True)),
                ('offer_type', models.CharField(default='Site', max_length=128, verbose_name='Type', choices=[('Site', 'Site offer - available to all users'), ('Voucher', 'Voucher offer - only available after entering the appropriate voucher code'), ('User', 'User offer - available to certain types of user'), ('Session', 'Session offer - temporary offer, available for a user for the duration of their session')])),
                ('status', models.CharField(default='Open', max_length=64, verbose_name='Status')),
                ('priority', models.IntegerField(default=0, verbose_name='Priority', help_text='The highest priority offers are applied first')),
                ('start_datetime', models.DateTimeField(blank=True, verbose_name='Start date', null=True)),
                ('end_datetime', models.DateTimeField(verbose_name='End date', blank=True, help_text="Offers are active until the end of the 'end date'", null=True)),
                ('max_global_applications', models.PositiveIntegerField(verbose_name='Max global applications', blank=True, help_text='The number of times this offer can be used before it is unavailable', null=True)),
                ('max_user_applications', models.PositiveIntegerField(verbose_name='Max user applications', blank=True, help_text='The number of times a single user can use this offer', null=True)),
                ('max_basket_applications', models.PositiveIntegerField(verbose_name='Max basket applications', blank=True, help_text='The number of times this offer can be applied to a basket (and order)', null=True)),
                ('max_discount', models.DecimalField(verbose_name='Max discount', max_digits=12, decimal_places=2, null=True, help_text='When an offer has given more discount to orders than this threshold, then the offer becomes unavailable', blank=True)),
                ('total_discount', models.DecimalField(default=Decimal('0.00'), max_digits=12, decimal_places=2, verbose_name='Total Discount')),
                ('num_applications', models.PositiveIntegerField(default=0, verbose_name='Number of applications')),
                ('num_orders', models.PositiveIntegerField(default=0, verbose_name='Number of Orders')),
                ('redirect_url', oscar.models.fields.ExtendedURLField(verbose_name='URL redirect (optional)', blank=True)),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
                ('benefit', models.ForeignKey(verbose_name='Benefit', to='offer.Benefit')),
                ('condition', models.ForeignKey(verbose_name='Condition', to='offer.Condition')),
            ],
            options={
                'ordering': ['-priority'],
                'verbose_name_plural': 'Conditional offers',
                'verbose_name': 'Conditional offer',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Range',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(unique=True, max_length=128, verbose_name='Name')),
                ('slug', oscar.models.fields.autoslugfield.AutoSlugField(populate_from='name', unique=True, verbose_name='Slug', max_length=128, editable=False, blank=True)),
                ('description', models.TextField(blank=True)),
                ('is_public', models.BooleanField(default=False, verbose_name='Is public?', help_text='Public ranges have a customer-facing page')),
                ('includes_all_products', models.BooleanField(default=False, verbose_name='Includes all products?')),
                ('proxy_class', oscar.models.fields.NullCharField(unique=True, verbose_name='Custom class', default=None, max_length=255)),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
                ('classes', models.ManyToManyField(related_name='classes', verbose_name='Product Types', to='catalogue.ProductClass', blank=True)),
                ('excluded_products', models.ManyToManyField(related_name='excludes', verbose_name='Excluded Products', to='catalogue.Product', blank=True)),
                ('included_categories', models.ManyToManyField(related_name='includes', verbose_name='Included Categories', to='catalogue.Category', blank=True)),
            ],
            options={
                'verbose_name_plural': 'Ranges',
                'verbose_name': 'Range',
            },
            bases=(models.Model,),
        ),
        # Through table for Range <-> Product with explicit ordering.
        migrations.CreateModel(
            name='RangeProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('display_order', models.IntegerField(default=0)),
                ('product', models.ForeignKey(to='catalogue.Product')),
                ('range', models.ForeignKey(to='offer.Range')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='RangeProductFileUpload',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('filepath', models.CharField(max_length=255, verbose_name='File Path')),
                ('size', models.PositiveIntegerField(verbose_name='Size')),
                ('date_uploaded', models.DateTimeField(auto_now_add=True, verbose_name='Date Uploaded')),
                ('status', models.CharField(default='Pending', max_length=32, verbose_name='Status', choices=[('Pending', 'Pending'), ('Failed', 'Failed'), ('Processed', 'Processed')])),
                ('error_message', models.CharField(max_length=255, verbose_name='Error Message', blank=True)),
                ('date_processed', models.DateTimeField(verbose_name='Date Processed', null=True)),
                ('num_new_skus', models.PositiveIntegerField(verbose_name='Number of New SKUs', null=True)),
                ('num_unknown_skus', models.PositiveIntegerField(verbose_name='Number of Unknown SKUs', null=True)),
                ('num_duplicate_skus', models.PositiveIntegerField(verbose_name='Number of Duplicate SKUs', null=True)),
                ('range', models.ForeignKey(verbose_name='Range', related_name='file_uploads', to='offer.Range')),
                ('uploaded_by', models.ForeignKey(verbose_name='Uploaded By', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('-date_uploaded',),
                'verbose_name_plural': 'Range Product Uploaded Files',
                'verbose_name': 'Range Product Uploaded File',
            },
            bases=(models.Model,),
        ),
        # Constraints and cross-model foreign keys added after creation.
        migrations.AlterUniqueTogether(
            name='rangeproduct',
            unique_together=set([('range', 'product')]),
        ),
        migrations.AddField(
            model_name='range',
            name='included_products',
            field=models.ManyToManyField(related_name='includes', verbose_name='Included Products', to='catalogue.Product', through='offer.RangeProduct', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='condition',
            name='range',
            field=models.ForeignKey(null=True, verbose_name='Range', to='offer.Range', blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='benefit',
            name='range',
            field=models.ForeignKey(null=True, verbose_name='Range', to='offer.Range', blank=True),
            preserve_default=True,
        ),
        # Proxy models: no new tables, only Python-level subclasses.
        migrations.CreateModel(
            name='AbsoluteDiscountBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Absolute discount benefits',
                'verbose_name': 'Absolute discount benefit',
                'proxy': True,
            },
            bases=('offer.benefit',),
        ),
        migrations.CreateModel(
            name='CountCondition',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Count conditions',
                'verbose_name': 'Count condition',
                'proxy': True,
            },
            bases=('offer.condition',),
        ),
        migrations.CreateModel(
            name='CoverageCondition',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Coverage Conditions',
                'verbose_name': 'Coverage Condition',
                'proxy': True,
            },
            bases=('offer.condition',),
        ),
        migrations.CreateModel(
            name='FixedPriceBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Fixed price benefits',
                'verbose_name': 'Fixed price benefit',
                'proxy': True,
            },
            bases=('offer.benefit',),
        ),
        migrations.CreateModel(
            name='MultibuyDiscountBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Multibuy discount benefits',
                'verbose_name': 'Multibuy discount benefit',
                'proxy': True,
            },
            bases=('offer.benefit',),
        ),
        migrations.CreateModel(
            name='PercentageDiscountBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Percentage discount benefits',
                'verbose_name': 'Percentage discount benefit',
                'proxy': True,
            },
            bases=('offer.benefit',),
        ),
        migrations.CreateModel(
            name='ShippingBenefit',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('offer.benefit',),
        ),
        migrations.CreateModel(
            name='ShippingAbsoluteDiscountBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Shipping absolute discount benefits',
                'verbose_name': 'Shipping absolute discount benefit',
                'proxy': True,
            },
            bases=('offer.shippingbenefit',),
        ),
        migrations.CreateModel(
            name='ShippingFixedPriceBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Fixed price shipping benefits',
                'verbose_name': 'Fixed price shipping benefit',
                'proxy': True,
            },
            bases=('offer.shippingbenefit',),
        ),
        migrations.CreateModel(
            name='ShippingPercentageDiscountBenefit',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Shipping percentage discount benefits',
                'verbose_name': 'Shipping percentage discount benefit',
                'proxy': True,
            },
            bases=('offer.shippingbenefit',),
        ),
        migrations.CreateModel(
            name='ValueCondition',
            fields=[
            ],
            options={
                'verbose_name_plural': 'Value conditions',
                'verbose_name': 'Value condition',
                'proxy': True,
            },
            bases=('offer.condition',),
        ),
    ]
| bsd-3-clause |
ArVID220u/TheHumanBot | similarity_analyzer.py | 1 | 9527 | # import time for the export data loop
import time
# send error message
from error_messenger import send_error_message
# The similarity analyzer class
# It needs to be a class, since it needs to have an initialization
class SimilarityAnalyzer():
    """Matches incoming tweets against stored (base tweet, response) pairs.

    A word-frequency table weights rare words higher than common ones when
    scoring similarity.  "responses.txt" holds alternating lines: an even
    row (0-indexed) is a response, and the odd row following it is the base
    tweet that response belongs to.

    Fixes over the original implementation:
    - empty-string tokens (from repeated/leading/trailing spaces) used to
      raise KeyError / ZeroDivisionError in analyze_tweet; they are now
      filtered out,
    - frequency state is now per-instance instead of shared class-level
      mutable dicts,
    - per-word weights are computed once per call instead of once per
      candidate row.
    """

    # Class-level defaults kept for backward compatibility; __init__
    # replaces them with per-instance containers.
    # Raw frequency count per word.
    word_frequency = {}
    # Average of the ten biggest counts; normalizes raw counts.
    max_frequency = 1  # dummy value
    # Running list of the ten biggest frequency counts seen so far.
    ten_biggest_frequencies = []

    def __init__(self):
        """Load word_frequencies.txt into memory for constant-time lookups.

        File format: the first line holds the ten biggest counts separated
        by spaces; every following line holds "<word> <count>".
        """
        # Per-instance state so two analyzers never share (and corrupt)
        # each other's tables.
        self.word_frequency = {}
        self.ten_biggest_frequencies = []
        self.max_frequency = 1
        with open("word_frequencies.txt") as frequency_data:
            first_line = True
            for line in frequency_data:
                # Strip the trailing newline.
                line_string = line[:-1]
                if first_line:
                    self.ten_biggest_frequencies = [
                        int(x) for x in line_string.split(" ")]
                    # max_frequency is the mean of the top-10 counts.
                    self.max_frequency = (
                        sum(self.ten_biggest_frequencies) /
                        len(self.ten_biggest_frequencies))
                    first_line = False
                else:
                    word_count_list = line_string.split(" ")
                    if len(word_count_list) >= 2:
                        self.word_frequency[word_count_list[0]] = int(
                            word_count_list[1])

    def analyze_tweet(self, tweet_text, max_length):
        """Find the stored response whose base tweet best matches tweet_text.

        Returns (best_response_text, similarity_ratio): the ratio is a
        value between 0 (no common words) and 1 (full match).  Only
        responses of at most max_length characters are considered.
        """
        if tweet_text == "":
            # Should never happen, but guard anyway.
            return ("", 0)
        # Filter out empty tokens: they are never recorded in the
        # frequency table, so looking them up below would raise KeyError.
        base_words_set = set(word for word in tweet_text.split(" ") if word)
        base_words_list = list(base_words_set)
        if not base_words_list:
            # Whitespace-only tweet: nothing to match (also avoids a
            # division by zero when normalizing at the end).
            return ("", 0)
        # Record this tweet's words first so every base word is guaranteed
        # an entry in word_frequency.
        self.update_word_frequency(tweet_text.split(" "))
        # Each base word contributes between 1.0 (very common) and 3.0
        # (rarer than max_frequency) to a match.  Compute once per call.
        word_weights = {}
        for base_word in base_words_list:
            word_weights[base_word] = 1 + 2 * (
                1 - min(1, self.word_frequency[base_word] / self.max_frequency))
        best_response = ""
        best_similarity_score = 0
        with open("responses.txt") as responses_file:
            # The response on an even row belongs to the base tweet on the
            # odd row that follows it.
            last_response = ""
            comparing_base_row = False
            for line in responses_file:
                # Drop the trailing newline and decode escaped newlines.
                test_string = line[:-1].replace("\\n", "\n")
                if comparing_base_row:
                    comparing_base_row = False
                    test_words_set = set(test_string.split(" "))
                    similarity_score = 0
                    # Add the weight of every shared word...
                    for base_word in base_words_list:
                        if base_word in test_words_set:
                            similarity_score += word_weights[base_word]
                    # ...and a small penalty for each extra word in the
                    # candidate that the incoming tweet lacks.
                    for test_word in test_words_set:
                        if test_word not in base_words_set:
                            similarity_score -= 0.05
                    # '>=' prefers the most recent row on ties; responses
                    # longer than max_length are skipped.
                    if (similarity_score >= best_similarity_score and
                            len(last_response) <= max_length):
                        best_similarity_score = similarity_score
                        best_response = last_response
                else:
                    comparing_base_row = True
                    last_response = test_string
        # Normalize against the best possible score (every word matching).
        max_score = 0
        for base_word in base_words_list:
            max_score += word_weights[base_word]
        similarity_ratio = best_similarity_score / max_score
        return (best_response, similarity_ratio)

    def update_word_frequency(self, words):
        """Record one occurrence of each word; empty strings are ignored.

        Keeps ten_biggest_frequencies and max_frequency in sync with the
        updated counts.
        """
        for word in words:
            if not word:
                continue
            self.word_frequency[word] = self.word_frequency.get(word, 0) + 1
            count = self.word_frequency[word]
            if not self.ten_biggest_frequencies:
                # No top-10 data loaded; nothing to maintain.
                continue
            # Locate the smallest of the current top-10 counts.
            least_index = min(
                range(len(self.ten_biggest_frequencies)),
                key=self.ten_biggest_frequencies.__getitem__)
            least_frequency = self.ten_biggest_frequencies[least_index]
            if count > least_frequency:
                # Displace the smallest entry and shift the running average
                # by the difference (assumes the list holds 10 entries,
                # hence the fixed /10 — TODO confirm for shorter lists).
                self.ten_biggest_frequencies[least_index] = count
                self.max_frequency += count / 10 - least_frequency / 10

    def export_data_loop(self):
        """Run forever, writing the word-frequency table to disk hourly.

        Intended to run in its own thread.  On any failure it reports the
        error and backs off for two hours before resuming.
        """
        while True:
            try:
                # Sleep for an hour between exports.
                time.sleep(60 * 60)
                print("writing word frequencies to disk. DON'T STOP PROGRAM EXECUTION, or else information may be lost")
                with open("word_frequencies.txt", "w") as frequency_file:
                    # First line: the ten biggest counts, space separated.
                    frequency_file.write(" ".join(
                        str(x) for x in self.ten_biggest_frequencies) + "\n")
                    # Then one "<word> <count>" line per entry.
                    for word, count in self.word_frequency.items():
                        frequency_file.write(word + " " + str(count) + "\n")
                print("finished writing word frequencies to disk. it is now (relatively) safe to halt the execution.")
            except Exception as exception:
                print(exception)
                print("Error in export_data_loop. will sleep for 2 hours")
                send_error_message(exception, "export_data_loop")
                time.sleep(2 * 60 * 60)
                print("has slept after exception in export_data_loop. will now resume")
| mit |
scottmcmaster/catapult | dashboard/dashboard/report_test.py | 4 | 5373 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
import webapp2
import webtest
from google.appengine.ext import ndb
from dashboard import report
from dashboard import testing_common
from dashboard import update_test_suites
from dashboard.models import page_state
class ReportTest(testing_common.TestCase):
def setUp(self):
super(ReportTest, self).setUp()
app = webapp2.WSGIApplication(
[('/report', report.ReportHandler),
('/update_test_suites', update_test_suites.UpdateTestSuitesHandler)])
self.testapp = webtest.TestApp(app)
def _AddTestSuites(self):
"""Adds sample data and sets the list of test suites."""
# Mock out some data for a test.
masters = [
'ChromiumPerf',
'ChromiumGPU',
]
bots = [
'chromium-rel-win7-gpu-ati',
'linux-release',
]
tests = {
'scrolling_benchmark': {
'a_first_listed_test': {},
'average_commit_time': {
'answers.yahoo.com': {},
'www.cnn.com': {},
},
'average_commit_time_ref': {},
},
'dromaeo': {},
}
testing_common.AddTests(masters, bots, tests)
for m in masters:
for b in bots:
for t in tests:
t = ndb.Key('Master', m, 'Bot', b, 'Test', t).get()
t.description = 'This should show up'
t.put()
# Before the test suites data gets generated, the cached test suites
# data must be updated.
self.testapp.post('/update_test_suites')
def testGet_EmbedsTestSuites(self):
self._AddTestSuites()
# We expect to have this JavaScript in the rendered HTML.
expected_suites = {
'scrolling_benchmark': {
'masters': {
'ChromiumPerf': ['chromium-rel-win7-gpu-ati', 'linux-release'],
'ChromiumGPU': ['chromium-rel-win7-gpu-ati', 'linux-release'],
},
'monitored': [],
'description': 'This should show up',
'deprecated': False,
},
'dromaeo': {
'masters': {
'ChromiumPerf': ['chromium-rel-win7-gpu-ati', 'linux-release'],
'ChromiumGPU': ['chromium-rel-win7-gpu-ati', 'linux-release'],
},
'monitored': [],
'description': 'This should show up',
'deprecated': False,
},
}
response = self.testapp.get('/report')
actual_suites = self.GetEmbeddedVariable(response, 'TEST_SUITES')
self.assertEqual(expected_suites, actual_suites)
def testGet_OldUri(self):
expected_state = {
'charts': [
[['ChromiumGPU/linux/scrolling/num_layers', ['num_layers']]],
[['ChromiumGPU/linux/scrolling/num_layers/about.com',
['num_layers']]],
[['ChromiumGPU/win/scrolling/num_layers', ['num_layers']]],
[['ChromiumGPU/win/scrolling/num_layers/about.com',
['num_layers']]],
]
}
response = self.testapp.get(
'/report'
'?masters=ChromiumGPU&bots=linux,win'
'&tests=scrolling/num_layers,scrolling/num_layers/about.com'
'&checked=num_layers')
# We expect to get a URL redirect with an sid.
location = response.headers.get('location')
self.assertIn('sid=', location)
state_id = location.split('sid=')[1]
state = ndb.Key(page_state.PageState, state_id).get()
self.assertEqual(json.dumps(expected_state, separators=(',', ':')),
state.value)
def testGet_OldUriMissingTestParam(self):
response = self.testapp.get(
'/report'
'?masters=ChromiumGPU&bots=linux,win'
'&checked=num_layers')
location = response.headers.get('location')
self.assertIsNone(location)
states = page_state.PageState.query().fetch()
self.assertEqual(0, len(states))
def testGet_OldUriMissingSubTest(self):
# When the old URI names only a suite (no sub-test), the generated
# state should default to the suite's first listed sub-test.
self._AddTestSuites()
testing_common.AddRows(
'ChromiumGPU/linux-release/scrolling_benchmark/a_first_listed_test',
{200})
expected_state = {
'charts': [
[[('ChromiumGPU/linux-release/scrolling_benchmark/'
'a_first_listed_test'),
['a_first_listed_test']]],
]
}
response = self.testapp.get(
'/report'
'?masters=ChromiumGPU&bots=linux-release'
'&tests=scrolling_benchmark')
# We expect to get a URL redirect with an sid.
location = response.headers.get('location')
self.assertIn('sid=', location)
state_id = location.split('sid=')[1]
state = ndb.Key(page_state.PageState, state_id).get()
self.assertEqual(json.dumps(expected_state, separators=(',', ':')),
state.value)
def testGet_OldUriWithRevisionParams(self):
# start_rev/end_rev from the old URI must be carried through on the
# redirect URL alongside the new sid parameter.
response = self.testapp.get(
'/report'
'?masters=ChromiumGPU&bots=linux,win'
'&tests=scrolling/num_layers,scrolling/num_layers/about.com'
'&checked=num_layers&start_rev=1234&end_rev=5678')
location = response.headers.get('location')
self.assertIn('sid=', location)
self.assertIn('start_rev=1234', location)
self.assertIn('end_rev=5678', location)
# Allow running this test module directly.
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
ycsoft/FatCat-Server | LIBS/boost_1_58_0/tools/build/src/build/configure.py | 9 | 5453 | # Status: ported.
# Base revison: 64488
#
# Copyright (c) 2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# This module defines function to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
# build process.
# - Reporting what is built, and how it is configured.
import b2.build.property as property
import b2.build.property_set as property_set
import b2.build.targets
from b2.manager import get_manager
from b2.util.sequence import unique
from b2.util import bjam_signature, value_to_jam
import bjam
import os
# Default column width for right-aligned configure-check report lines.
__width = 30
def set_width(width):
    """Set the column width used to right-align configure-check report lines.

    Fix: the original body assigned the constant 30, ignoring the *width*
    argument entirely, which made this setter a no-op.
    """
    global __width
    __width = width
# Mutable module-level state for configure-check reporting.
# All components declared to exist (see register_components()).
__components = []
# The subset of components that will actually be built.
__built_components = []
# Per-component lists of user-facing configuration messages.
__component_logs = {}
# Whether the "Performing configuration checks" header was printed yet.
__announced_checks = False
# Log file object (kept referenced so it is not garbage-collected and
# closed) and its raw file descriptor, as handed to bjam.
__log_file = None
__log_fd = -1
def register_components(components):
"""Declare that the components specified by the parameter exist."""
# Accumulated into module state; reported by print_component_configuration().
__components.extend(components)
def components_building(components):
"""Declare that the components specified by the parameters will be built."""
__built_components.extend(components)
def log_component_configuration(component, message):
"""Report something about component configuration that the user should better know."""
# setdefault creates the per-component message list on first use.
__component_logs.setdefault(component, []).append(message)
def log_check_result(result):
# Print a single configure-check result line, emitting the
# "Performing configuration checks" header once before the first one.
# NOTE(review): Python 2 print statements -- this module targets the
# Python 2 runtime bundled with Boost.Build.
global __announced_checks
if not __announced_checks:
print "Performing configuration checks"
__announced_checks = True
print result
def log_library_search_result(library, result):
    """Log a right-aligned " - <library> : <result>" configure-check line.

    Fix: the original right-justified with the undefined name ``width``,
    which raised NameError on every call; the module-level column width
    is ``__width``.
    """
    log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
def print_component_configuration():
# Print which registered components are being built, followed by any
# per-component log messages recorded via log_component_configuration().
print "\nComponent configuration:"
for c in __components:
if c in __built_components:
s = "building"
else:
s = "not building"
message = " - %s)" % c
message = message.rjust(__width)
message += " : " + s
# NOTE(review): `message` is built but never printed, and the format
# string has a stray ")" -- this matches the upstream port, but looks
# like a porting bug; confirm against the bjam original.
for m in __component_logs.get(c, []):
print " -" + m
print ""
# Memoized results of build checks, keyed on (what, property-set).
__builds_cache = {}
def builds(metatarget_reference, project, ps, what):
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
# Each (what, ps) pair is built at most once: the cache entry is
# pre-seeded to False and flipped to True only on a successful build.
result = []
existing = __builds_cache.get((what, ps), None)
if existing is None:
result = False
__builds_cache[(what, ps)] = False
targets = b2.build.targets.generate_from_reference(
metatarget_reference, project, ps).targets()
jam_targets = []
for t in targets:
jam_targets.append(t.actualize())
# Right-align the check label to the configured report width; build
# output goes to the configure log via __log_fd (see set_log_file).
x = (" - %s" % what).rjust(__width)
if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
__builds_cache[(what, ps)] = True
result = True
log_check_result("%s: yes" % x)
else:
log_check_result("%s: no" % x)
return result
else:
return existing
def set_log_file(log_file_name):
    """Open *log_file_name* to receive the output of configure checks.

    Called by Boost.Build startup code; should never be called by users.
    Creates the containing directory if needed and stores both the file
    object (kept referenced so it is not garbage-collected and closed)
    and its raw descriptor in module state.
    """
    global __log_file, __log_fd
    dirname = os.path.dirname(log_file_name)
    # Fix: os.path.dirname() returns '' for a bare file name, and
    # os.makedirs('') raises; only create the directory when there is one.
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)
    # Keep the file object referenced so it stays open.
    __log_file = open(log_file_name, "w")
    __log_fd = __log_file.fileno()
# Frontend rules
class CheckTargetBuildsWorker:
# Backs the 'check-target-builds' frontend rule: evaluates either
# true_properties or false_properties depending on whether 'target'
# builds under the relevant subset of the current property set.
def __init__(self, target, true_properties, false_properties):
self.target = target
self.true_properties = property.create_from_strings(true_properties, True)
self.false_properties = property.create_from_strings(false_properties, True)
def check(self, ps):
# FIXME: this should not be hardcoded. Other checks might
# want to consider different set of features as relevant.
toolset = ps.get('toolset')[0]
toolset_version_property = "<toolset-" + toolset + ":version>" ;
# Reduce 'ps' to the features relevant for build checks so the
# builds() cache is keyed on a stable, minimal property set.
relevant = ps.get_properties('target-os') + \
ps.get_properties("toolset") + \
ps.get_properties(toolset_version_property) + \
ps.get_properties("address-model") + \
ps.get_properties("architecture")
rps = property_set.create(relevant)
t = get_manager().targets().current()
p = t.project()
if builds(self.target, p, rps, "%s builds" % self.target):
choosen = self.true_properties
else:
choosen = self.false_properties
# Expand any <conditional> properties of the chosen branch in the
# context of the full (unreduced) property set.
return property.evaluate_conditionals_in_context(choosen, ps)
@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"]))
def check_target_builds(target, true_properties, false_properties):
# Frontend rule: wraps a CheckTargetBuildsWorker callback in a
# <conditional> property so the build check runs during property
# evaluation.
worker = CheckTargetBuildsWorker(target, true_properties, false_properties)
value = value_to_jam(worker.check)
return "<conditional>" + value
# Expose the rule to Jam code under its hyphenated name.
get_manager().projects().add_rule("check-target-builds", check_target_builds)
| mit |
MounirMesselmeni/django | django/core/serializers/python.py | 31 | 7859 | """
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils import six
from django.utils.encoding import force_text, is_protected_type
class Serializer(base.Serializer):
"""
Serializes a QuerySet to basic Python objects (dicts/lists/strings).
"""
# Flag read by the serializer framework: not meant to be selected
# directly as a public serialization format.
internal_use_only = True
def start_serialization(self):
# Reset accumulator state before a serialization run.
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
# _current collects this object's field values; OrderedDict keeps
# field order stable in the output.
self._current = OrderedDict()
def end_object(self, obj):
self.objects.append(self.get_dump_object(obj))
self._current = None
def get_dump_object(self, obj):
# NOTE(review): deferred instances report their underlying model via
# proxy_for_model -- Django 1.x internals; confirm on upgrade.
model = obj._meta.proxy_for_model if obj._deferred else obj.__class__
data = OrderedDict([('model', force_text(model._meta))])
# The pk is omitted when natural primary keys are in use and the
# object can provide one.
if not self.use_natural_primary_keys or not hasattr(obj, 'natural_key'):
data["pk"] = force_text(obj._get_pk_val(), strings_only=True)
data['fields'] = self._current
return data
def handle_field(self, obj, field):
value = field.value_from_object(obj)
# Protected types (i.e., primitives like None, numbers, dates,
# and Decimals) are passed through as is. All other values are
# converted to string first.
if is_protected_type(value):
self._current[field.name] = value
else:
self._current[field.name] = field.value_to_string(obj)
def handle_fk_field(self, obj, field):
# Prefer the related object's natural key when requested and
# available; otherwise emit the raw foreign-key attribute value.
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = getattr(obj, field.get_attname())
if not is_protected_type(value):
value = field.value_to_string(obj)
self._current[field.name] = value
def handle_m2m_field(self, obj, field):
# Only auto-created through tables are serialized as flat lists of
# pks/natural keys; explicit through models are skipped entirely.
if field.remote_field.through._meta.auto_created:
if self.use_natural_foreign_keys and hasattr(field.remote_field.model, 'natural_key'):
m2m_value = lambda value: value.natural_key()
else:
m2m_value = lambda value: force_text(value._get_pk_val(), strings_only=True)
self._current[field.name] = [m2m_value(related)
for related in getattr(obj, field.name).iterator()]
def getvalue(self):
return self.objects
def Deserializer(object_list, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor.
Generator: yields one DeserializedObject per input dict.
"""
# Options: 'using' selects the database alias; 'ignorenonexistent'
# silently skips unknown models and fields instead of raising.
db = options.pop('using', DEFAULT_DB_ALIAS)
ignore = options.pop('ignorenonexistent', False)
field_names_cache = {} # Model: <list of field_names>
for d in object_list:
# Look up the model and start building a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignore:
continue
else:
raise
data = {}
if 'pk' in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
# M2M values are collected separately and handed to DeserializedObject.
m2m_data = {}
if Model not in field_names_cache:
field_names_cache[Model] = {f.name for f in Model._meta.get_fields()}
field_names = field_names_cache[Model]
# Handle each field
for (field_name, field_value) in six.iteritems(d["fields"]):
if ignore and field_name not in field_names:
# skip fields no longer on model
continue
# On Python 2, decode bytestrings using the configured charset.
if isinstance(field_value, str):
field_value = force_text(
field_value, options.get("encoding", settings.DEFAULT_CHARSET), strings_only=True
)
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
model = field.remote_field.model
if hasattr(model._default_manager, 'get_by_natural_key'):
def m2m_convert(value):
# A non-string iterable is treated as a natural key tuple;
# anything else as a raw pk.
if hasattr(value, '__iter__') and not isinstance(value, six.text_type):
return model._default_manager.db_manager(db).get_by_natural_key(*value).pk
else:
return force_text(model._meta.pk.to_python(value), strings_only=True)
else:
m2m_convert = lambda v: force_text(model._meta.pk.to_python(v), strings_only=True)
try:
m2m_data[field.name] = []
for pk in field_value:
m2m_data[field.name].append(m2m_convert(pk))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), pk)
# Handle FK fields
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
model = field.remote_field.model
if field_value is not None:
try:
default_manager = model._default_manager
field_name = field.remote_field.field_name
if hasattr(default_manager, 'get_by_natural_key'):
if hasattr(field_value, '__iter__') and not isinstance(field_value, six.text_type):
obj = default_manager.db_manager(db).get_by_natural_key(*field_value)
value = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if model._meta.pk.remote_field:
value = value.pk
else:
value = model._meta.get_field(field_name).to_python(field_value)
data[field.attname] = value
else:
data[field.attname] = model._meta.get_field(field_name).to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
else:
data[field.attname] = None
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
obj = base.build_instance(Model, data, db)
yield base.DeserializedObject(obj, m2m_data)
def _get_model(model_identifier):
    """Resolve an "app_label.model_name" string to its model class.

    Raises ``base.DeserializationError`` when the identifier is unknown
    or malformed.
    """
    try:
        model = apps.get_model(model_identifier)
    except (LookupError, TypeError):
        raise base.DeserializationError(
            "Invalid model identifier: '%s'" % model_identifier)
    return model
| bsd-3-clause |
sovietspy2/uzletiProject | framework/pym/play/commands/eclipse.py | 2 | 8276 | import os, os.path
import shutil
import time
from play.utils import *
# Command aliases handled by this module, and their help text shown by
# the Play command-line tool.
COMMANDS = ['eclipsify', 'ec']
HELP = {
'eclipsify': 'Create all Eclipse configuration files'
}
def execute(**kargs):
"""Generate Eclipse configuration (.project, .classpath, .settings and
launch files) for a Play application or module.

NOTE(review): Python 2-only idioms here (print statements, file(),
dict.has_key) are intentional -- Play 1.x tooling runs on Python 2.
"""
app = kargs.get("app")
args = kargs.get("args")
play_env = kargs.get("env")
# A directory with conf/application.conf is an application; otherwise
# it is treated as a module.
is_application = os.path.exists(os.path.join(app.path, 'conf', 'application.conf'))
if is_application:
app.check()
app.check_jpda()
modules = app.modules()
classpath = app.getClasspath()
# determine the name of the project
# if this is an application, the name of the project is in the application.conf file
# if this is a module, we infer the name from the path
application_name = app.readConf('application.name')
vm_arguments = app.readConf('jvm.memory')
javaVersion = getJavaVersion()
print "~ using java version \"%s\"" % javaVersion
if javaVersion.startswith("1.7"):
# JDK 7 compat
vm_arguments = vm_arguments +' -XX:-UseSplitVerifier'
elif javaVersion.startswith("1.8"):
# JDK 8 compatible
vm_arguments = vm_arguments +' -noverify'
if application_name:
application_name = application_name.replace("/", " ")
else:
application_name = os.path.basename(app.path)
# Remove any previously generated Eclipse artifacts before copying the
# templates; the short sleep works around slow file-lock release on
# Windows after rmtree.
dotProject = os.path.join(app.path, '.project')
dotClasspath = os.path.join(app.path, '.classpath')
dotSettings = os.path.join(app.path, '.settings')
eclipse = os.path.join(app.path, 'eclipse')
if os.path.exists(eclipse):
shutil.rmtree(eclipse)
if os.name == 'nt':
time.sleep(1)
if os.path.exists(dotSettings):
shutil.rmtree(dotSettings)
if os.name == 'nt':
time.sleep(1)
shutil.copyfile(os.path.join(play_env["basedir"], 'resources/eclipse/.project'), dotProject)
shutil.copyfile(os.path.join(play_env["basedir"], 'resources/eclipse/.classpath'), dotClasspath)
if is_application:
shutil.copytree(os.path.join(play_env["basedir"], 'resources/eclipse'), eclipse)
shutil.copytree(os.path.join(play_env["basedir"], 'resources/eclipse/.settings'), dotSettings)
replaceAll(dotProject, r'%PROJECT_NAME%', application_name)
playJarPath = os.path.join(play_env["basedir"], 'framework', 'play-%s.jar' % play_env['version'])
playSourcePath = os.path.join(os.path.dirname(playJarPath), 'src')
if os.name == 'nt':
playSourcePath=playSourcePath.replace('\\','/').capitalize()
# Map each classpath jar to its source jar, if one can be found.
cpJarToSource = {}
lib_src = os.path.join(app.path, 'tmp/lib-src')
for el in classpath:
# library sources jars in the lib directory
if os.path.basename(el) != "conf" and el.endswith('-sources.jar'):
cpJarToSource[el.replace('-sources', '')] = el
# pointers to source jars produced by 'play deps'
src_file = os.path.join(lib_src, os.path.basename(el) + '.src')
if os.path.exists(src_file):
f = file(src_file)
cpJarToSource[el] = f.readline().rstrip()
f.close()
# Map jars to .docurl files holding their javadoc locations.
javadocLocation = {}
for el in classpath:
urlFile = el.replace(r'.jar','.docurl')
if os.path.basename(el) != "conf" and os.path.exists(urlFile):
javadocLocation[el] = urlFile
# Build the <classpathentry> XML fragment substituted into .classpath.
cpXML = ""
for el in classpath:
if os.path.basename(el) != "conf":
if el == playJarPath:
cpXML += '<classpathentry kind="lib" path="%s" sourcepath="%s" />\n\t' % (os.path.normpath(el) , playSourcePath)
else:
if cpJarToSource.has_key(el):
cpXML += '<classpathentry kind="lib" path="%s" sourcepath="%s"/>\n\t' % (os.path.normpath(el), cpJarToSource[el])
else:
if javadocLocation.has_key(el):
cpXML += '<classpathentry kind="lib" path="%s">\n\t\t' % os.path.normpath(el)
cpXML += '<attributes>\n\t\t\t'
f = file(javadocLocation[el])
url = f.readline()
f.close()
cpXML += '<attribute name="javadoc_location" value="%s"/>\n\t\t' % (url.strip())
cpXML += '</attributes>\n\t'
cpXML += '</classpathentry>\n\t'
else:
cpXML += '<classpathentry kind="lib" path="%s"/>\n\t' % os.path.normpath(el)
if not is_application:
cpXML += '<classpathentry kind="src" path="src"/>'
replaceAll(dotClasspath, r'%PROJECTCLASSPATH%', cpXML)
# generate source path for test folder if one exists
cpTEST = ""
if os.path.exists(os.path.join(app.path, 'test')):
cpTEST += '<classpathentry kind="src" path="test"/>'
replaceAll(dotClasspath, r'%TESTCLASSPATH%', cpTEST)
# Link each module's app/conf/public folders into the Eclipse project.
if len(modules):
lXML = ""
cXML = ""
for module in modules:
lXML += '<link><name>%s</name><type>2</type><location>%s</location></link>\n' % (os.path.basename(module), os.path.join(module, 'app').replace('\\', '/'))
if os.path.exists(os.path.join(module, "conf")):
lXML += '<link><name>conf/%s</name><type>2</type><location>%s/conf</location></link>\n' % (os.path.basename(module), module.replace('\\', '/'))
if os.path.exists(os.path.join(module, "public")):
lXML += '<link><name>public/%s</name><type>2</type><location>%s/public</location></link>\n' % (os.path.basename(module), module.replace('\\', '/'))
cXML += '<classpathentry kind="src" path="%s"/>\n\t' % (os.path.basename(module))
replaceAll(dotProject, r'%LINKS%', '<linkedResources>%s</linkedResources>' % lXML)
replaceAll(dotClasspath, r'%MODULES%', cXML)
else:
replaceAll(dotProject, r'%LINKS%', '')
replaceAll(dotClasspath, r'%MODULES%', '')
# Fill in the launch configuration templates and rename them after the
# application (applications only).
if is_application:
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%PROJECT_NAME%', application_name)
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%PLAY_BASE%', play_env["basedir"])
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%PLAY_ID%', play_env["id"])
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%JPDA_PORT%', str(app.jpda_port))
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%PLAY_VERSION%', play_env["version"])
replaceAll(os.path.join(app.path, 'eclipse/debug.launch'), r'%VM_ARGUMENTS%', vm_arguments)
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%PROJECT_NAME%', application_name)
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%PLAY_BASE%', play_env["basedir"])
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%PLAY_ID%', play_env["id"])
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%JPDA_PORT%', str(app.jpda_port))
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%PLAY_VERSION%', play_env["version"])
replaceAll(os.path.join(app.path, 'eclipse/test.launch'), r'%VM_ARGUMENTS%', vm_arguments)
replaceAll(os.path.join(app.path, 'eclipse/connect.launch'), r'%PROJECT_NAME%', application_name)
replaceAll(os.path.join(app.path, 'eclipse/connect.launch'), r'%JPDA_PORT%', str(app.jpda_port))
os.rename(os.path.join(app.path, 'eclipse/connect.launch'), os.path.join(app.path, 'eclipse/Connect JPDA to %s.launch' % application_name))
os.rename(os.path.join(app.path, 'eclipse/test.launch'), os.path.join(app.path, 'eclipse/Test %s.launch' % application_name))
os.rename(os.path.join(app.path, 'eclipse/debug.launch'), os.path.join(app.path, 'eclipse/%s.launch' % application_name))
if is_application:
print "~ OK, the application \"%s\" is ready for eclipse" % application_name
else:
print "~ OK, the module \"%s\" is ready for eclipse" % application_name
print "~ Use File/Import/General/Existing project to import %s into eclipse" % os.path.normpath(app.path)
print "~"
print "~ Use eclipsify again when you want to update eclipse configuration files."
print "~ However, it's often better to delete and re-import the project into your workspace since eclipse keeps dirty caches..."
print "~"
| gpl-3.0 |
david30907d/feedback_django | spirit/category/admin/forms.py | 4 | 1235 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_text
from ..models import Category
class CategoryForm(forms.ModelForm):
# Admin form for Category: restricts the selectable parent to visible
# top-level categories and prevents re-parenting a category that
# already has children.
class Meta:
model = Category
fields = ("parent", "title", "description", "is_global", "is_closed", "is_removed")
def __init__(self, *args, **kwargs):
super(CategoryForm, self).__init__(*args, **kwargs)
# Only visible, top-level categories may act as a parent, and a
# category can never be its own parent.
queryset = Category.objects.visible().parents()
if self.instance.pk:
queryset = queryset.exclude(pk=self.instance.pk)
self.fields['parent'] = forms.ModelChoiceField(queryset=queryset, required=False)
self.fields['parent'].label_from_instance = lambda obj: smart_text(obj.title)
def clean_parent(self):
# Reject a parent when the edited category already has children,
# keeping the tree at most two levels deep.
parent = self.cleaned_data["parent"]
if self.instance.pk:
has_childrens = self.instance.category_set.all().exists()
if parent and has_childrens:
raise forms.ValidationError(_("The category you are updating "
"can not have a parent since it has childrens"))
return parent
| mit |
alrifqi/django | django/contrib/admin/bin/compress.py | 100 | 2075 | #!/usr/bin/env python
import argparse
import os
import subprocess
import sys
# Directory containing the admin app's JavaScript sources.
js_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'static', 'admin', 'js')


def main():
    """Compress the admin app's JavaScript with the Google Closure Compiler.

    With no file arguments, the default set of jQuery-based admin scripts
    is compressed. Requires the Closure Compiler jar and Java 6 or later.
    """
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('file', nargs='*')
    parser.add_argument("-c", dest="compiler", default="~/bin/compiler.jar",
                        help="path to Closure Compiler jar file")
    parser.add_argument("-v", "--verbose",
                        action="store_true", dest="verbose")
    parser.add_argument("-q", "--quiet",
                        action="store_false", dest="verbose")
    options = parser.parse_args()

    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit(
            "Google Closure compiler jar file %s not found. Please use the -c "
            "option to specify the path." % compiler
        )

    if not options.file:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        files = [os.path.join(js_path, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
    else:
        files = options.file

    for file_name in files:
        # Normalize to a ".js" path before deriving the ".min.js" name.
        if not file_name.endswith(".js"):
            file_name = file_name + ".js"
        to_compress = os.path.expanduser(file_name)
        if os.path.exists(to_compress):
            # Fix: the original used "".join(file_name.rsplit(".js")), which
            # strips EVERY ".js" occurrence (e.g. "my.json.js" became
            # "myon.min.js"); only the trailing extension must be replaced.
            to_compress_min = "%s.min.js" % file_name[:-3]
            cmd = "java -jar %s --js %s --js_output_file %s" % (compiler, to_compress, to_compress_min)
            if options.verbose:
                sys.stdout.write("Running: %s\n" % cmd)
            subprocess.call(cmd.split())
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)


if __name__ == '__main__':
    main()
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.