repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
rduivenvoorde/QGIS | python/console/console_output.py | 13 | 10797 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Python Console for QGIS
-------------------
begin : 2012-09-10
copyright : (C) 2012 by Salvatore Larosa
email : lrssvtml (at) gmail (dot) com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
Some portions of code were taken from https://code.google.com/p/pydee/
"""
from qgis.PyQt.QtCore import Qt, QCoreApplication, QThread, QMetaObject, Q_RETURN_ARG, Q_ARG, QObject, pyqtSlot
from qgis.PyQt.QtGui import QColor, QFont, QKeySequence, QFontDatabase
from qgis.PyQt.QtWidgets import QGridLayout, QSpacerItem, QSizePolicy, QShortcut, QMenu, QApplication
from qgis.PyQt.Qsci import QsciScintilla
from qgis.core import Qgis, QgsApplication, QgsSettings
from qgis.gui import QgsMessageBar, QgsCodeEditorPython
import sys
class writeOut(QObject):
    """File-like Qt object that redirects stdout/stderr into the console
    output widget.

    Instances are installed as ``sys.stdout`` / ``sys.stderr`` (see
    ``ShellOutputScintilla.__init__``) so that prints and tracebacks from
    user code appear inside the QGIS Python console.
    """

    # Fallback colour for stderr text when the user has not configured one.
    ERROR_COLOR = "#e31a1c"

    def __init__(self, shellOut, out=None, style=None):
        """
        This class allows writing to stdout and stderr.

        :param shellOut: the ShellOutputScintilla widget that renders text
        :param out: original stream; NOTE(review): currently ignored, see below
        :param style: "_traceback" selects the red error styling for writes
        """
        super().__init__()
        self.sO = shellOut
        # NOTE(review): the ``out`` argument is accepted but deliberately
        # discarded here, so output is never echoed to the original stream.
        # Confirm this is intentional before "fixing" it.
        self.out = None
        self.style = style
        # When True, the next write() raises KeyboardInterrupt; used to break
        # out of long-running user code.
        self.fire_keyboard_interrupt = False

    @pyqtSlot(str)
    def write(self, m):
        """Append ``m`` to the output widget (thread-safe)."""
        # If called from a worker thread, queue a re-invocation of this slot
        # on the GUI thread; the widget must only be touched from there.
        if QThread.currentThread() != QCoreApplication.instance().thread():
            QMetaObject.invokeMethod(self, "write", Qt.QueuedConnection, Q_ARG(str, m))
            return

        if self.style == "_traceback":
            # Show errors in red, italic and bold using Scintilla style 0o01.
            stderrColor = QColor(self.sO.settings.value("pythonConsole/stderrFontColor", QColor(self.ERROR_COLOR)))
            self.sO.SendScintilla(QsciScintilla.SCI_STYLESETFORE, 0o01, stderrColor)
            self.sO.SendScintilla(QsciScintilla.SCI_STYLESETITALIC, 0o01, True)
            self.sO.SendScintilla(QsciScintilla.SCI_STYLESETBOLD, 0o01, True)
            pos = self.sO.SendScintilla(QsciScintilla.SCI_GETCURRENTPOS)
            self.sO.SendScintilla(QsciScintilla.SCI_STARTSTYLING, pos, 31)
            self.sO.append(m)
            self.sO.SendScintilla(QsciScintilla.SCI_SETSTYLING, len(m), 0o01)
        else:
            self.sO.append(m)

        # ``self.out`` is always None today (see __init__), so this echo
        # branch is currently dead; kept for a possible future echo stream.
        if self.out:
            self.out.write(m)

        self.move_cursor_to_end()

        if self.style != "_traceback":
            # Repaint so non-error output becomes visible immediately.
            self.sO.repaint()

        if self.fire_keyboard_interrupt:
            self.fire_keyboard_interrupt = False
            raise KeyboardInterrupt

    def move_cursor_to_end(self):
        """Move cursor to end of text and make sure it is visible."""
        line, index = self.get_end_pos()
        self.sO.setCursorPosition(line, index)
        self.sO.ensureCursorVisible()
        self.sO.ensureLineVisible(line)

    def get_end_pos(self):
        """Return (line, index) position of the last character."""
        line = self.sO.lines() - 1
        return (line, len(self.sO.text(line)))

    def flush(self):
        """No-op; writes are applied immediately (file-object API compliance)."""
        pass

    def isatty(self):
        """Not a terminal; lets libraries skip interactive/ANSI behaviour."""
        return False
class ShellOutputScintilla(QgsCodeEditorPython):
    """Read-only editor widget that displays the console's output history.

    On construction it replaces ``sys.stdout`` and ``sys.stderr`` with
    :class:`writeOut` wrappers so every print / traceback from this process
    is rendered in the widget.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.parent = parent  # hosting console widget (provides shell, toolbar, ...)
        self.shell = self.parent.shell  # input shell; receives focus and commands
        self.settings = QgsSettings()

        # Creates layout for message bar
        self.layout = QGridLayout(self)
        self.layout.setContentsMargins(0, 0, 0, 0)
        spacerItem = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.layout.addItem(spacerItem, 1, 0, 1, 1)

        # messageBar instance
        self.infoBar = QgsMessageBar()
        sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
        self.infoBar.setSizePolicy(sizePolicy)
        self.layout.addWidget(self.infoBar, 0, 0, 1, 1)

        # Redirect process-wide stdout/stderr into this widget; stderr gets
        # the "_traceback" (red) styling.
        sys.stdout = writeOut(self, sys.stdout)
        sys.stderr = writeOut(self, sys.stderr, "_traceback")

        self.insertInitText()
        self.refreshSettingsOutput()
        self.setMinimumHeight(120)
        self.setWrapMode(QsciScintilla.WrapCharacter)
        self.SendScintilla(QsciScintilla.SCI_SETHSCROLLBAR, 0)

        # Ctrl+E re-runs the currently selected text in the shell.
        self.runScut = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_E), self)
        self.runScut.setContext(Qt.WidgetShortcut)
        self.runScut.activated.connect(self.enteredSelected)
        # Reimplemented copy action to prevent paste prompt (>>>,...) in command view
        self.copyShortcut = QShortcut(QKeySequence.Copy, self)
        self.copyShortcut.setContext(Qt.WidgetWithChildrenShortcut)
        self.copyShortcut.activated.connect(self.copy)
        self.selectAllShortcut = QShortcut(QKeySequence.SelectAll, self)
        self.selectAllShortcut.setContext(Qt.WidgetWithChildrenShortcut)
        self.selectAllShortcut.activated.connect(self.selectAll)

    def insertInitText(self):
        """Insert the (translated) three-line console banner."""
        txtInit = QCoreApplication.translate("PythonConsole",
                                             "Python Console\n"
                                             "Use iface to access QGIS API interface or Type help(iface) for more info\n"
                                             "Security warning: typing commands from an untrusted source can harm your computer")
        # some translation string for the console header ends without '\n'
        # and the first command in console will be appended at the header text.
        # The following code add a '\n' at the end of the string if not present.
        if txtInit.endswith('\n'):
            self.setText(txtInit)
        else:
            self.setText(txtInit + '\n')

    def initializeLexer(self):
        """Extend the base lexer setup: no folding margin, no edge line."""
        super().initializeLexer()
        self.setFoldingVisible(False)
        self.setEdgeMode(QsciScintilla.EdgeNone)

    def refreshSettingsOutput(self):
        """Re-apply lexer and read-only presentation settings."""
        # Set Python lexer
        self.initializeLexer()
        self.setReadOnly(True)
        self.setCaretWidth(0)  # NO (blinking) caret in the output

    def clearConsole(self):
        """Wipe all output, restore the banner and refocus the shell."""
        self.setText('')
        self.insertInitText()
        self.shell.setFocus()

    def contextMenuEvent(self, e):
        """Build the right-click menu; enable entries based on current state."""
        menu = QMenu(self)
        menu.addAction(QgsApplication.getThemeIcon("console/iconHideToolConsole.svg"),
                       QCoreApplication.translate("PythonConsole", "Hide/Show Toolbar"),
                       self.hideToolBar)
        menu.addSeparator()
        showEditorAction = menu.addAction(
            QgsApplication.getThemeIcon("console/iconShowEditorConsole.svg"),
            QCoreApplication.translate("PythonConsole", "Show Editor"),
            self.showEditor)
        menu.addSeparator()
        runAction = menu.addAction(QgsApplication.getThemeIcon("console/mIconRunConsole.svg"),
                                   QCoreApplication.translate("PythonConsole", "Enter Selected"),
                                   self.enteredSelected,
                                   QKeySequence(Qt.CTRL + Qt.Key_E))
        clearAction = menu.addAction(QgsApplication.getThemeIcon("console/iconClearConsole.svg"),
                                     QCoreApplication.translate("PythonConsole", "Clear Console"),
                                     self.clearConsole)
        pyQGISHelpAction = menu.addAction(QgsApplication.getThemeIcon("console/iconHelpConsole.svg"),
                                          QCoreApplication.translate("PythonConsole", "Search Selected in PyQGIS docs"),
                                          self.searchSelectedTextInPyQGISDocs)
        menu.addSeparator()
        copyAction = menu.addAction(
            QgsApplication.getThemeIcon("mActionEditCopy.svg"),
            QCoreApplication.translate("PythonConsole", "Copy"),
            self.copy, QKeySequence.Copy)
        selectAllAction = menu.addAction(
            QCoreApplication.translate("PythonConsole", "Select All"),
            self.selectAll, QKeySequence.SelectAll)
        menu.addSeparator()
        menu.addAction(QgsApplication.getThemeIcon("console/iconSettingsConsole.svg"),
                       QCoreApplication.translate("PythonConsole", "Options…"),
                       self.parent.openSettings)
        # Start with everything disabled, then re-enable by state below.
        runAction.setEnabled(False)
        clearAction.setEnabled(False)
        copyAction.setEnabled(False)
        pyQGISHelpAction.setEnabled(False)
        selectAllAction.setEnabled(False)
        showEditorAction.setEnabled(True)
        # Selection-dependent actions.
        if self.hasSelectedText():
            runAction.setEnabled(True)
            copyAction.setEnabled(True)
            pyQGISHelpAction.setEnabled(True)
        # Line index 3 is the first line after the 3-line banner (see
        # insertInitText); non-empty presumably means there is real output.
        if not self.text(3) == '':
            selectAllAction.setEnabled(True)
            clearAction.setEnabled(True)
        if self.parent.tabEditorWidget.isVisible():
            showEditorAction.setEnabled(False)
        menu.exec_(self.mapToGlobal(e.pos()))

    def hideToolBar(self):
        """Toggle the console toolbar's visibility, then refocus the shell."""
        tB = self.parent.toolBar
        tB.hide() if tB.isVisible() else tB.show()
        self.shell.setFocus()

    def showEditor(self):
        """Show the code editor pane if hidden, then refocus the shell."""
        Ed = self.parent.splitterObj
        if not Ed.isVisible():
            Ed.show()
            self.parent.showEditorButton.setChecked(True)
        self.shell.setFocus()

    def copy(self):
        """Copy text to clipboard... or keyboard interrupt.

        With a selection, copies it (stripping the ``>>> `` / ``... ``
        prompts). With no selection, Ctrl+C is treated as an interrupt
        request and raises KeyboardInterrupt.
        """
        if self.hasSelectedText():
            text = self.selectedText()
            text = text.replace('>>> ', '').replace('... ', '').strip()  # removing prompts
            QApplication.clipboard().setText(text)
        else:
            raise KeyboardInterrupt

    def enteredSelected(self):
        """Paste the selected text into the shell and execute it."""
        cmd = self.selectedText()
        self.shell.insertFromDropPaste(cmd)
        self.shell.entered()

    def keyPressEvent(self, e):
        # empty text indicates possible shortcut key sequence so stay in output
        txt = e.text()
        # Printable characters are forwarded to the input shell so typing in
        # the (read-only) output starts a command there instead.
        if len(txt) and txt >= " ":
            self.shell.append(txt)
            self.shell.move_cursor_to_end()
            self.shell.setFocus()
            e.ignore()
        else:
            # possible shortcut key sequence, accept it
            e.accept()

    def widgetMessageBar(self, iface, text):
        """Push an info message onto the console's message bar."""
        timeout = iface.messageTimeout()
        self.infoBar.pushMessage(text, Qgis.Info, timeout)
| gpl-2.0 |
rajalokan/nova | nova/tests/unit/objects/test_image_meta.py | 2 | 13479 | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from nova import exception
from nova import objects
from nova.objects import fields
from nova import test
class TestImageMeta(test.NoDBTestCase):
    """Tests for top-level field handling in ``objects.ImageMeta.from_dict``."""

    def test_basic_attrs(self):
        """All basic glance fields are copied with correct type conversion."""
        image = {'status': 'active',
                 'container_format': 'bare',
                 'min_ram': 0,
                 'updated_at': '2014-12-12T11:16:36.000000',
                 # Testing string -> int conversion
                 'min_disk': '0',
                 'owner': '2d8b9502858c406ebee60f0849486222',
                 # Testing string -> bool conversion
                 'protected': 'yes',
                 'properties': {
                     'os_type': 'Linux',
                     'hw_video_model': 'vga',
                     'hw_video_ram': '512',
                     'hw_qemu_guest_agent': 'yes',
                     'hw_scsi_model': 'virtio-scsi',
                 },
                 'size': 213581824,
                 'name': 'f16-x86_64-openstack-sda',
                 'checksum': '755122332caeb9f661d5c978adb8b45f',
                 'created_at': '2014-12-10T16:23:14.000000',
                 'disk_format': 'qcow2',
                 'id': 'c8b1790e-a07d-4971-b137-44f2432936cd'
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('active', image_meta.status)
        self.assertEqual('bare', image_meta.container_format)
        self.assertEqual(0, image_meta.min_ram)
        # Timestamp strings are parsed into datetime objects.
        self.assertIsInstance(image_meta.updated_at, datetime.datetime)
        self.assertEqual(0, image_meta.min_disk)
        self.assertEqual('2d8b9502858c406ebee60f0849486222', image_meta.owner)
        self.assertTrue(image_meta.protected)
        self.assertEqual(213581824, image_meta.size)
        self.assertEqual('f16-x86_64-openstack-sda', image_meta.name)
        self.assertEqual('755122332caeb9f661d5c978adb8b45f',
                         image_meta.checksum)
        self.assertIsInstance(image_meta.created_at, datetime.datetime)
        self.assertEqual('qcow2', image_meta.disk_format)
        self.assertEqual('c8b1790e-a07d-4971-b137-44f2432936cd', image_meta.id)
        self.assertIsInstance(image_meta.properties, objects.ImageMetaProps)

    def test_no_props(self):
        """An empty dict still yields an ImageMetaProps properties object."""
        image_meta = objects.ImageMeta.from_dict({})
        self.assertIsInstance(image_meta.properties, objects.ImageMetaProps)

    def test_volume_backed_image(self):
        """None values (as seen for volume-backed images) become '' / 0."""
        image = {'container_format': None,
                 'size': 0,
                 'checksum': None,
                 'disk_format': None,
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('', image_meta.container_format)
        self.assertEqual(0, image_meta.size)
        self.assertEqual('', image_meta.checksum)
        self.assertEqual('', image_meta.disk_format)

    def test_null_substitution(self):
        """Every nullable field gets a type-appropriate default substituted."""
        image = {'name': None,
                 'checksum': None,
                 'owner': None,
                 'size': None,
                 'virtual_size': None,
                 'container_format': None,
                 'disk_format': None,
                 }
        image_meta = objects.ImageMeta.from_dict(image)
        self.assertEqual('', image_meta.name)
        self.assertEqual('', image_meta.checksum)
        self.assertEqual('', image_meta.owner)
        self.assertEqual(0, image_meta.size)
        self.assertEqual(0, image_meta.virtual_size)
        self.assertEqual('', image_meta.container_format)
        self.assertEqual('', image_meta.disk_format)
class TestImageMetaProps(test.NoDBTestCase):
    """Tests for ``objects.ImageMetaProps``: parsing, legacy-key mapping,
    NUMA sub-keys and version back-compat."""

    def test_normal_props(self):
        """Current-style ``hw_*``/``os_*`` keys parse with type coercion."""
        props = {'os_type': 'windows',
                 'hw_video_model': 'vga',
                 'hw_video_ram': '512',
                 'hw_qemu_guest_agent': 'yes',
                 # Fill sane values for the rest here
                 }
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual('windows', virtprops.os_type)
        self.assertEqual('vga', virtprops.hw_video_model)
        self.assertEqual(512, virtprops.hw_video_ram)
        self.assertTrue(virtprops.hw_qemu_guest_agent)

    def test_default_props(self):
        """With no input, every field reads back as None via get()."""
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        for prop in virtprops.fields:
            self.assertIsNone(virtprops.get(prop))

    def test_default_prop_value(self):
        """get() returns the caller-supplied default for unset fields."""
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual("hvm", virtprops.get("hw_vm_mode", "hvm"))

    def test_non_existent_prop(self):
        """get() on a name that is not a declared field raises."""
        props = {}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertRaises(AttributeError,
                          virtprops.get,
                          "doesnotexist")

    def test_legacy_compat(self):
        """Legacy (pre-namespaced) keys are mapped onto current fields."""
        legacy_props = {
            'architecture': 'x86_64',
            'owner_id': '123',
            'vmware_adaptertype': 'lsiLogic',
            'vmware_disktype': 'preallocated',
            'vmware_image_version': '2',
            'vmware_ostype': 'rhel3_64Guest',
            'auto_disk_config': 'yes',
            'ipxe_boot': 'yes',
            'xenapi_device_id': '3',
            'xenapi_image_compression_level': '2',
            'vmware_linked_clone': 'false',
            'xenapi_use_agent': 'yes',
            'xenapi_skip_agent_inject_ssh': 'no',
            'xenapi_skip_agent_inject_files_at_boot': 'no',
            'cache_in_nova': 'yes',
            'vm_mode': 'hvm',
            'bittorrent': 'yes',
            'mappings': [],
            'block_device_mapping': [],
            'bdm_v2': 'yes',
            'root_device_name': '/dev/vda',
            'hypervisor_version_requires': '>=1.5.3',
            'hypervisor_type': 'qemu',
        }
        image_meta = objects.ImageMetaProps.from_dict(legacy_props)
        self.assertEqual('x86_64', image_meta.hw_architecture)
        self.assertEqual('123', image_meta.img_owner_id)
        self.assertEqual('lsilogic', image_meta.hw_scsi_model)
        self.assertEqual('preallocated', image_meta.hw_disk_type)
        self.assertEqual(2, image_meta.img_version)
        self.assertEqual('rhel3_64Guest', image_meta.os_distro)
        self.assertTrue(image_meta.hw_auto_disk_config)
        self.assertTrue(image_meta.hw_ipxe_boot)
        self.assertEqual(3, image_meta.hw_device_id)
        self.assertEqual(2, image_meta.img_compression_level)
        self.assertFalse(image_meta.img_linked_clone)
        self.assertTrue(image_meta.img_use_agent)
        self.assertFalse(image_meta.os_skip_agent_inject_ssh)
        self.assertFalse(image_meta.os_skip_agent_inject_files_at_boot)
        self.assertTrue(image_meta.img_cache_in_nova)
        self.assertTrue(image_meta.img_bittorrent)
        self.assertEqual([], image_meta.img_mappings)
        self.assertEqual([], image_meta.img_block_device_mapping)
        self.assertTrue(image_meta.img_bdm_v2)
        self.assertEqual("/dev/vda", image_meta.img_root_device_name)
        self.assertEqual('>=1.5.3', image_meta.img_hv_requested_version)
        self.assertEqual('qemu', image_meta.img_hv_type)

    def test_legacy_compat_vmware_adapter_types(self):
        """Each legacy vmware adapter type maps to the right bus/model."""
        legacy_types = ['lsiLogic', 'busLogic', 'ide', 'lsiLogicsas',
                        'paraVirtual', None, '']
        for legacy_type in legacy_types:
            legacy_props = {
                'vmware_adaptertype': legacy_type,
            }
            image_meta = objects.ImageMetaProps.from_dict(legacy_props)
            if legacy_type == 'ide':
                # IDE sets the disk bus only.
                self.assertEqual('ide', image_meta.hw_disk_bus)
            elif not legacy_type:
                # Empty / None leaves both fields unset.
                self.assertFalse(image_meta.obj_attr_is_set('hw_disk_bus'))
                self.assertFalse(image_meta.obj_attr_is_set('hw_scsi_model'))
            else:
                # Everything else is a SCSI bus with a model-specific name.
                self.assertEqual('scsi', image_meta.hw_disk_bus)
                if legacy_type == 'lsiLogicsas':
                    expected = 'lsisas1068'
                elif legacy_type == 'paraVirtual':
                    expected = 'vmpvscsi'
                else:
                    expected = legacy_type.lower()
                self.assertEqual(expected, image_meta.hw_scsi_model)

    def test_duplicate_legacy_and_normal_props(self):
        # Both keys are referring to the same object field
        props = {'hw_scsi_model': 'virtio-scsi',
                 'vmware_adaptertype': 'lsiLogic',
                 }
        virtprops = objects.ImageMetaProps.from_dict(props)
        # The normal property always wins vs. the legacy field since
        # _set_attr_from_current_names is called finally
        self.assertEqual('virtio-scsi', virtprops.hw_scsi_model)

    def test_get(self):
        """get() mirrors dict.get semantics on set/unset fields."""
        props = objects.ImageMetaProps(os_distro='linux')
        self.assertEqual('linux', props.get('os_distro'))
        self.assertIsNone(props.get('img_version'))
        self.assertEqual(1, props.get('img_version', 1))

    def test_set_numa_mem(self):
        """``hw_numa_mem.N`` sub-keys collect into an int list."""
        props = {'hw_numa_nodes': 2,
                 'hw_numa_mem.0': "2048",
                 'hw_numa_mem.1': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([2048, 4096], virtprops.hw_numa_mem)

    def test_set_numa_mem_sparse(self):
        """Entries at indexes >= hw_numa_nodes are dropped."""
        props = {'hw_numa_nodes': 2,
                 'hw_numa_mem.0': "2048",
                 'hw_numa_mem.1': "1024",
                 'hw_numa_mem.3': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([2048, 1024], virtprops.hw_numa_mem)

    def test_set_numa_mem_no_count(self):
        """Without hw_numa_nodes only node 0 data is kept."""
        props = {'hw_numa_mem.0': "2048",
                 'hw_numa_mem.3': "4096"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIsNone(virtprops.get("hw_numa_nodes"))
        self.assertEqual([2048], virtprops.hw_numa_mem)

    def test_set_numa_cpus(self):
        """``hw_numa_cpus.N`` range strings parse into CPU-id sets."""
        props = {'hw_numa_nodes': 2,
                 'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.1': "4-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(2, virtprops.hw_numa_nodes)
        self.assertEqual([set([0, 1, 2, 3]), set([4, 5, 6, 7])],
                         virtprops.hw_numa_cpus)

    def test_set_numa_cpus_sparse(self):
        """Mixed "a-b" / "a,b" syntax; out-of-range node indexes dropped."""
        props = {'hw_numa_nodes': 4,
                 'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.1': "4,5",
                 'hw_numa_cpus.3': "6-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertEqual(4, virtprops.hw_numa_nodes)
        self.assertEqual([set([0, 1, 2, 3]), set([4, 5])],
                         virtprops.hw_numa_cpus)

    def test_set_numa_cpus_no_count(self):
        """Without hw_numa_nodes only node 0 CPU data is kept."""
        props = {'hw_numa_cpus.0': "0-3",
                 'hw_numa_cpus.3': "4-7"}
        virtprops = objects.ImageMetaProps.from_dict(props)
        self.assertIsNone(virtprops.get("hw_numa_nodes"))
        self.assertEqual([set([0, 1, 2, 3])],
                         virtprops.hw_numa_cpus)

    def test_obj_make_compatible(self):
        """Backlevelling to version 1.0 drops all newer fields."""
        props = {
            'hw_firmware_type': 'uefi',
            'hw_cpu_realtime_mask': '^0-1',
            'hw_cpu_thread_policy': 'prefer',
            'img_config_drive': 'mandatory',
            'os_admin_user': 'root',
            'hw_vif_multiqueue_enabled': True,
            'img_hv_type': 'kvm',
            'img_hv_requested_version': '>= 1.0',
            'os_require_quiesce': True,
            'os_secure_boot': 'required',
            'hw_rescue_bus': 'ide',
            'hw_rescue_device': 'disk',
            'hw_watchdog_action': fields.WatchdogAction.DISABLED,
        }
        obj = objects.ImageMetaProps(**props)
        primitive = obj.obj_to_primitive('1.0')
        self.assertFalse(any([x in primitive['nova_object.data']
                              for x in props]))
        # Disk-bus values newer than 1.0 cannot be backlevelled at all.
        for bus in ('lxc', 'uml'):
            obj.hw_disk_bus = bus
            self.assertRaises(exception.ObjectActionError,
                              obj.obj_to_primitive, '1.0')

    def test_obj_make_compatible_watchdog_action_not_disabled(self):
        """Tests that we don't pop the hw_watchdog_action if the value is not
        'disabled'.
        """
        obj = objects.ImageMetaProps(
            hw_watchdog_action=fields.WatchdogAction.PAUSE)
        primitive = obj.obj_to_primitive('1.0')
        self.assertIn('hw_watchdog_action', primitive['nova_object.data'])
        self.assertEqual(fields.WatchdogAction.PAUSE,
                         primitive['nova_object.data']['hw_watchdog_action'])

    def test_set_os_secure_boot(self):
        """os_secure_boot round-trips through from_dict."""
        props = {'os_secure_boot': "required"}
        secure_props = objects.ImageMetaProps.from_dict(props)
        self.assertEqual("required", secure_props.os_secure_boot)
| apache-2.0 |
tensor-tang/Paddle | python/paddle/fluid/tests/unittests/test_rpn_target_assign_op.py | 2 | 15829 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
from test_anchor_generator_op import anchor_generator_in_python
from test_generate_proposal_labels_op import _generate_groundtruth
from test_generate_proposal_labels_op import _bbox_overlaps, _box_to_delta
def rpn_target_assign(anchor_by_gt_overlap,
                      rpn_batch_size_per_im,
                      rpn_positive_overlap,
                      rpn_negative_overlap,
                      rpn_fg_fraction,
                      use_random=True):
    """Reference (numpy) implementation of RPN anchor/target assignment.

    Given the anchors-by-gt IoU matrix, labels anchors as foreground,
    background or ignored, optionally subsampling each group, and returns
    ``(loc_index, score_index, labels, gt_inds, bbox_inside_weight)``.
    """
    num_anchors, num_gts = anchor_by_gt_overlap.shape

    # Best ground-truth box for each anchor, and the overlap achieved.
    best_gt_per_anchor = anchor_by_gt_overlap.argmax(axis=1)
    best_overlap_per_anchor = anchor_by_gt_overlap[
        np.arange(num_anchors), best_gt_per_anchor]

    # Best anchor for each ground-truth box, and the overlap achieved.
    best_anchor_per_gt = anchor_by_gt_overlap.argmax(axis=0)
    best_overlap_per_gt = anchor_by_gt_overlap[
        best_anchor_per_gt, np.arange(num_gts)]

    # Anchors realising some gt's maximum overlap are forced foreground,
    # as are anchors above the positive-overlap threshold. -1 == ignore.
    forced_fg = np.where(anchor_by_gt_overlap == best_overlap_per_gt)[0]
    labels = np.full((num_anchors, ), -1, dtype=np.int32)
    labels[forced_fg] = 1
    labels[best_overlap_per_anchor >= rpn_positive_overlap] = 1

    # Subsample foreground down to the configured fraction of the batch.
    num_fg = int(rpn_fg_fraction * rpn_batch_size_per_im)
    fg_inds = np.where(labels == 1)[0]
    if use_random and len(fg_inds) > num_fg:
        drop_inds = np.random.choice(
            fg_inds, size=(len(fg_inds) - num_fg), replace=False)
    else:
        drop_inds = fg_inds[num_fg:]
    labels[drop_inds] = -1

    fg_inds = np.where(labels == 1)[0]
    bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)

    # Background candidates: best overlap below the negative threshold.
    num_bg = rpn_batch_size_per_im - np.sum(labels == 1)
    bg_inds = np.where(best_overlap_per_anchor < rpn_negative_overlap)[0]
    if use_random and len(bg_inds) > num_bg:
        picked_bg = bg_inds[np.random.randint(len(bg_inds), size=num_bg)]
    else:
        picked_bg = bg_inds[:num_bg]

    # A background pick that coincides with a foreground anchor becomes a
    # "fake" fg entry: it duplicates the first fg index in loc_index but
    # keeps a zero bbox_inside_weight row so it adds nothing to the loss.
    fg_fake_inds = np.array([], np.int32)
    first_fg = np.array([fg_inds[0]], np.int32)
    fake_num = 0
    for bg_id in picked_bg:
        if bg_id in fg_inds:
            fake_num += 1
            fg_fake_inds = np.hstack([fg_fake_inds, first_fg])
    labels[picked_bg] = 0
    bbox_inside_weight[fake_num:, :] = 1

    fg_inds = np.where(labels == 1)[0]
    bg_inds = np.where(labels == 0)[0]
    loc_index = np.hstack([fg_fake_inds, fg_inds])
    score_index = np.hstack([fg_inds, bg_inds])
    labels = labels[score_index]
    assert not np.any(labels == -1), "Wrong labels with -1"
    gt_inds = best_gt_per_anchor[loc_index]
    return loc_index, score_index, labels, gt_inds, bbox_inside_weight
def get_anchor(n, c, h, w):
    """Build the anchor grid used by the tests for an NCHW feature map."""
    feature_map = np.random.random((n, c, h, w)).astype('float32')
    generator_args = dict(
        input_feat=feature_map,
        anchor_sizes=[32., 64.],
        aspect_ratios=[0.5, 1.0],
        variances=[1.0, 1.0, 1.0, 1.0],
        stride=[16.0, 16.0],
        offset=0.5)
    anchor_boxes, _ = anchor_generator_in_python(**generator_args)
    return anchor_boxes
def rpn_target_assign_in_python(all_anchors,
                                gt_boxes,
                                is_crowd,
                                im_info,
                                lod,
                                rpn_straddle_thresh,
                                rpn_batch_size_per_im,
                                rpn_positive_overlap,
                                rpn_negative_overlap,
                                rpn_fg_fraction,
                                use_random=True):
    """Batch driver for :func:`rpn_target_assign`.

    Runs the per-image assignment for each image delimited by ``lod``
    (cumulative gt-box offsets), converts sampled anchors to bbox deltas
    and concatenates the per-image results, offsetting indices by
    ``i * anchor_num`` so they address the flattened batch.
    NOTE(review): assumes at least one image (``len(lod) >= 2``);
    ``loc_indexes`` etc. would be unbound otherwise.
    """
    anchor_num = all_anchors.shape[0]
    batch_size = len(lod) - 1
    for i in range(batch_size):
        # im_info row layout: (height, width, scale) -- see the bindings below.
        im_height = im_info[i][0]
        im_width = im_info[i][1]
        im_scale = im_info[i][2]
        if rpn_straddle_thresh >= 0:
            # Only keep anchors inside the image by a margin of straddle_thresh
            inds_inside = np.where(
                (all_anchors[:, 0] >= -rpn_straddle_thresh) &
                (all_anchors[:, 1] >= -rpn_straddle_thresh) & (
                    all_anchors[:, 2] < im_width + rpn_straddle_thresh) & (
                        all_anchors[:, 3] < im_height + rpn_straddle_thresh))[0]
            # keep only inside anchors
            inside_anchors = all_anchors[inds_inside, :]
        else:
            # Negative threshold disables the filter: use every anchor.
            inds_inside = np.arange(all_anchors.shape[0])
            inside_anchors = all_anchors
        # Slice this image's gt boxes (scaled to input size) and drop crowds.
        b, e = lod[i], lod[i + 1]
        gt_boxes_slice = gt_boxes[b:e, :] * im_scale
        is_crowd_slice = is_crowd[b:e]
        not_crowd_inds = np.where(is_crowd_slice == 0)[0]
        gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
        iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)
        loc_inds, score_inds, labels, gt_inds, bbox_inside_weight = \
            rpn_target_assign(iou, rpn_batch_size_per_im,
                              rpn_positive_overlap,
                              rpn_negative_overlap,
                              rpn_fg_fraction,
                              use_random)
        # unmap to all anchor
        loc_inds = inds_inside[loc_inds]
        score_inds = inds_inside[score_inds]
        # Regression targets: deltas from sampled anchors to their gt boxes.
        sampled_gt = gt_boxes_slice[gt_inds]
        sampled_anchor = all_anchors[loc_inds]
        box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])
        if i == 0:
            # First image initialises the accumulators.
            loc_indexes = loc_inds
            score_indexes = score_inds
            tgt_labels = labels
            tgt_bboxes = box_deltas
            bbox_inside_weights = bbox_inside_weight
        else:
            # Later images append, shifting indices into the flattened batch.
            loc_indexes = np.concatenate(
                [loc_indexes, loc_inds + i * anchor_num])
            score_indexes = np.concatenate(
                [score_indexes, score_inds + i * anchor_num])
            tgt_labels = np.concatenate([tgt_labels, labels])
            tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
            bbox_inside_weights = np.vstack([bbox_inside_weights, \
                bbox_inside_weight])
    return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights
def retinanet_target_assign(anchor_by_gt_overlap, gt_labels, positive_overlap,
                            negative_overlap):
    """Reference (numpy) RetinaNet anchor/target assignment.

    Unlike the RPN variant there is no subsampling: every anchor at or above
    ``positive_overlap`` is foreground and every anchor below
    ``negative_overlap`` is background. Foreground score targets carry the
    matched ground-truth class label rather than a binary 1.

    Returns ``(loc_index, score_index, labels, gt_inds, bbox_inside_weight,
    fg_num)``.
    """
    num_anchors, num_gts = anchor_by_gt_overlap.shape

    # Best gt per anchor and best anchor per gt (with achieved overlaps).
    best_gt_per_anchor = anchor_by_gt_overlap.argmax(axis=1)
    best_overlap_per_anchor = anchor_by_gt_overlap[
        np.arange(num_anchors), best_gt_per_anchor]
    best_anchor_per_gt = anchor_by_gt_overlap.argmax(axis=0)
    best_overlap_per_gt = anchor_by_gt_overlap[
        best_anchor_per_gt, np.arange(num_gts)]

    # Start as "ignore" (-1); force anchors realising a per-gt maximum to
    # foreground, then add anchors above the positive threshold.
    labels = np.full((num_anchors, ), -1, dtype=np.int32)
    labels[np.where(anchor_by_gt_overlap == best_overlap_per_gt)[0]] = 1
    labels[best_overlap_per_anchor >= positive_overlap] = 1

    fg_inds = np.where(labels == 1)[0]
    bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)
    bg_inds = np.where(best_overlap_per_anchor < negative_overlap)[0]
    picked_bg = bg_inds

    # A background pick that is also foreground becomes a "fake" fg entry:
    # it duplicates the first fg index in loc_index but keeps a zero
    # bbox_inside_weight row so it does not contribute to the loc loss.
    fg_fake_inds = np.array([], np.int32)
    first_fg = np.array([fg_inds[0]], np.int32)
    fake_num = 0
    for bg_id in picked_bg:
        if bg_id in fg_inds:
            fake_num += 1
            fg_fake_inds = np.hstack([fg_fake_inds, first_fg])
    labels[picked_bg] = 0
    bbox_inside_weight[fake_num:, :] = 1

    fg_inds = np.where(labels == 1)[0]
    bg_inds = np.where(labels == 0)[0]
    loc_index = np.hstack([fg_fake_inds, fg_inds])
    score_index = np.hstack([fg_inds, bg_inds])
    fg_only_index = np.hstack([fg_inds])
    labels = labels[score_index]
    gt_inds = best_gt_per_anchor[loc_index]
    # Replace the binary fg marker with the matched gt class labels.
    label_inds = best_gt_per_anchor[fg_only_index]
    labels[0:len(fg_inds)] = np.squeeze(gt_labels[label_inds])
    # NOTE(review): the extra +1 mirrors the op's foreground count
    # convention -- verify against the C++ kernel before changing it.
    fg_num = len(fg_fake_inds) + len(fg_inds) + 1
    assert not np.any(labels == -1), "Wrong labels with -1"
    return loc_index, score_index, labels, gt_inds, bbox_inside_weight, fg_num
def retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels,
                                      is_crowd, im_info, lod, positive_overlap,
                                      negative_overlap):
    """Batch driver for :func:`retinanet_target_assign`.

    Same accumulation scheme as ``rpn_target_assign_in_python`` (indices
    offset by ``i * anchor_num`` per image), but also slices per-image gt
    class labels and collects the per-image foreground counts.
    NOTE(review): assumes at least one image (``len(lod) >= 2``).
    """
    anchor_num = all_anchors.shape[0]
    batch_size = len(lod) - 1
    for i in range(batch_size):
        im_scale = im_info[i][2]

        # No straddle filtering in the RetinaNet path: keep every anchor.
        inds_inside = np.arange(all_anchors.shape[0])
        inside_anchors = all_anchors
        # Slice this image's gt boxes/labels and drop crowd annotations.
        b, e = lod[i], lod[i + 1]
        gt_boxes_slice = gt_boxes[b:e, :] * im_scale
        gt_labels_slice = gt_labels[b:e, :]
        is_crowd_slice = is_crowd[b:e]

        not_crowd_inds = np.where(is_crowd_slice == 0)[0]
        gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
        gt_labels_slice = gt_labels_slice[not_crowd_inds]
        iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)

        loc_inds, score_inds, labels, gt_inds, bbox_inside_weight, fg_num = \
            retinanet_target_assign(iou, gt_labels_slice,
                                    positive_overlap, negative_overlap)
        # unmap to all anchor
        loc_inds = inds_inside[loc_inds]
        score_inds = inds_inside[score_inds]

        # Regression targets: deltas from sampled anchors to their gt boxes.
        sampled_gt = gt_boxes_slice[gt_inds]
        sampled_anchor = all_anchors[loc_inds]
        box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])

        if i == 0:
            # First image initialises the accumulators.
            loc_indexes = loc_inds
            score_indexes = score_inds
            tgt_labels = labels
            tgt_bboxes = box_deltas
            bbox_inside_weights = bbox_inside_weight
            fg_nums = [[fg_num]]
        else:
            # Later images append, shifting indices into the flattened batch.
            loc_indexes = np.concatenate(
                [loc_indexes, loc_inds + i * anchor_num])
            score_indexes = np.concatenate(
                [score_indexes, score_inds + i * anchor_num])
            tgt_labels = np.concatenate([tgt_labels, labels])
            tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
            bbox_inside_weights = np.vstack([bbox_inside_weights, \
                bbox_inside_weight])
            fg_nums = np.concatenate([fg_nums, [[fg_num]]])

    return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights, fg_nums
class TestRpnTargetAssignOp(OpTest):
    """Checks the ``rpn_target_assign`` op against the numpy reference."""

    def setUp(self):
        # Two-image batch over a 14x14 feature map with 4 anchors per cell.
        n, c, h, w = 2, 4, 14, 14
        all_anchors = get_anchor(n, c, h, w)
        gt_num = 10  # unused; kept for parity with related op tests
        all_anchors = all_anchors.reshape(-1, 4)
        anchor_num = all_anchors.shape[0]

        images_shape = [[64, 64], [64, 64]]
        #images_shape = [[64, 64]]
        groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
        # Fixed lod: 4 gt boxes per image (overrides the generated one).
        lod = [0, 4, 8]
        #lod = [0, 4]

        # im_info rows: (height, width, scale).
        im_info = np.ones((len(images_shape), 3)).astype(np.float32)
        for i in range(len(images_shape)):
            im_info[i, 0] = images_shape[i][0]
            im_info[i, 1] = images_shape[i][1]
            im_info[i, 2] = 0.8  #scale
        gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
        is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])

        all_anchors = all_anchors.astype('float32')
        gt_boxes = gt_boxes.astype('float32')

        rpn_straddle_thresh = 0.0
        rpn_batch_size_per_im = 256
        rpn_positive_overlap = 0.7
        rpn_negative_overlap = 0.3
        rpn_fg_fraction = 0.5
        # Deterministic sampling so the op output matches the reference.
        use_random = False

        # Expected outputs from the numpy reference implementation.
        loc_index, score_index, tgt_bbox, labels, bbox_inside_weights = \
            rpn_target_assign_in_python(all_anchors, gt_boxes, is_crowd,
                                        im_info, lod, rpn_straddle_thresh,
                                        rpn_batch_size_per_im, rpn_positive_overlap,
                                        rpn_negative_overlap,
                                        rpn_fg_fraction, use_random)
        labels = labels[:, np.newaxis]

        self.op_type = "rpn_target_assign"
        self.inputs = {
            'Anchor': all_anchors,
            'GtBoxes': (gt_boxes, [[4, 4]]),
            'IsCrowd': (is_crowd, [[4, 4]]),
            'ImInfo': (im_info, [[1, 1]])
        }
        self.attrs = {
            'rpn_batch_size_per_im': rpn_batch_size_per_im,
            'rpn_straddle_thresh': rpn_straddle_thresh,
            'rpn_positive_overlap': rpn_positive_overlap,
            'rpn_negative_overlap': rpn_negative_overlap,
            'rpn_fg_fraction': rpn_fg_fraction,
            'use_random': use_random
        }
        self.outputs = {
            'LocationIndex': loc_index.astype('int32'),
            'ScoreIndex': score_index.astype('int32'),
            'TargetBBox': tgt_bbox.astype('float32'),
            'TargetLabel': labels.astype('int32'),
            'BBoxInsideWeight': bbox_inside_weights.astype('float32')
        }

    def test_check_output(self):
        """Run the op and compare every output tensor to the reference."""
        self.check_output()
class TestRetinanetTargetAssignOp(OpTest):
    """Checks the ``retinanet_target_assign`` op against the numpy reference."""

    def setUp(self):
        # Two-image batch over a 14x14 feature map with 4 anchors per cell.
        n, c, h, w = 2, 4, 14, 14
        all_anchors = get_anchor(n, c, h, w)
        gt_num = 10  # unused; kept for parity with related op tests
        all_anchors = all_anchors.reshape(-1, 4)
        anchor_num = all_anchors.shape[0]

        images_shape = [[64, 64], [64, 64]]
        groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
        # Fixed lod: 4 gt boxes per image (overrides the generated one).
        lod = [0, 4, 8]

        # im_info rows: (height, width, scale).
        im_info = np.ones((len(images_shape), 3)).astype(np.float32)
        for i in range(len(images_shape)):
            im_info[i, 0] = images_shape[i][0]
            im_info[i, 1] = images_shape[i][1]
            im_info[i, 2] = 0.8  #scale
        gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
        is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])
        # Flatten per-image class labels into a single (num_gt, 1) column.
        gt_labels = np.vstack([
            v['gt_classes'].reshape(len(v['gt_classes']), 1)
            for v in groundtruth
        ])
        gt_labels = gt_labels.reshape(len(gt_labels), 1)

        all_anchors = all_anchors.astype('float32')
        gt_boxes = gt_boxes.astype('float32')
        gt_labels = gt_labels.astype('int32')

        positive_overlap = 0.5
        negative_overlap = 0.4

        # Expected outputs from the numpy reference implementation.
        loc_index, score_index, tgt_bbox, labels, bbox_inside_weights, fg_num = \
            retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels, is_crowd,
                                              im_info, lod, positive_overlap, negative_overlap)
        labels = labels[:, np.newaxis]
        self.op_type = "retinanet_target_assign"
        self.inputs = {
            'Anchor': all_anchors,
            'GtBoxes': (gt_boxes, [[4, 4]]),
            'GtLabels': (gt_labels, [[4, 4]]),
            'IsCrowd': (is_crowd, [[4, 4]]),
            'ImInfo': (im_info, [[1, 1]])
        }
        self.attrs = {
            'positive_overlap': positive_overlap,
            'negative_overlap': negative_overlap
        }
        self.outputs = {
            'LocationIndex': loc_index.astype('int32'),
            'ScoreIndex': score_index.astype('int32'),
            'TargetBBox': tgt_bbox.astype('float32'),
            'TargetLabel': labels.astype('int32'),
            'BBoxInsideWeight': bbox_inside_weights.astype('float32'),
            'ForegroundNumber': fg_num.astype('int32')
        }

    def test_check_output(self):
        """Run the op and compare every output tensor to the reference."""
        self.check_output()
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
dyoung418/tensorflow | tensorflow/contrib/ffmpeg/encode_audio_op_test.py | 54 | 4151 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for third_party.tensorflow.contrib.ffmpeg.encode_audio_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import six
from tensorflow.contrib import ffmpeg
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
class EncodeAudioOpTest(test.TestCase):
def setUp(self):
super(EncodeAudioOpTest, self).setUp()
path = os.path.join(resource_loader.get_data_files_path(),
'testdata/mono_10khz.wav')
with open(path, 'rb') as f:
self._contents = f.read()
def _compareWavFiles(self, original, encoded):
"""Compares the important bits of two WAV files.
Some encoders will create a slightly different header to the WAV file.
This compares only the important bits of the header as well as the contents.
Args:
original: Contents of the original .wav file.
encoded: Contents of the new, encoded .wav file.
"""
self.assertLess(44, len(original))
self.assertLess(44, len(encoded))
self.assertEqual(original[:4], encoded[:4])
# Skip file size
self.assertEqual(original[8:16], encoded[8:16])
# Skip header size
self.assertEqual(original[20:36], encoded[20:36])
# Skip extra bits inserted by ffmpeg.
self.assertEqual(original[original.find(b'data'):],
encoded[encoded.find(b'data'):])
def testRoundTrip(self):
"""Reads a wav file, writes it, and compares them."""
with self.test_session():
audio_op = ffmpeg.decode_audio(
self._contents,
file_format='wav',
samples_per_second=10000,
channel_count=1)
encode_op = ffmpeg.encode_audio(
audio_op, file_format='wav', samples_per_second=10000)
encoded_contents = encode_op.eval()
self._compareWavFiles(self._contents, encoded_contents)
def testRoundTripWithPlaceholderSampleRate(self):
with self.test_session():
placeholder = array_ops.placeholder(dtypes.int32)
audio_op = ffmpeg.decode_audio(
self._contents,
file_format='wav',
samples_per_second=placeholder,
channel_count=1)
encode_op = ffmpeg.encode_audio(
audio_op, file_format='wav', samples_per_second=placeholder)
encoded_contents = encode_op.eval(feed_dict={placeholder: 10000})
self._compareWavFiles(self._contents, encoded_contents)
def testFloatingPointSampleRateInvalid(self):
with self.test_session():
with self.assertRaises(TypeError):
ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=12345.678)
def testZeroSampleRateInvalid(self):
with self.test_session() as sess:
encode_op = ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=0)
with six.assertRaisesRegex(self, Exception, 'must be positive'):
sess.run(encode_op)
def testNegativeSampleRateInvalid(self):
with self.test_session() as sess:
encode_op = ffmpeg.encode_audio(
[[0.0], [1.0]],
file_format='wav',
samples_per_second=-2)
with six.assertRaisesRegex(self, Exception, 'must be positive'):
sess.run(encode_op)
if __name__ == '__main__':
test.main()
| apache-2.0 |
vitaly4uk/django | tests/generic_views/test_list.py | 309 | 12129 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.utils.encoding import force_str
from django.views.generic.base import View
from .models import Artist, Author, Book, Page
@override_settings(ROOT_URLCONF='generic_views.urls')
class ListViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name='Rene Magritte')
cls.author1 = Author.objects.create(name='Roberto Bolaño', slug='roberto-bolano')
cls.author2 = Author.objects.create(name='Scott Rosenberg', slug='scott-rosenberg')
cls.book1 = Book.objects.create(name='2066', slug='2066', pages=800, pubdate=datetime.date(2008, 10, 1))
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name='Dreaming in Code', slug='dreaming-in-code', pages=300, pubdate=datetime.date(2006, 5, 1)
)
cls.page1 = Page.objects.create(
content='I was once bitten by a moose.', template='generic_views/page_template.html'
)
def test_items(self):
res = self.client.get('/list/dict/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/list.html')
self.assertEqual(res.context['object_list'][0]['first'], 'John')
def test_queryset(self):
res = self.client.get('/list/authors/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertIsInstance(res.context['view'], View)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertIsNone(res.context['paginator'])
self.assertIsNone(res.context['page_obj'])
self.assertFalse(res.context['is_paginated'])
def test_paginated_queryset(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(len(res.context['object_list']), 30)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertTrue(res.context['is_paginated'])
self.assertEqual(res.context['page_obj'].number, 1)
self.assertEqual(res.context['paginator'].num_pages, 4)
self.assertEqual(res.context['author_list'][0].name, 'Author 00')
self.assertEqual(list(res.context['author_list'])[-1].name, 'Author 29')
def test_paginated_queryset_shortdata(self):
# Test that short datasets ALSO result in a paginated view.
res = self.client.get('/list/authors/paginated/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertEqual(res.context['page_obj'].number, 1)
self.assertEqual(res.context['paginator'].num_pages, 1)
self.assertFalse(res.context['is_paginated'])
def test_paginated_get_page_by_query_string(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/', {'page': '2'})
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(len(res.context['object_list']), 30)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertEqual(res.context['author_list'][0].name, 'Author 30')
self.assertEqual(res.context['page_obj'].number, 2)
def test_paginated_get_last_page_by_query_string(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/', {'page': 'last'})
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context['object_list']), 10)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertEqual(res.context['author_list'][0].name, 'Author 90')
self.assertEqual(res.context['page_obj'].number, 4)
def test_paginated_get_page_by_urlvar(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/3/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(len(res.context['object_list']), 30)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertEqual(res.context['author_list'][0].name, 'Author 60')
self.assertEqual(res.context['page_obj'].number, 3)
def test_paginated_page_out_of_range(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/42/')
self.assertEqual(res.status_code, 404)
def test_paginated_invalid_page(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/?page=frog')
self.assertEqual(res.status_code, 404)
def test_paginated_custom_paginator_class(self):
self._make_authors(7)
res = self.client.get('/list/authors/paginated/custom_class/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['paginator'].num_pages, 1)
# Custom pagination allows for 2 orphans on a page size of 5
self.assertEqual(len(res.context['object_list']), 7)
def test_paginated_custom_page_kwarg(self):
self._make_authors(100)
res = self.client.get('/list/authors/paginated/custom_page_kwarg/', {'pagina': '2'})
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
self.assertEqual(len(res.context['object_list']), 30)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertEqual(res.context['author_list'][0].name, 'Author 30')
self.assertEqual(res.context['page_obj'].number, 2)
def test_paginated_custom_paginator_constructor(self):
self._make_authors(7)
res = self.client.get('/list/authors/paginated/custom_constructor/')
self.assertEqual(res.status_code, 200)
# Custom pagination allows for 2 orphans on a page size of 5
self.assertEqual(len(res.context['object_list']), 7)
def test_paginated_orphaned_queryset(self):
self._make_authors(92)
res = self.client.get('/list/authors/paginated-orphaned/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 1)
res = self.client.get(
'/list/authors/paginated-orphaned/', {'page': 'last'})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 3)
res = self.client.get(
'/list/authors/paginated-orphaned/', {'page': '3'})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 3)
res = self.client.get(
'/list/authors/paginated-orphaned/', {'page': '4'})
self.assertEqual(res.status_code, 404)
def test_paginated_non_queryset(self):
res = self.client.get('/list/dict/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context['object_list']), 1)
def test_verbose_name(self):
res = self.client.get('/list/artists/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'generic_views/list.html')
self.assertEqual(list(res.context['object_list']), list(Artist.objects.all()))
self.assertIs(res.context['artist_list'], res.context['object_list'])
self.assertIsNone(res.context['paginator'])
self.assertIsNone(res.context['page_obj'])
self.assertFalse(res.context['is_paginated'])
def test_allow_empty_false(self):
res = self.client.get('/list/authors/notempty/')
self.assertEqual(res.status_code, 200)
Author.objects.all().delete()
res = self.client.get('/list/authors/notempty/')
self.assertEqual(res.status_code, 404)
def test_template_name(self):
res = self.client.get('/list/authors/template_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertTemplateUsed(res, 'generic_views/list.html')
def test_template_name_suffix(self):
res = self.client.get('/list/authors/template_name_suffix/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertTemplateUsed(res, 'generic_views/author_objects.html')
def test_context_object_name(self):
res = self.client.get('/list/authors/context_object_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertNotIn('authors', res.context)
self.assertIs(res.context['author_list'], res.context['object_list'])
self.assertTemplateUsed(res, 'generic_views/author_list.html')
def test_duplicate_context_object_name(self):
res = self.client.get('/list/authors/dupe_context_object_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['object_list']), list(Author.objects.all()))
self.assertNotIn('authors', res.context)
self.assertNotIn('author_list', res.context)
self.assertTemplateUsed(res, 'generic_views/author_list.html')
def test_missing_items(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/list/authors/invalid/')
def test_paginated_list_view_does_not_load_entire_table(self):
# Regression test for #17535
self._make_authors(3)
# 1 query for authors
with self.assertNumQueries(1):
self.client.get('/list/authors/notempty/')
# same as above + 1 query to test if authors exist + 1 query for pagination
with self.assertNumQueries(3):
self.client.get('/list/authors/notempty/paginated/')
def test_explicitly_ordered_list_view(self):
Book.objects.create(name="Zebras for Dummies", pages=800, pubdate=datetime.date(2006, 9, 1))
res = self.client.get('/list/books/sorted/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object_list'][0].name, '2066')
self.assertEqual(res.context['object_list'][1].name, 'Dreaming in Code')
self.assertEqual(res.context['object_list'][2].name, 'Zebras for Dummies')
res = self.client.get('/list/books/sortedbypagesandnamedec/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object_list'][0].name, 'Dreaming in Code')
self.assertEqual(res.context['object_list'][1].name, 'Zebras for Dummies')
self.assertEqual(res.context['object_list'][2].name, '2066')
@override_settings(DEBUG=True)
def test_paginated_list_view_returns_useful_message_on_invalid_page(self):
# test for #19240
# tests that source exception's message is included in page
self._make_authors(1)
res = self.client.get('/list/authors/paginated/2/')
self.assertEqual(res.status_code, 404)
self.assertEqual(force_str(res.context.get('reason')),
"Invalid page (2): That page contains no results")
def _make_authors(self, n):
Author.objects.all().delete()
for i in range(n):
Author.objects.create(name='Author %02i' % i, slug='a%s' % i)
| bsd-3-clause |
tsdmgz/ansible | test/units/modules/network/nxos/test_nxos_vlan.py | 4 | 3951 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_vlan
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosVlanModule(TestNxosModule):
    """Unit tests for the nxos_vlan module with all device I/O mocked out."""

    module = nxos_vlan

    def setUp(self):
        """Patch the device-facing helpers so no real connection is made."""
        super(TestNxosVlanModule, self).setUp()
        self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_vlan.run_commands')
        self.run_commands = self.mock_run_commands.start()

        self.mock_load_config = patch('ansible.modules.network.nxos.nxos_vlan.load_config')
        self.load_config = self.mock_load_config.start()

        self.mock_get_config = patch('ansible.modules.network.nxos.nxos_vlan.get_config')
        self.get_config = self.mock_get_config.start()

    def tearDown(self):
        """Stop every patcher started in setUp."""
        super(TestNxosVlanModule, self).tearDown()
        self.mock_run_commands.stop()
        self.mock_load_config.stop()
        # Bug fix: this patcher was started in setUp but never stopped,
        # leaking the patched get_config into any test run afterwards.
        self.mock_get_config.stop()

    def load_fixtures(self, commands=None, device=''):
        """Feed canned CLI output (from fixture files) to run_commands."""

        def load_from_file(*args, **kwargs):
            module, commands = args
            output = list()

            for item in commands:
                try:
                    # Commands may arrive JSON-encoded with a 'command' key,
                    # or as plain strings.
                    obj = json.loads(item)
                    command = obj['command']
                except ValueError:
                    command = item
                # Map e.g. "show vlan | json" -> fixture "show_vlan.txt".
                filename = '%s.txt' % str(command).split(' | ')[0].replace(' ', '_')
                output.append(load_fixture('nxos_vlan', filename))
            return output

        self.run_commands.side_effect = load_from_file
        self.load_config.return_value = None

    def test_nxos_vlan_range(self):
        set_module_args(dict(vlan_range='6-10'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['vlan 6', 'vlan 7', 'vlan 8', 'vlan 9', 'vlan 10'])

    def test_nxos_vlan_range_absent(self):
        set_module_args(dict(vlan_range='1-5', state='absent'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['no vlan 1'])

    def test_nxos_vlan_id(self):
        set_module_args(dict(vlan_id='15', state='present'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['vlan 15', 'exit'])

    def test_nxos_vlan_id_absent(self):
        set_module_args(dict(vlan_id='1', state='absent'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['no vlan 1'])

    def test_nxos_vlan_named_vlan(self):
        set_module_args(dict(vlan_id='15', name='WEB'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['vlan 15', 'name WEB', 'exit'])

    def test_nxos_vlan_shut_down(self):
        set_module_args(dict(vlan_id='1', admin_state='down'))
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], ['vlan 1', 'shutdown', 'exit'])

    def test_nxos_vlan_no_change(self):
        set_module_args(dict(vlan_id='1', name='default', vlan_state='active', admin_state='up'))
        result = self.execute_module(changed=False)
        self.assertEqual(result['commands'], [])
| gpl-3.0 |
acdha/django-test-utils | test_utils/management/commands/relational_dumpdata.py | 3 | 3257 | from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
from optparse import make_option
from django.db.models.fields.related import ForeignKey, ManyToManyField
from django.db.models import get_app, get_apps, get_models
def _relational_dumpdata(app, collected):
    """Collect every object in ``app`` plus all objects they directly reference.

    Follows ForeignKey and ManyToMany relations one hop outward so that the
    resulting fixture is loadable without dangling references.
    (Relation walk based on http://www.djangosnippets.org/snippets/918/)

    Args:
        app: a Django app module, as returned by ``get_app``.
        collected: set of ``(model_class, pk)`` pairs already gathered; used
            to avoid serializing the same object twice across apps.

    Returns:
        ``(objects, collected)`` — the list of model instances to serialize
        and the updated de-duplication set.
    """
    objects = []
    for mod in get_models(app):
        objects.extend(mod._default_manager.all())

    # Mark everything gathered so far before chasing relations.
    collected = collected.union(set([(x.__class__, x.pk) for x in objects]))

    related = []
    for obj in objects:
        for f in obj._meta.fields:
            if isinstance(f, ForeignKey):
                target = getattr(obj, f.name)  # instantiate related object
                if target and not (target.__class__, target.pk) in collected:
                    collected.add((target.__class__, target.pk))
                    related.append(target)
        for f in obj._meta.many_to_many:
            if isinstance(f, ManyToManyField):
                for target in getattr(obj, f.name).all():
                    if target and not (target.__class__, target.pk) in collected:
                        collected.add((target.__class__, target.pk))
                        related.append(target)
    # Idiomatic emptiness check (was: related != []).
    if related:
        objects.extend(related)
    return (objects, collected)
class Command(BaseCommand):
    """dumpdata variant that also pulls in directly related objects."""

    option_list = BaseCommand.option_list + (
        make_option('--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.'),
        make_option('--indent', default=None, dest='indent', type='int',
            help='Specifies the indent level to use when pretty-printing output'),
        make_option('-e', '--exclude', dest='exclude', action='append', default=[],
            help='App to exclude (use multiple --exclude to exclude multiple apps).'),
    )
    help = 'Output the contents of the database as a fixture of the given format.'
    args = '[appname ...]'

    def handle(self, *app_labels, **options):
        format = options.get('format', 'json')
        indent = options.get('indent', None)
        exclude = options.get('exclude', [])
        show_traceback = options.get('traceback', False)

        excluded_apps = [get_app(app_label) for app_label in exclude]

        if len(app_labels) == 0:
            app_list = [app for app in get_apps() if app not in excluded_apps]
        else:
            app_list = [get_app(app_label) for app_label in app_labels]

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Bug fix: the previous code re-assigned `objects` on every loop
        # iteration, so with more than one app only the LAST app's objects
        # were serialized (earlier apps' objects were even suppressed via
        # `collected`). Accumulate each app's objects instead.
        objects = []
        collected = set()
        for app in app_list:
            app_objects, collected = _relational_dumpdata(app, collected)
            objects.extend(app_objects)

        try:
            return serializers.serialize(format, objects, indent=indent)
        except Exception as e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
| mit |
vmindru/ansible | lib/ansible/modules/notification/logentries_msg.py | 75 | 2254 | #!/usr/bin/python
# Copyright 2017, Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: logentries_msg
version_added: "2.5"
short_description: Send a message to logentries.
description:
- Send a message to logentries
requirements:
- "python >= 2.6"
options:
token:
description:
- Log token.
required: true
msg:
description:
- The message body.
required: true
api:
description:
- API endpoint
default: data.logentries.com
port:
description:
- API endpoint port
default: 80
author: "Jimmy Tang (@jcftang) <jimmy_tang@rapid7.com>"
'''
RETURN = '''# '''
EXAMPLES = '''
- logentries_msg:
token=00000000-0000-0000-0000-000000000000
msg="{{ ansible_hostname }}"
'''
import socket
from ansible.module_utils.basic import AnsibleModule
def send_msg(module, token, msg, api, port):
    """Send one token-prefixed log line to the Logentries TCP endpoint.

    Args:
        module: AnsibleModule instance (consulted for check_mode, used to
            report failures via fail_json).
        token: Logentries log token prepended to the message.
        msg: message body.
        api: API hostname to resolve and connect to.
        port: TCP port of the endpoint.
    """
    message = "{} {}\n".format(token, msg)

    api_ip = socket.gethostbyname(api)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((api_ip, port))
    try:
        if not module.check_mode:
            # Bug fix: socket.send() requires bytes on Python 3; the old code
            # passed a text string and would raise TypeError there.
            data = message if isinstance(message, bytes) else message.encode('utf-8')
            s.send(data)
    except Exception as e:
        module.fail_json(msg="failed to send message, msg=%s" % e)
    finally:
        # Close the socket even when fail_json() exits the module (the old
        # code leaked the descriptor on failure).
        s.close()
def main():
    """Ansible module entry point: parse arguments and ship the message."""
    argument_spec = dict(
        token=dict(type='str', required=True),
        msg=dict(type='str', required=True),
        api=dict(type='str', default="data.logentries.com"),
        port=dict(type='int', default=80),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    params = module.params
    token = params["token"]
    msg = params["msg"]
    api = params["api"]
    port = params["port"]

    changed = False
    try:
        send_msg(module, token, msg, api, port)
        changed = True
    except Exception as e:
        # Any transport error is reported as a module failure.
        module.fail_json(msg="unable to send msg: %s" % e)

    module.exit_json(changed=changed, msg=msg)
| gpl-3.0 |
SamYaple/ansible-modules-extras | messaging/rabbitmq_exchange.py | 46 | 7117 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Manuel Sousa <manuel.sousa@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: rabbitmq_exchange
author: "Manuel Sousa (@manuel-sousa)"
version_added: "2.0"
short_description: This module manages rabbitMQ exchanges
description:
- This module uses rabbitMQ Rest API to create/delete exchanges
requirements: [ python requests ]
options:
name:
description:
- Name of the exchange to create
required: true
state:
description:
- Whether the exchange should be present or absent
- Only present implemented atm
choices: [ "present", "absent" ]
required: false
default: present
login_user:
description:
- rabbitMQ user for connection
required: false
default: guest
login_password:
description:
- rabbitMQ password for connection
required: false
default: false
login_host:
description:
- rabbitMQ host for connection
required: false
default: localhost
login_port:
description:
- rabbitMQ management api port
required: false
default: 15672
vhost:
description:
- rabbitMQ virtual host
required: false
default: "/"
durable:
description:
- whether exchange is durable or not
required: false
choices: [ "yes", "no" ]
default: yes
exchange_type:
description:
- type for the exchange
required: false
choices: [ "fanout", "direct", "headers", "topic" ]
aliases: [ "type" ]
default: direct
auto_delete:
description:
- if the exchange should delete itself after all queues/exchanges unbound from it
required: false
choices: [ "yes", "no" ]
default: no
internal:
description:
- exchange is available only for other exchanges
required: false
choices: [ "yes", "no" ]
default: no
arguments:
description:
- extra arguments for exchange. If defined this argument is a key/value dictionary
required: false
default: {}
'''
EXAMPLES = '''
# Create direct exchange
- rabbitmq_exchange: name=directExchange
# Create topic exchange on vhost
- rabbitmq_exchange: name=topicExchange type=topic vhost=myVhost
'''
import requests
import urllib
import json
def main():
    """Ansible entry point: create or delete a RabbitMQ exchange through the
    management REST API, honouring check mode.

    Exits via module.exit_json / module.fail_json in every path.
    """
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(default='present', choices=['present', 'absent'], type='str'),
            name = dict(required=True, type='str'),
            login_user = dict(default='guest', type='str'),
            login_password = dict(default='guest', type='str', no_log=True),
            login_host = dict(default='localhost', type='str'),
            login_port = dict(default='15672', type='str'),
            vhost = dict(default='/', type='str'),
            durable = dict(default=True, type='bool'),
            auto_delete = dict(default=False, type='bool'),
            internal = dict(default=False, type='bool'),
            exchange_type = dict(default='direct', aliases=['type'], type='str'),
            arguments = dict(default=dict(), type='dict')
        ),
        supports_check_mode = True
    )

    # Management API endpoint for this exchange. vhost and name are
    # percent-encoded with no safe characters, so '/' becomes %2F.
    # NOTE(review): urllib.quote is Python 2 only.
    url = "http://%s:%s/api/exchanges/%s/%s" % (
        module.params['login_host'],
        module.params['login_port'],
        urllib.quote(module.params['vhost'],''),
        urllib.quote(module.params['name'],'')
    )

    # Check if exchange already exists
    r = requests.get( url, auth=(module.params['login_user'],module.params['login_password']))

    if r.status_code==200:
        exchange_exists = True
        response = r.json()
    elif r.status_code==404:
        exchange_exists = False
        response = r.text
    else:
        module.fail_json(
            msg = "Invalid response from RESTAPI when trying to check if exchange exists",
            details = r.text
        )

    # Decide whether any change is needed for the requested state.
    if module.params['state']=='present':
        change_required = not exchange_exists
    else:
        change_required = exchange_exists

    # Check if attributes change on existing exchange: the management API
    # cannot alter an existing exchange, so a mismatch is a hard failure.
    if not change_required and r.status_code==200 and module.params['state'] == 'present':
        if not (
            response['durable'] == module.params['durable'] and
            response['auto_delete'] == module.params['auto_delete'] and
            response['internal'] == module.params['internal'] and
            response['type'] == module.params['exchange_type']
        ):
            module.fail_json(
                msg = "RabbitMQ RESTAPI doesn't support attribute changes for existing exchanges"
            )

    # Exit if check_mode: report what would have been done without doing it.
    if module.check_mode:
        module.exit_json(
            changed= change_required,
            name = module.params['name'],
            details = response,
            arguments = module.params['arguments']
        )

    # Do changes
    if change_required:
        if module.params['state'] == 'present':
            # PUT creates the exchange with the requested attributes.
            r = requests.put(
                url,
                auth = (module.params['login_user'],module.params['login_password']),
                headers = { "content-type": "application/json"},
                data = json.dumps({
                    "durable": module.params['durable'],
                    "auto_delete": module.params['auto_delete'],
                    "internal": module.params['internal'],
                    "type": module.params['exchange_type'],
                    "arguments": module.params['arguments']
                })
            )
        elif module.params['state'] == 'absent':
            r = requests.delete( url, auth = (module.params['login_user'],module.params['login_password']))

        # 204 No Content signals success for both PUT and DELETE.
        if r.status_code == 204:
            module.exit_json(
                changed = True,
                name = module.params['name']
            )
        else:
            # NOTE(review): this message says "creating" even on delete
            # failures -- presumably an oversight; confirm before changing.
            module.fail_json(
                msg = "Error creating exchange",
                status = r.status_code,
                details = r.text
            )

    else:
        module.exit_json(
            changed = False,
            name = module.params['name']
        )

# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
nrwahl2/ansible | test/units/module_utils/test_postgresql.py | 63 | 2863 | import json
import sys
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import builtins
from ansible.module_utils._text import to_native
from units.mock.procenv import swap_stdin_and_argv
import pprint
realimport = builtins.__import__
class TestPostgres(unittest.TestCase):
    """Tests for module_utils.postgres psycopg2 detection and version checks,
    driven by patching builtins.__import__."""

    def clear_modules(self, mods):
        # Drop cached modules so the next __import__ actually re-executes
        # them and re-evaluates HAS_PSYCOPG2 under the patched importer.
        for mod in mods:
            if mod in sys.modules:
                del sys.modules[mod]

    @patch.object(builtins, '__import__')
    def test_postgres_pg2_missing_ensure_libs(self, mock_import):
        def _mock_import(name, *args, **kwargs):
            # Simulate psycopg2 being absent; everything else imports normally.
            if name == 'psycopg2':
                raise ImportError
            return realimport(name, *args, **kwargs)

        self.clear_modules(['psycopg2', 'ansible.module_utils.postgres'])
        mock_import.side_effect = _mock_import
        mod = builtins.__import__('ansible.module_utils.postgres')
        self.assertFalse(mod.module_utils.postgres.HAS_PSYCOPG2)
        with self.assertRaises(mod.module_utils.postgres.LibraryError) as context:
            mod.module_utils.postgres.ensure_libs(sslrootcert=None)
        self.assertIn('psycopg2 is not installed', to_native(context.exception))

    @patch.object(builtins, '__import__')
    def test_postgres_pg2_found_ensure_libs(self, mock_import):
        def _mock_import(name, *args, **kwargs):
            # Simulate psycopg2 (and submodules) being importable.
            if 'psycopg2' in name:
                return MagicMock()
            return realimport(name, *args, **kwargs)

        self.clear_modules(['psycopg2', 'ansible.module_utils.postgres'])
        mock_import.side_effect = _mock_import
        mod = builtins.__import__('ansible.module_utils.postgres')
        self.assertTrue(mod.module_utils.postgres.HAS_PSYCOPG2)
        ensure_ret = mod.module_utils.postgres.ensure_libs(sslrootcert=None)
        self.assertFalse(ensure_ret)
        pprint.pprint(ensure_ret)

    @patch.object(builtins, '__import__')
    def test_postgres_pg2_found_ensure_libs_old_version(self, mock_import):
        def _mock_import(name, *args, **kwargs):
            # Simulate an installed psycopg2 that is too old for sslrootcert.
            if 'psycopg2' in name:
                m = MagicMock()
                m.__version__ = '2.4.1'
                return m
            return realimport(name, *args, **kwargs)

        self.clear_modules(['psycopg2', 'ansible.module_utils.postgres'])
        mock_import.side_effect = _mock_import
        mod = builtins.__import__('ansible.module_utils.postgres')
        self.assertTrue(mod.module_utils.postgres.HAS_PSYCOPG2)
        with self.assertRaises(mod.module_utils.postgres.LibraryError) as context:
            mod.module_utils.postgres.ensure_libs(sslrootcert='yes')
        self.assertIn('psycopg2 must be at least 2.4.3 in order to use', to_native(context.exception))
| gpl-3.0 |
mverzett/rootpy | docs/sphinxext/numpydoc/numpydoc.py | 6 | 5581 | """
========
numpydoc
========
Sphinx extension that handles docstrings in the Numpy standard format. [1]
It will:
- Convert Parameters etc. sections to field lists.
- Convert See Also section to a See also entry.
- Renumber references.
- Extract the signature from the docstring, if it can't be determined otherwise.
.. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard
"""
import os, re, pydoc
from docscrape_sphinx import get_doc_object, SphinxDocString
from sphinx.util.compat import Directive
import inspect
def mangle_docstrings(app, what, name, obj, options, lines,
reference_offset=[0]):
cfg = dict(use_plots=app.config.numpydoc_use_plots,
show_class_members=app.config.numpydoc_show_class_members)
if what == 'module':
# Strip top title
title_re = re.compile(ur'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
re.I|re.S)
lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n")
else:
doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg)
lines[:] = unicode(doc).split(u"\n")
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
obj.__name__:
if hasattr(obj, '__module__'):
v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__))
else:
v = dict(full_name=obj.__name__)
lines += [u'', u'.. htmlonly::', '']
lines += [u' %s' % x for x in
(app.config.numpydoc_edit_link % v).split("\n")]
# replace reference numbers so that there are no duplicates
references = []
for line in lines:
line = line.strip()
m = re.match(ur'^.. \[([a-z0-9_.-])\]', line, re.I)
if m:
references.append(m.group(1))
# start renaming from the longest string, to avoid overwriting parts
references.sort(key=lambda x: -len(x))
if references:
for i, line in enumerate(lines):
for r in references:
if re.match(ur'^\d+$', r):
new_r = u"R%d" % (reference_offset[0] + int(r))
else:
new_r = u"%s%d" % (r, reference_offset[0])
lines[i] = lines[i].replace(u'[%s]_' % r,
u'[%s]_' % new_r)
lines[i] = lines[i].replace(u'.. [%s]' % r,
u'.. [%s]' % new_r)
reference_offset[0] += len(references)
def mangle_signature(app, what, name, obj, options, sig, retann):
    """Sphinx ``autodoc-process-signature`` hook: pull the signature out of
    a NumPy-format docstring when it cannot be introspected otherwise."""
    # Do not try to inspect classes that don't define `__init__`
    if (inspect.isclass(obj) and
        (not hasattr(obj, '__init__') or
        'initializes x; see ' in pydoc.getdoc(obj.__init__))):
        return '', ''
    # Returning None tells autodoc to keep whatever signature it already has.
    if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
    if not hasattr(obj, '__doc__'): return
    doc = SphinxDocString(pydoc.getdoc(obj))
    if doc['Signature']:
        # Drop everything before the opening parenthesis (the object name).
        sig = re.sub(u"^[^(]*", u"", doc['Signature'])
        return sig, u''
def setup(app, get_doc_object_=get_doc_object):
    """Sphinx extension entry point: register hooks and config values."""
    global get_doc_object
    get_doc_object = get_doc_object_  # allow callers to inject a custom factory
    app.connect('autodoc-process-docstring', mangle_docstrings)
    app.connect('autodoc-process-signature', mangle_signature)
    app.add_config_value('numpydoc_edit_link', None, False)
    app.add_config_value('numpydoc_use_plots', None, False)
    app.add_config_value('numpydoc_show_class_members', True, True)
    # Extra mangling domains
    #app.add_domain(NumpyPythonDomain)
    #app.add_domain(NumpyCDomain)
#------------------------------------------------------------------------------
# Docstring-mangling domains
#------------------------------------------------------------------------------
from docutils.statemachine import ViewList
from sphinx.domains.c import CDomain
from sphinx.domains.python import PythonDomain
class ManglingDomainBase(object):
    """Mixin for Sphinx domains whose directives should run NumPy-style
    docstring mangling before the stock directive executes."""

    # Maps directive name -> objtype string understood by mangle_docstrings.
    directive_mangling_map = {}

    def __init__(self, *args, **kwargs):
        super(ManglingDomainBase, self).__init__(*args, **kwargs)
        self.wrap_mangling_directives()

    def wrap_mangling_directives(self):
        """Replace each mapped directive with a docstring-mangling wrapper."""
        for directive_name, obj_type in self.directive_mangling_map.items():
            original = self.directives[directive_name]
            self.directives[directive_name] = wrap_mangling_directive(
                original, obj_type)
class NumpyPythonDomain(ManglingDomainBase, PythonDomain):
    """Python domain variant whose directives mangle NumPy docstrings."""
    name = 'np'
    # Directive name -> objtype passed through to mangle_docstrings.
    directive_mangling_map = {
        'function': 'function',
        'class': 'class',
        'exception': 'class',
        'method': 'function',
        'classmethod': 'function',
        'staticmethod': 'function',
        'attribute': 'attribute',
    }
class NumpyCDomain(ManglingDomainBase, CDomain):
    """C domain variant whose directives mangle NumPy docstrings."""
    name = 'np-c'
    # Directive name -> objtype passed through to mangle_docstrings.
    directive_mangling_map = {
        'function': 'function',
        'member': 'attribute',
        'macro': 'function',
        'type': 'class',
        'var': 'object',
    }
def wrap_mangling_directive(base_directive, objtype):
    """Return a subclass of *base_directive* that runs ``mangle_docstrings``
    (as *objtype*) on its content before delegating to the base directive."""
    class directive(base_directive):
        def run(self):
            env = self.state.document.settings.env
            name = None
            if self.arguments:
                # Pull the object name out of the directive argument,
                # e.g. "modname.func(args)" -> "func".
                m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0])
                name = m.group(2).strip()
            if not name:
                name = self.arguments[0]
            lines = list(self.content)
            mangle_docstrings(env.app, objtype, name, None, None, lines)
            self.content = ViewList(lines, self.content.parent)
            return base_directive.run(self)
    return directive
| gpl-3.0 |
gooftroop/Zeus | contrib/sqlalchemy/dialects/mssql/pyodbc.py | 54 | 9473 | # mssql/pyodbc.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mssql+pyodbc
:name: PyODBC
:dbapi: pyodbc
:connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
:url: http://pypi.python.org/pypi/pyodbc/
Connecting to PyODBC
--------------------
The URL here is to be translated to PyODBC connection strings, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.
DSN Connections
^^^^^^^^^^^^^^^
A DSN-based connection is **preferred** overall when using ODBC. A
basic DSN-based connection looks like::
engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")
Which above, will pass the following connection string to PyODBC::
dsn=mydsn;UID=user;PWD=pass
If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.
Hostname Connections
^^^^^^^^^^^^^^^^^^^^
Hostname-based connections are **not preferred**, however are supported.
The ODBC driver name must be explicitly specified::
engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=SQL+Server+Native+Client+10.0")
.. versionchanged:: 1.0.0 Hostname-based PyODBC connections now require the
SQL Server driver name specified explicitly. SQLAlchemy cannot
choose an optimal default here as it varies based on platform
and installed drivers.
Other keywords interpreted by the Pyodbc dialect to be passed to
``pyodbc.connect()`` in both the DSN and hostname cases include:
``odbc_autotranslate``, ``ansi``, ``unicode_results``, ``autocommit``.
Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A PyODBC connection string can also be sent exactly as specified in
`ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_
into the driver using the parameter ``odbc_connect``. The delimiters must be URL escaped, however,
as illustrated below using ``urllib.quote_plus``::
import urllib
params = urllib.quote_plus("DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password")
engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
Unicode Binds
-------------
The current state of PyODBC on a unix backend with FreeTDS and/or
EasySoft is poor regarding unicode; different OS platforms and versions of
UnixODBC versus IODBC versus FreeTDS/EasySoft versus PyODBC itself
dramatically alter how strings are received. The PyODBC dialect attempts to
use all the information it knows to determine whether or not a Python unicode
literal can be passed directly to the PyODBC driver or not; while SQLAlchemy
can encode these to bytestrings first, some users have reported that PyODBC
mis-handles bytestrings for certain encodings and requires a Python unicode
object, while the author has observed widespread cases where a Python unicode
is completely misinterpreted by PyODBC, particularly when dealing with
the information schema tables used in table reflection, and the value
must first be encoded to a bytestring.
It is for this reason that whether or not unicode literals for bound
parameters be sent to PyODBC can be controlled using the
``supports_unicode_binds`` parameter to ``create_engine()``. When
left at its default of ``None``, the PyODBC dialect will use its
best guess as to whether or not the driver deals with unicode literals
well. When ``False``, unicode literals will be encoded first, and when
``True`` unicode literals will be passed straight through. This is an interim
flag that hopefully should not be needed when the unicode situation stabilizes
for unix + PyODBC.
.. versionadded:: 0.7.7
``supports_unicode_binds`` parameter to ``create_engine()``\ .
"""
from .base import MSExecutionContext, MSDialect, VARBINARY
from ...connectors.pyodbc import PyODBCConnector
from ... import types as sqltypes, util
import decimal
class _ms_numeric_pyodbc(object):
    """Turns Decimals with adjusted() < 0 or > 7 into strings.
    The routines here are needed for older pyodbc versions
    as well as current mxODBC versions.
    """
    def bind_processor(self, dialect):
        super_process = super(_ms_numeric_pyodbc, self).\
            bind_processor(dialect)
        # Only pre-format decimals on drivers known to mangle them.
        if not dialect._need_decimal_fix:
            return super_process
        def process(value):
            if self.asdecimal and \
                    isinstance(value, decimal.Decimal):
                adjusted = value.adjusted()
                if adjusted < 0:
                    return self._small_dec_to_string(value)
                elif adjusted > 7:
                    return self._large_dec_to_string(value)
            # Fall through to the parent's processor (or pass the value
            # through unchanged if there isn't one).
            if super_process:
                return super_process(value)
            else:
                return value
        return process
    # these routines needed for older versions of pyodbc.
    # as of 2.1.8 this logic is integrated.
    def _small_dec_to_string(self, value):
        # Render e.g. Decimal("0.004") as "0.004", avoiding scientific
        # notation which the old drivers could not bind.
        return "%s0.%s%s" % (
            (value < 0 and '-' or ''),
            '0' * (abs(value.adjusted()) - 1),
            "".join([str(nint) for nint in value.as_tuple()[1]]))
    def _large_dec_to_string(self, value):
        _int = value.as_tuple()[1]
        if 'E' in str(value):
            # Value prints in scientific notation: pad with trailing zeros.
            result = "%s%s%s" % (
                (value < 0 and '-' or ''),
                "".join([str(s) for s in _int]),
                "0" * (value.adjusted() - (len(_int) - 1)))
        else:
            if (len(_int) - 1) > value.adjusted():
                # Digits extend past the decimal point: re-insert the point.
                result = "%s%s.%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]),
                    "".join(
                        [str(s) for s in _int][value.adjusted() + 1:]))
            else:
                result = "%s%s" % (
                    (value < 0 and '-' or ''),
                    "".join(
                        [str(s) for s in _int][0:value.adjusted() + 1]))
        return result
return result
class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    """Numeric type with the old-pyodbc decimal workaround mixed in."""
    pass
class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    """Float type with the old-pyodbc decimal workaround mixed in."""
    pass
class _VARBINARY_pyodbc(VARBINARY):
    """VARBINARY that binds None as pyodbc's BinaryNull marker instead of
    a plain SQL NULL, which pyodbc would otherwise type incorrectly."""
    def bind_processor(self, dialect):
        # No DBAPI yet (e.g. during dialect construction): nothing to wrap.
        if dialect.dbapi is None:
            return None
        DBAPIBinary = dialect.dbapi.Binary
        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # pyodbc-specific
                return dialect.dbapi.BinaryNull
        return process
class MSExecutionContext_pyodbc(MSExecutionContext):
    # True when "; select scope_identity()" was appended to the INSERT.
    _embedded_scope_identity = False
    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.
        Background on why "scope_identity()" is preferable to "@@identity":
        http://msdn.microsoft.com/en-us/library/ms190315.aspx
        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?
        """
        super(MSExecutionContext_pyodbc, self).pre_exec()
        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if self._select_lastrowid and \
                self.dialect.use_scope_identity and \
                len(self.parameters[0]):
            self._embedded_scope_identity = True
            self.statement += "; select scope_identity()"
    def post_exec(self):
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            # NOTE(review): `row` is bound only on the success path; the
            # loop terminates either by `break` or by nextset() itself
            # raising once the result sets are exhausted -- confirm.
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error as e:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()
            self._lastrowid = int(row[0])
        else:
            super(MSExecutionContext_pyodbc, self).post_exec()
class MSDialect_pyodbc(PyODBCConnector, MSDialect):
    """SQL Server dialect for the pyodbc DBAPI."""
    execution_ctx_cls = MSExecutionContext_pyodbc
    # Override column types that need pyodbc-specific bind handling.
    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
        }
    )
    def __init__(self, description_encoding=None, **params):
        """Construct the dialect.

        :param description_encoding: optional encoding applied to column
          names in cursor.description; when None the class-level default
          is left in place.

        BUG FIX: ``description_encoding`` is an explicit keyword parameter,
        so it can never appear in ``params`` -- the previous
        ``'description_encoding' in params`` check was always False and the
        caller-supplied value was silently dropped.  Honor the argument
        directly instead.
        """
        if description_encoding is not None:
            self.description_encoding = description_encoding
        super(MSDialect_pyodbc, self).__init__(**params)
        # scope_identity support requires cursor.nextset() so we can skip
        # over empty result sets produced by triggers.
        self.use_scope_identity = self.use_scope_identity and \
            self.dbapi and \
            hasattr(self.dbapi.Cursor, 'nextset')
        # pyodbc < 2.1.8 mishandles very small/large Decimals; see
        # _ms_numeric_pyodbc.
        self._need_decimal_fix = self.dbapi and \
            self._dbapi_version() < (2, 1, 8)
dialect = MSDialect_pyodbc
| mit |
evernym/plenum | plenum/test/bls/test_bls_bft_factory.py | 2 | 1875 | import os
import pytest
from crypto.bls.bls_bft import BlsBft
from crypto.bls.bls_bft_replica import BlsBftReplica
from crypto.bls.bls_key_register import BlsKeyRegister
from plenum.bls.bls_bft_factory import BlsFactoryBftPlenum
from plenum.bls.bls_crypto_factory import create_default_bls_crypto_factory
from plenum.bls.bls_store import BlsStore
# Suffix for the per-test BLS store DB name (see test_bls_store fixture).
i = 0
@pytest.fixture()
def node(txnPoolNodeSet):
    """First node of the pool; the subject of all factory tests."""
    return txnPoolNodeSet[0]
@pytest.fixture()
def bls_bft_factory(node, tdir_for_func):
    """BLS BFT factory wired to the node and a throw-away key directory."""
    dir = os.path.join(tdir_for_func, 'TestNode')
    bls_crypto_factory = create_default_bls_crypto_factory(dir)
    return BlsFactoryBftPlenum(bls_crypto_factory,
                               node)
@pytest.fixture()
def test_bls_store(node):
    """Side-effect fixture: point the node at a test-only BLS store DB.
    NOTE(review): intentionally returns nothing; only the config mutation
    appears to matter -- confirm."""
    node.config.stateSignatureDbName = 'TestBlsStore{}'.format(i)
def test_create_bls_bft(bls_bft_factory, test_bls_store):
    """With keys generated up front, the created BlsBft can sign."""
    bls_bft_factory._bls_factory_crypto.generate_and_store_bls_keys()
    bls_bft = bls_bft_factory.create_bls_bft()
    assert bls_bft
    assert isinstance(bls_bft, BlsBft)
    assert bls_bft.can_sign_bls()
def test_create_bls_bft_no_keys(bls_bft_factory, test_bls_store):
    """Without stored keys, BlsBft is still created but cannot sign."""
    bls_bft = bls_bft_factory.create_bls_bft()
    assert bls_bft
    assert isinstance(bls_bft, BlsBft)
    assert not bls_bft.can_sign_bls()
def test_create_bls_bft_replica(bls_bft_factory):
    """The factory produces a BlsBftReplica for the master replica."""
    bls_bft_replica = bls_bft_factory.create_bls_bft_replica(is_master=True)
    assert bls_bft_replica
    assert isinstance(bls_bft_replica, BlsBftReplica)
def test_create_bls_store(bls_bft_factory, test_bls_store):
    """The factory produces a BlsStore backed by the test DB name."""
    bls_store = bls_bft_factory.create_bls_store()
    assert bls_store
    assert isinstance(bls_store, BlsStore)
def test_create_bls_key_registry(bls_bft_factory):
bls_key_register = bls_bft_factory.create_bls_key_register()
assert bls_key_register
assert isinstance(bls_key_register, BlsKeyRegister)
| apache-2.0 |
simpeg/simpeg | examples/06-dc/plot_dipoledipole_3Dinversion_twospheres.py | 1 | 6653 | """
3D DC inversion of Dipole Dipole array
======================================
This is an example for 3D DC Inversion. The model consists of 2 spheres,
one conductive, the other one resistive compared to the background.
We restrain the inversion to the Core Mesh through the use an Active Cells
mapping that we combine with an exponetial mapping to invert
in log conductivity space. Here mapping, :math:`\\mathcal{M}`,
indicates transformation of our model to a different space:
.. math::
\\sigma = \\mathcal{M}(\\mathbf{m})
Following example will show you how user can implement a 3D DC inversion.
"""
from SimPEG import (
Mesh, Maps, Utils,
DataMisfit, Regularization, Optimization,
InvProblem, Directives, Inversion
)
from SimPEG.EM.Static import DC, Utils as DCUtils
import numpy as np
import matplotlib.pyplot as plt
# Prefer the fast Pardiso direct solver when pymatsolver is installed,
# otherwise fall back to SimPEG's SciPy-based LU solver.
try:
    from pymatsolver import Pardiso as Solver
except ImportError:
    from SimPEG import SolverLU as Solver
# Fix the seed so the synthetic noise (and hence the inversion) is reproducible.
np.random.seed(12345)
# 3D Mesh
#########
# Cell sizes
csx, csy, csz = 1., 1., 0.5
# Number of core cells in each direction
ncx, ncy, ncz = 41, 31, 21
# Number of padding cells to add in each direction
npad = 7
# Vectors of cell lengths in each direction with padding
# Each tuple is (cell size, count, expansion factor); a negative factor
# grows the padding cells moving away from the core region.
hx = [(csx, npad, -1.5), (csx, ncx), (csx, npad, 1.5)]
hy = [(csy, npad, -1.5), (csy, ncy), (csy, npad, 1.5)]
hz = [(csz, npad, -1.5), (csz, ncz)]
# Create mesh and center it ("CCN": centered in x and y, top at z=0)
mesh = Mesh.TensorMesh([hx, hy, hz], x0="CCN")
# 2-spheres Model Creation
##########################
# Spheres parameters (center coordinates and radius, in mesh units)
x0, y0, z0, r0 = -6., 0., -3.5, 3.
x1, y1, z1, r1 = 6., 0., -3.5, 3.
# ln conductivity
ln_sigback = -5.
ln_sigc = -3.
ln_sigr = -6.
# Define model
# Background
mtrue = ln_sigback * np.ones(mesh.nC)
# Conductive sphere
csph = (np.sqrt((mesh.gridCC[:, 0] - x0)**2. + (mesh.gridCC[:, 1] - y0)**2. +
                (mesh.gridCC[:, 2] - z0)**2.)) < r0
mtrue[csph] = ln_sigc * np.ones_like(mtrue[csph])
# Resistive Sphere
rsph = (np.sqrt((mesh.gridCC[:, 0] - x1)**2. + (mesh.gridCC[:, 1] - y1)**2. +
                (mesh.gridCC[:, 2] - z1)**2.)) < r1
mtrue[rsph] = ln_sigr * np.ones_like(mtrue[rsph])
# Extract Core Mesh (the inversion is restrained to these active cells)
xmin, xmax = -20., 20.
ymin, ymax = -15., 15.
zmin, zmax = -10., 0.
xyzlim = np.r_[[[xmin, xmax], [ymin, ymax], [zmin, zmax]]]
actind, meshCore = Utils.meshutils.ExtractCoreMesh(xyzlim, mesh)
# Function to plot cylinder border
def getCylinderPoints(xc, zc, r):
    """Return a closed (N, 2) polyline tracing the circle of radius *r*
    centred at (xc, zc); used to overlay sphere outlines on slice plots."""
    step = r / 10.
    # x samples for each half: left-to-right on top, right-to-left below.
    xs_up = np.arange(-r, r + step, step)
    xs_down = np.arange(r, -r - step, -step)
    # Top half of cylinder
    z_up = np.sqrt(-xs_up**2. + r**2.) + zc
    # Bottom half of cylinder
    z_down = -np.sqrt(-xs_down**2. + r**2.) + zc
    # Shift from x = 0 to xc; drop the last sample of each half so the
    # endpoints are not duplicated where the halves meet.
    upper = np.vstack([xs_up + xc * np.ones_like(xs_up), z_up]).T[0:-1, :]
    lower = np.vstack([xs_down + xc * np.ones_like(xs_down), z_down]).T[0:-1, :]
    outline = np.vstack([upper, lower])
    # Repeat the first point so the polyline closes on itself.
    return np.vstack([outline, upper[0, :]])
# Setup a synthetic Dipole-Dipole Survey
# Three parallel east-west lines at y = 0, +5 and -5.
# Line 1
xmin, xmax = -15., 15.
ymin, ymax = 0., 0.
zmin, zmax = 0, 0
endl = np.array([[xmin, ymin, zmin], [xmax, ymax, zmax]])
survey1 = DCUtils.gen_DCIPsurvey(endl, "dipole-dipole", dim=mesh.dim,
                                 a=3, b=3, n=8)
# Line 2
xmin, xmax = -15., 15.
ymin, ymax = 5., 5.
zmin, zmax = 0, 0
endl = np.array([[xmin, ymin, zmin], [xmax, ymax, zmax]])
survey2 = DCUtils.gen_DCIPsurvey(endl, "dipole-dipole", dim=mesh.dim,
                                 a=3, b=3, n=8)
# Line 3
xmin, xmax = -15., 15.
ymin, ymax = -5., -5.
zmin, zmax = 0, 0
endl = np.array([[xmin, ymin, zmin], [xmax, ymax, zmax]])
survey3 = DCUtils.gen_DCIPsurvey(endl, "dipole-dipole", dim=mesh.dim,
                                 a=3, b=3, n=8)
# Concatenate lines
survey = DC.Survey(survey1.srcList + survey2.srcList + survey3.srcList)
# Setup Problem with exponential mapping and Active cells only in the core mesh
expmap = Maps.ExpMap(mesh)
mapactive = Maps.InjectActiveCells(mesh=mesh, indActive=actind,
                                   valInactive=-5.)
mapping = expmap * mapactive
problem = DC.Problem3D_CC(mesh, sigmaMap=mapping)
problem.pair(survey)
problem.Solver = Solver
# NOTE(review): the dpred() result is discarded here -- presumably a
# warm-up of the forward solve before adding noise; confirm.
survey.dpred(mtrue[actind])
survey.makeSyntheticData(mtrue[actind], std=0.05, force=True)
# Tikhonov Inversion
####################
# Initial Model
m0 = np.median(ln_sigback) * np.ones(mapping.nP)
# Data Misfit
dmis = DataMisfit.l2_DataMisfit(survey)
# Regularization (smallness heavily down-weighted relative to smoothness)
regT = Regularization.Simple(mesh, indActive=actind, alpha_s=1e-6,
                             alpha_x=1., alpha_y=1., alpha_z=1.)
# Optimization Scheme
opt = Optimization.InexactGaussNewton(maxIter=10)
# Form the problem
opt.remember('xc')
invProb = InvProblem.BaseInvProblem(dmis, regT, opt)
# Directives for Inversions
beta = Directives.BetaEstimate_ByEig(beta0_ratio=1e+1)
Target = Directives.TargetMisfit()
betaSched = Directives.BetaSchedule(coolingFactor=5., coolingRate=2)
inv = Inversion.BaseInversion(invProb, directiveList=[beta, Target,
                                                      betaSched])
# Run Inversion
minv = inv.run(m0)
# Final Plot
############
fig, ax = plt.subplots(2, 2, figsize=(12, 6))
ax = Utils.mkvc(ax)
# Sphere outlines in the vertical (x-z) and horizontal (x-y) planes.
cyl0v = getCylinderPoints(x0, z0, r0)
cyl1v = getCylinderPoints(x1, z1, r1)
cyl0h = getCylinderPoints(x0, y0, r0)
cyl1h = getCylinderPoints(x1, y1, r1)
clim = [(mtrue[actind]).min(), (mtrue[actind]).max()]
dat = meshCore.plotSlice(((mtrue[actind])), ax=ax[0], normal='Y', clim=clim,
                         ind=int(ncy / 2))
ax[0].set_title('Ground Truth, Vertical')
ax[0].set_aspect('equal')
meshCore.plotSlice((minv), ax=ax[1], normal='Y', clim=clim, ind=int(ncy / 2))
ax[1].set_aspect('equal')
ax[1].set_title('Inverted Model, Vertical')
meshCore.plotSlice(((mtrue[actind])), ax=ax[2], normal='Z', clim=clim,
                   ind=int(ncz / 2))
ax[2].set_title('Ground Truth, Horizontal')
ax[2].set_aspect('equal')
meshCore.plotSlice((minv), ax=ax[3], normal='Z', clim=clim, ind=int(ncz / 2))
ax[3].set_title('Inverted Model, Horizontal')
ax[3].set_aspect('equal')
for i in range(2):
    ax[i].plot(cyl0v[:, 0], cyl0v[:, 1], 'k--')
    ax[i].plot(cyl1v[:, 0], cyl1v[:, 1], 'k--')
for i in range(2, 4):
    ax[i].plot(cyl1h[:, 0], cyl1h[:, 1], 'k--')
    ax[i].plot(cyl0h[:, 0], cyl0h[:, 1], 'k--')
fig.subplots_adjust(right=0.8)
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
cb = plt.colorbar(dat[0], ax=cbar_ax)
cb.set_label('ln conductivity')
cbar_ax.axis('off')
plt.show()
| mit |
mitschabaude/nanopores | nanopores/geometries/Cyl2D/py4geo.py | 1 | 2455 | import numpy
from params_geo import *
import nanopores.py4gmsh.basic
import nanopores.py4gmsh.extra
from nanopores.py4gmsh import *
def get_geo(**params):
    """
    writes a 2d geo file for an axisymmetric geometry for particle in cylinder
    """
    # NOTE(review): reload() is the Python 2 builtin here; reloading resets
    # the py4gmsh module state accumulated by any previous geometry build.
    basic = reload(nanopores.py4gmsh.basic)
    extra = reload(nanopores.py4gmsh.extra)
    # Overwrite module-level defaults (from params_geo) with caller values.
    globals().update(params)
    # Characteristic mesh lengths near the molecule and in the bulk fluid.
    lcMolecule = r/10
    lcFluid = min(min(R,l/2)/5, max(R,l/2)/60)
    # Corner points of the fluid rectangle in the axisymmetric r-z plane
    # (symmetry axis at x=0).
    X_Fluid = numpy.array([[0, l/2, 0],
                           [R, l/2, 0],
                           [R,-l/2, 0],
                           [0,-l/2, 0]])
    # Bottom, center and top of the molecule (a half-disc on the axis).
    X_Molecule = numpy.array([[0, z0 - r, 0],
                              [0, z0 , 0],
                              [0, z0 + r, 0]])
    p_Fluid = [Point(x, lcFluid) for x in X_Fluid]
    p_Molecule = [Point(x, lcMolecule) for x in X_Molecule]
    p_Fluid.append(p_Molecule[0])
    p_Fluid.append(p_Molecule[2])
    # Molecule boundary: circular arc closed by a segment on the axis.
    c_Molecule = Circle(p_Molecule)
    e_Molecule = Line(p_Molecule[2], p_Molecule[0])
    ll_Molecule = LineLoop([c_Molecule, e_Molecule])
    s_Molecule = PlaneSurface(ll_Molecule)
    # Fluid boundary: rectangle edges with the molecule arc spliced in so
    # the fluid surface excludes the molecule.
    e_Fluid = [Line(p_Fluid[k], p_Fluid[k+1]) for k in range(len(p_Fluid)-1)]
    e_Fluid.append(Line(p_Fluid[-1], p_Fluid[0]))
    e_Fluid.pop(len(e_Fluid)-2)
    e_Fluid.insert(len(e_Fluid)-1, c_Molecule)
    ll_Fluid = LineLoop(e_Fluid)
    s_Fluid = PlaneSurface(ll_Fluid)
    # Mesh-size fields: fine box around the molecule, coarser box elsewhere;
    # the background field takes the pointwise minimum.
    box1 = BoxField(lcFluid, lcFluid, 0.0, R, -l/2, l/2)
    box2 = BoxField(lcMolecule, lcFluid, -1e-15, 6*r, z0-6*r, z0+6*r)
    field_list = [box1,box2]
    raw_code(['bfield = newf;'])
    raw_code(['Field[bfield] = Min;'])
    raw_code(['Field[bfield].FieldsList = {%s};' %', '.join(field_list)])
    raw_code(['Background Field = bfield;'])
    # to disable question dialogs
    raw_code(['General.ExpertMode = 1;'])
    # Don't extend the elements sizes from the boundary inside the domain
    raw_code(['Mesh.CharacteristicLengthExtendFromBoundary = 0;'])
    # 2D algorithm (1=MeshAdapt, 2=Automatic, 5=Delaunay, 6=Frontal, 7=bamg, 8=delquad)
    # only delaunay is compatible with attractor fields
    raw_code(['Mesh.Algorithm = 5;'])
    geo_dict = {"gmsh mesh generating sript": __name__,
                "Typical length scale": lcFluid,
                "geo_code": get_code(),}
    return geo_dict
# -----
if __name__ == '__main__':
    # Smoke test: print the generated geo dictionary.
    print(get_geo())
    print('\n - This is the sample code for the geo file')
| mit |
timasjov/famous-algorithms | maximize-minimize-expression/max-min.py | 1 | 1658 | def maxmin(a, type):
if operation == 'max':
type = True
else:
type = False
height = len(a)
matrix = [[0] * height for row in xrange(height)]
for nr_signs in range(height):
matrix[nr_signs][nr_signs] = int(a[nr_signs])
for nr_signs in range(1, height): # loop total number of signs needed to put
for row in range(0, height - nr_signs): # loop total number of signs per row
add = []
mult = []
pos = nr_signs + row # position in row where to put the element
for operation in range(nr_signs): # number of operations per row per sign
add.append(matrix[row][row + operation] + matrix[row + operation + 1][row + nr_signs])
mult.append(matrix[row][row + operation] * matrix[row + operation + 1][row + nr_signs])
if type:
max_mult = max(mult)
max_add = max(add)
else:
max_mult = min(mult)
max_add = min(add)
if max_mult >= max_add and type:
matrix[row][pos] = max_mult
elif type:
matrix[row][pos] = max_add
elif max_mult <= max_add:
matrix[row][pos] = max_mult
else:
matrix[row][pos] = max_add
return matrix
# Demo: best and worst value achievable by inserting '+'/'*' into the list.
numbers = [2, 1, 2, 1, 1, 3, 6, 10, 1, 2, 2, 1, 6, 1, 2, 2, 1, 7, 2, 1, 1, 3, 2, 1, 5, 1, 7, 2, 1, 2, 1, 2, 1, 2, 3, 1, 4, 4, 5, 2, 1, 2, 2, 2, 1, 1, 1, 2, 3]
result_max = maxmin(numbers, "max")
print('Max: ' + str(result_max[0][-1]))
result_min = maxmin(numbers, "min")
print('Min: ' + str(result_min[0][-1]))
sbalde/edx-platform | common/djangoapps/embargo/test_utils.py | 146 | 2883 | """Utilities for writing unit tests that involve course embargos. """
import contextlib
import mock
import pygeoip
from django.core.urlresolvers import reverse
from django.core.cache import cache
from embargo.models import Country, CountryAccessRule, RestrictedCourse
@contextlib.contextmanager
def restrict_course(course_key, access_point="enrollment", disable_access_check=False):
    """Simulate that a course is restricted.
    This does two things:
    1) Configures country access rules so that the course is restricted.
    2) Mocks the GeoIP call so the user appears to be coming
    from a country that's blocked from the course.
    This is useful for tests that need to verify
    that restricted users won't be able to access
    particular views.
    Arguments:
        course_key (CourseKey): The location of the course to block.
    Keyword Arguments:
        access_point (str): Either "courseware" or "enrollment"
        disable_access_check (bool): If True, mark the restricted course
            so that the embargo access check is skipped.
    Yields:
        str: A URL to the page in the embargo app that explains
        why the user was blocked.
    Example Usage:
        >>> with restrict_course(course_key) as redirect_url:
        >>> # The client will appear to be coming from
        >>> # an IP address that is blocked.
        >>> resp = self.client.get(url)
        >>> self.assertRedirects(resp, redirect_url)
    """
    # Clear the cache to ensure that previous tests don't interfere
    # with this test.
    cache.clear()
    with mock.patch.object(pygeoip.GeoIP, 'country_code_by_addr') as mock_ip:
        # Remove all existing rules for the course
        CountryAccessRule.objects.all().delete()
        # Create the country object
        # Ordinarily, we'd create models for every country,
        # but that would slow down the test suite.
        country, __ = Country.objects.get_or_create(country='IR')
        # Create a model for the restricted course
        restricted_course, __ = RestrictedCourse.objects.get_or_create(course_key=course_key)
        restricted_course.enroll_msg_key = 'default'
        restricted_course.access_msg_key = 'default'
        restricted_course.disable_access_check = disable_access_check
        restricted_course.save()
        # Ensure that there is a blacklist rule for the country
        CountryAccessRule.objects.get_or_create(
            restricted_course=restricted_course,
            country=country,
            rule_type='blacklist'
        )
        # Simulate that the user is coming from the blacklisted country
        mock_ip.return_value = 'IR'
        # Yield the redirect url so the tests don't need to know
        # the embargo messaging URL structure.
        redirect_url = reverse(
            'embargo_blocked_message',
            kwargs={
                'access_point': access_point,
                'message_key': 'default'
            }
        )
        yield redirect_url
| agpl-3.0 |
krsjoseph/youtube-dl | youtube_dl/extractor/r7.py | 130 | 3674 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
js_to_json,
unescapeHTML,
int_or_none,
)
class R7IE(InfoExtractor):
    """Extractor for videos hosted on the Brazilian R7 portal.

    Media ids are 24 lowercase hex characters; the player page at
    player.r7.com embeds a JS ``item`` object with title, thumbnail,
    statistics and a format playlist.
    """
    _VALID_URL = r'''(?x)https?://
                        (?:
                            (?:[a-zA-Z]+)\.r7\.com(?:/[^/]+)+/idmedia/|
                            noticias\.r7\.com(?:/[^/]+)+/[^/]+-|
                            player\.r7\.com/video/i/
                        )
                        (?P<id>[\da-f]{24})
                    '''
    _TESTS = [{
        'url': 'http://videos.r7.com/policiais-humilham-suspeito-a-beira-da-morte-morre-com-dignidade-/idmedia/54e7050b0cf2ff57e0279389.html',
        'md5': '403c4e393617e8e8ddc748978ee8efde',
        'info_dict': {
            'id': '54e7050b0cf2ff57e0279389',
            'ext': 'mp4',
            'title': 'Policiais humilham suspeito à beira da morte: "Morre com dignidade"',
            'thumbnail': 're:^https?://.*\.jpg$',
            'duration': 98,
            'like_count': int,
            'view_count': int,
        },
    }, {
        'url': 'http://esportes.r7.com/videos/cigano-manda-recado-aos-fas/idmedia/4e176727b51a048ee6646a1b.html',
        'only_matching': True,
    }, {
        'url': 'http://noticias.r7.com/record-news/video/representante-do-instituto-sou-da-paz-fala-sobre-fim-do-estatuto-do-desarmamento-5480fc580cf2285b117f438d/',
        'only_matching': True,
    }, {
        'url': 'http://player.r7.com/video/i/54e7050b0cf2ff57e0279389?play=true&video=http://vsh.r7.com/54e7050b0cf2ff57e0279389/ER7_RE_BG_MORTE_JOVENS_570kbps_2015-02-2009f17818-cc82-4c8f-86dc-89a66934e633-ATOS_copy.mp4&linkCallback=http://videos.r7.com/policiais-humilham-suspeito-a-beira-da-morte-morre-com-dignidade-/idmedia/54e7050b0cf2ff57e0279389.html&thumbnail=http://vtb.r7.com/ER7_RE_BG_MORTE_JOVENS_570kbps_2015-02-2009f17818-cc82-4c8f-86dc-89a66934e633-thumb.jpg&idCategory=192&share=true&layout=full&full=true',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        # Always fetch the canonical player page so the embedded JS item
        # object is present regardless of which URL form was supplied.
        webpage = self._download_webpage(
            'http://player.r7.com/video/i/%s' % video_id, video_id)
        item = self._parse_json(js_to_json(self._search_regex(
            r'(?s)var\s+item\s*=\s*({.+?});', webpage, 'player')), video_id)
        title = unescapeHTML(item['title'])
        thumbnail = item.get('init', {}).get('thumbUri')
        duration = None
        statistics = item.get('statistics', {})
        like_count = int_or_none(statistics.get('likes'))
        view_count = int_or_none(statistics.get('views'))
        formats = []
        # The playlist's first entry maps format keys to src/format/duration.
        for format_key, format_dict in item['playlist'][0].items():
            src = format_dict.get('src')
            if not src:
                continue
            format_id = format_dict.get('format') or format_key
            if duration is None:
                duration = format_dict.get('duration')
            # HDS/HLS manifests expand into multiple formats; rank them
            # below direct files via negative preference.
            if '.f4m' in src:
                formats.extend(self._extract_f4m_formats(src, video_id, preference=-1))
            elif src.endswith('.m3u8'):
                formats.extend(self._extract_m3u8_formats(src, video_id, 'mp4', preference=-2))
            else:
                formats.append({
                    'url': src,
                    'format_id': format_id,
                })
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'duration': duration,
            'like_count': like_count,
            'view_count': view_count,
            'formats': formats,
        }
| unlicense |
wuzhenda/gaedav | pyxml/dom/html/HTMLFrameSetElement.py | 4 | 1501 | ########################################################################
#
# File Name: HTMLFrameSetElement
#
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from pyxml.dom import Node
from pyxml.dom.html.HTMLElement import HTMLElement
class HTMLFrameSetElement(HTMLElement):
    """DOM HTMLFrameSetElement: exposes the COLS/ROWS attributes of a
    FRAMESET tag.  Auto-generated file -- regenerate rather than edit."""
    def __init__(self, ownerDocument, nodeName="FRAMESET"):
        HTMLElement.__init__(self, ownerDocument, nodeName)
    ### Attribute Methods ###
    def _get_cols(self):
        return self.getAttribute("COLS")
    def _set_cols(self, value):
        self.setAttribute("COLS", value)
    def _get_rows(self):
        return self.getAttribute("ROWS")
    def _set_rows(self, value):
        self.setAttribute("ROWS", value)
    ### Attribute Access Mappings ###
    # Inherit the parent's computed-attribute maps, then add cols/rows.
    _readComputedAttrs = HTMLElement._readComputedAttrs.copy()
    _readComputedAttrs.update({
        "cols" : _get_cols,
        "rows" : _get_rows
        })
    _writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
    _writeComputedAttrs.update({
        "cols" : _set_cols,
        "rows" : _set_rows
        })
    # NOTE(review): Python-2-only idioms (dict.has_key, filter returning a
    # list, dict.keys() returning a list) -- consistent with the rest of
    # this generated package.
    _readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
                            HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
| lgpl-2.1 |
gpitel/pyjs | examples/libtest/RandomModuleTest.py | 12 | 3727 | from UnitTest import UnitTest
import random
from math import log, exp, sqrt, pi
try:
    from math import fsum as msum
except ImportError:
    # BUG FIX: narrowed the bare ``except:`` (which also swallowed
    # KeyboardInterrupt/SystemExit) to ImportError -- math.fsum is only
    # missing on Python < 2.6, which is the sole case this guards.
    from math import fabs
    def msum(x):
        """Return sum(x), adding small-magnitude terms first to limit
        floating-point rounding error (ties keep their original relative
        order, matching the previous tuple-sort implementation)."""
        order = sorted(range(len(x)), key=lambda i: fabs(x[i]))
        total = 0
        for i in order:
            total += x[i]
        return total
# Lanczos coefficients (g = 7, 9 terms).
_gammacoeff = (0.9999999999995183, 676.5203681218835, -1259.139216722289,
    771.3234287757674, -176.6150291498386, 12.50734324009056,
    -0.1385710331296526, 0.9934937113930748e-05, 0.1659470187408462e-06)

def gamma(z, cof=_gammacoeff, g=7):
    """Gamma function via the Lanczos approximation.

    The series terms are built with an explicit append loop (rather than
    list '+' concatenation, which pyjs only supports when compiled with
    --operator-funcs) and summed with msum for accuracy.
    """
    z = z - 1.0
    terms = [cof[0]]
    for i in range(1, len(cof)):
        terms.append(cof[i] / (z + i))
    partial = msum(terms)
    z = z + 0.5
    return (z + g) ** z / exp(z + g) * sqrt(2.0 * pi) * partial
class RandomModuleTest(UnitTest):
    def test_zeroinputs(self):
        # Verify that distributions can handle a series of zero inputs.
        # g.random is replaced by x.pop, which pops from the END of the
        # list, so the five appended 0.0 values are consumed first by each
        # variate; x is re-copied from xx before every call.
        g = random.Random()
        xx = [g.random() for i in xrange(50)]
        x = [0.0]
        xx = xx.__add__(x.__mul__(5))
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.uniform(1,10)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.paretovariate(1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.expovariate(1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.weibullvariate(1.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.normalvariate(0.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.gauss(0.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.lognormvariate(0.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.vonmisesvariate(0.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.gammavariate(0.01, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.gammavariate(1.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.gammavariate(200.0, 1.0)
        x = xx[:]
        g.random = getattr(x, 'pop')
        g.betavariate(3.0, 3.0)
        if hasattr(g, 'triangular'):
            x = xx[:]
            g.random = getattr(x, 'pop')
            g.triangular(0.0, 1.0, 1.0/3.0)
    def test_avg_std(self):
        # Use integration to test distribution average and standard deviation.
        # Only works for distributions which do not consume variates in pairs
        g = random.Random()
        N = 5000
        # Evenly spaced variates on (0, 1), fed to each distribution in
        # place of g.random via x.pop.
        xx = [i/float(N) for i in xrange(1,N)]
        # (variate, args, expected mean, expected variance)
        dists = [
            (g.uniform, (1.0,10.0), (10.0+1.0)/2, (10.0-1.0)**2/12),
            (g.expovariate, (1.5,), 1/1.5, 1/1.5**2),
            (g.paretovariate, (5.0,), 5.0/(5.0-1),
                              5.0/((5.0-1)**2*(5.0-2))),
            (g.weibullvariate, (1.0, 3.0), gamma(1+1/3.0),
                              gamma(1+2/3.0)-gamma(1+1/3.0)**2) ]
        if hasattr(g, 'triangular'):
            dists.append((g.triangular, (0.0, 1.0, 1.0/3.0), 4.0/9.0, 7.0/9.0/18.0))
        for variate, args, mu, sigmasqrd in dists:
            x = xx[:]
            g.random = getattr(x, 'pop')
            y = []
            # Keep drawing until the supplied variates run out.
            for i in xrange(len(x)):
                try:
                    y.append(variate(*args))
                except IndexError:
                    pass
            # Sample mean and (unbiased) variance against the exact values.
            s1 = s2 = 0
            for e in y:
                s1 += e
                s2 += (e - mu) ** 2
            N = len(y)
            self.assertAlmostEqual(s1/N, mu, 2)
            self.assertAlmostEqual(s2/(N-1), sigmasqrd, 2)
| apache-2.0 |
maximermilov/cauliflowervest | cauliflowervest/server/handlers/duplicity.py | 1 | 1514 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for handling Duplicity GPG key pairs."""
import re
from cauliflowervest.server import permissions
from cauliflowervest.server.handlers import passphrase_handler
from cauliflowervest.server.models import volumes as models
class Duplicity(passphrase_handler.PassphraseHandler):
    """Handler for the /duplicity URL (Duplicity GPG key pairs)."""

    AUDIT_LOG_MODEL = models.DuplicityAccessLog
    SECRET_MODEL = models.DuplicityKeyPair
    PERMISSION_TYPE = permissions.TYPE_DUPLICITY

    JSON_SECRET_NAME = 'key_pair'

    TARGET_ID_REGEX = re.compile(r'^[a-f0-9]{32}$')

    def _CreateNewSecretEntity(self, owner, volume_uuid, secret):
        """Build an (unsaved) DuplicityKeyPair entity for owner/volume_uuid."""
        fields = {
            'key_pair': str(secret),
            'owner': owner,
            'volume_uuid': volume_uuid,
        }
        return models.DuplicityKeyPair(**fields)

    def RetrieveSecret(self, volume_uuid):
        """Handle a GET request to retrieve a key pair."""
        # Force JSON output mode before delegating to the generic handler.
        self.request.json = '1'
        parent = super(Duplicity, self)
        return parent.RetrieveSecret(volume_uuid)
| apache-2.0 |
palashahuja/myhdl | myhdl/test/conversion/general/test_nonlocal.py | 3 | 1243 | import os
path = os.path
from myhdl import *
def NonlocalBench():
    """MyHDL conversion bench for generators mutating an enclosing-scope
    variable (``q``) -- the "nonlocal" pattern under test.

    NOTE: Python 2 / old MyHDL code: ``print`` statements and the
    ``async=True`` keyword (``async`` is reserved in Python 3.7+).
    """
    ALL_ONES = 2**7-1
    ONE = 1
    qout = Signal(intbv(ONE)[8:])
    init = Signal(bool(0))
    clk = Signal(bool(0))
    reset = ResetSignal(0, active=1, async=True)
    # Shared shift-register state, updated in place by scrambler() below.
    q = intbv(ONE)[8:]

    @always_seq(clk.posedge, reset=reset)
    def scrambler():
        if init:
            q[8:1] = ALL_ONES
        else:
            # Feedback bit, then shift the register up by one.
            q[0] = q[7] ^ q[6]
            q[8:1] = q[7:0]
        qout.next = q[8:1]

    @instance
    def clkgen():
        clk.next = 1
        while True:
            yield delay(10)
            clk.next = not clk

    @instance
    def stimulus():
        reset.next = 0
        init.next = 0
        yield clk.negedge
        reset.next = 1
        yield clk.negedge
        print qout
        assert qout == ONE
        reset.next = 0
        for i in range(100):
            yield clk.negedge
            print qout
        init.next = 1
        yield clk.negedge
        assert qout == ALL_ONES
        print qout
        init.next = 0
        for i in range(300):
            # NOTE(review): no clock yield inside this loop, so the same
            # value is printed 300 times -- confirm this is intentional.
            print qout
        raise StopSimulation()

    return scrambler, clkgen, stimulus
def test_nonlocal():
    """The converted NonlocalBench must verify with zero differences."""
    result = conversion.verify(NonlocalBench)
    assert result == 0
| lgpl-2.1 |
mmcloughlin/finsky | finsky/protos/book_doc_details_pb2.py | 2 | 3805 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: book_doc_details.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()

# NOTE: generated protobuf module -- regenerate from book_doc_details.proto
# rather than editing by hand (see the "DO NOT EDIT" header above).

DESCRIPTOR = _descriptor.FileDescriptor(
  name='book_doc_details.proto',
  package='BookDocDetails',
  syntax='proto2',
  serialized_pb=_b('\n\x16\x62ook_doc_details.proto\x12\x0e\x42ookDocDetails\"v\n\x0b\x42ookDetails\x12\x11\n\tpublisher\x18\x04 \x01(\t\x12\x17\n\x0fpublicationDate\x18\x05 \x01(\t\x12\x0c\n\x04isbn\x18\x06 \x01(\t\x12\x15\n\rnumberOfPages\x18\x07 \x01(\x05\x12\x16\n\x0e\x61\x62outTheAuthor\x18\x11 \x01(\tB2\n com.google.android.finsky.protosB\x0e\x42ookDocDetails')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


# Descriptor for the BookDetails message (string fields 4-6 and 17,
# int32 field 7).
_BOOKDETAILS = _descriptor.Descriptor(
  name='BookDetails',
  full_name='BookDocDetails.BookDetails',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='publisher', full_name='BookDocDetails.BookDetails.publisher', index=0,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='publicationDate', full_name='BookDocDetails.BookDetails.publicationDate', index=1,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='isbn', full_name='BookDocDetails.BookDetails.isbn', index=2,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='numberOfPages', full_name='BookDocDetails.BookDetails.numberOfPages', index=3,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aboutTheAuthor', full_name='BookDocDetails.BookDetails.aboutTheAuthor', index=4,
      number=17, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto2',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=42,
  serialized_end=160,
)

DESCRIPTOR.message_types_by_name['BookDetails'] = _BOOKDETAILS

# Concrete message class generated from the descriptor above.
BookDetails = _reflection.GeneratedProtocolMessageType('BookDetails', (_message.Message,), dict(
  DESCRIPTOR = _BOOKDETAILS,
  __module__ = 'book_doc_details_pb2'
  # @@protoc_insertion_point(class_scope:BookDocDetails.BookDetails)
  ))
_sym_db.RegisterMessage(BookDetails)


DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n com.google.android.finsky.protosB\016BookDocDetails'))
# @@protoc_insertion_point(module_scope)
| mit |
ai-se/Transfer-Learning | src/RQ1_3.py | 1 | 4195 | """
Compare XTREE with other threshold based learners.
"""
from __future__ import print_function, division
import os
import sys
# Update path
# Make the project's ``src`` directory importable no matter which
# directory the script was launched from.
root = os.path.join(os.getcwd().split('src')[0], 'src')
if root not in sys.path:
    sys.path.append(root)
from planners.XTREE import xtree
from planners.alves import alves
from utils.plot_util import plot_bar
from planners.shatnawi import shatnawi
from planners.oliveira import oliveira
from data.get_data import get_all_projects
from utils.file_util import list2dataframe
from utils.stats_utils.auec import compute_auec
from utils.rq_utils import measure_overlap, reshape
def research_question_1_3(decrease=True, verbose=True, plot_results=True):
    """
    RQ1: How effective is XTREE?
    RQ3: How does XTREE compare with BELLTREE? (The XTREE part of this RQ is answered here)

    Parameters
    ----------
    decrease: Bool
        Compute AUPEC for defects reduced.
    verbose: Bool
        Display results on the console
    plot_results: Bool
        Save barcharts of overlap vs. defects increased/decreased
    """
    data = get_all_projects()
    if verbose:
        print("Data \tXTREE\tAlves\tShatw\tOlive")
    # NOTE: Python 2 code (dict.iteritems below).
    for proj, paths in data.iteritems():
        i = 0
        # Rolling window over consecutive releases: version k trains the
        # planner, k+1 is the release plans are made for, k+2 validates.
        for train, test, validation in zip(paths.data[:-2], paths.data[1:-1], paths.data[2:]):
            i += 1
            "Convert to pandas type dataframe"
            train = list2dataframe(train)
            test = list2dataframe(test)
            validation = list2dataframe(validation)
            "Recommend changes with XTREE"
            # Column 0 is dropped from the training data for every planner
            # (presumably an identifier column -- confirm in data loader).
            patched_xtree = xtree(train[train.columns[1:]], test)
            patched_alves = alves(train[train.columns[1:]], test)
            patched_shatw = shatnawi(train[train.columns[1:]], test)
            patched_olive = oliveira(train[train.columns[1:]], test)
            "Compute overlap with developers changes"
            res_xtree = measure_overlap(test, patched_xtree, validation)
            res_alves = measure_overlap(test, patched_alves, validation)
            res_shatw = measure_overlap(test, patched_shatw, validation)
            res_olive = measure_overlap(test, patched_olive, validation)
            "AUPEC of defects decreased/increased"
            res_dec, res_inc = reshape(res_xtree, res_alves, res_shatw, res_olive)
            "Plot the results"
            if plot_results:
                plot_bar(res_inc, res_dec, save_path=os.path.join(
                    root, "results", "RQ1", proj), title="{} v{}".format(proj, i), y_lbl="Defects",
                    postfix="")
            "Max/Min to normalize AUPEC"
            y_max = max(res_dec.max(axis=0).values)
            y_min = max(res_dec.min(axis=0).values)
            if decrease:
                "Decrease AUC"
                xtree_dec_auc = compute_auec(res_dec[["Overlap", "XTREE"]], y_max, y_min)
                alves_dec_auc = compute_auec(res_dec[["Overlap", "Alves"]], y_max, y_min)
                shatw_dec_auc = compute_auec(res_dec[["Overlap", "Shatnawi"]], y_max, y_min)
                olive_dec_auc = compute_auec(res_dec[["Overlap", "Oliveira"]], y_max, y_min)
                if verbose:
                    print("{}-{}\t{}\t{}\t{}\t{}".format(proj[:3], i, xtree_dec_auc, alves_dec_auc, shatw_dec_auc, olive_dec_auc))
            else:
                "Increase AUC"
                xtree_inc_auc = compute_auec(res_inc[["Overlap", "XTREE"]], y_max, y_min)
                alves_inc_auc = compute_auec(res_inc[["Overlap", "Alves"]], y_max, y_min)
                shatw_inc_auc = compute_auec(res_inc[["Overlap", "Shatnawi"]], y_max, y_min)
                olive_inc_auc = compute_auec(res_inc[["Overlap", "Oliveira"]], y_max, y_min)
                if verbose:
                    print("{}-{}\t{}\t{}\t{}\t{}".format(proj[:3], i, xtree_inc_auc, alves_inc_auc, shatw_inc_auc, olive_inc_auc))
if __name__ == "__main__":
print("AUPEC: Defects Reduced\n{}".format(22*"-"))
research_question_1_3(decrease=True, verbose=True, plot_results=False)
print("\n"+40*"="+"\nAUPEC: Defects Increased\n"+24*"-")
research_question_1_3(decrease=False, verbose=True, plot_results=False)
| unlicense |
clembou/rasterio | rasterio/fill.py | 5 | 2066 | import rasterio
from rasterio._fill import _fillnodata
def fillnodata(
        image,
        mask=None,
        max_search_distance=100.0,
        smoothing_iterations=0):
    """Fill nodata holes in a raster by interpolating from the hole edges.

    Every pixel marked as nodata (a zero in *mask*) gets a value
    interpolated from surrounding valid pixels using a four-direction
    conic search with inverse-distance weighting, optionally followed by
    3x3 average-filter smoothing passes over the interpolated pixels.

    Suitable for rasters that vary fairly continuously (e.g. elevation
    models) and for small holes or cracks in irregular images (aerial
    photos); not a good fit for interpolating a raster from sparse
    point data.

    Parameters
    ----------
    image : numpy ndarray
        The source containing nodata holes.
    mask : numpy ndarray or None
        Pixels to interpolate into are marked with 0; values > 0 are
        used during interpolation.  Must match *image*'s shape.  If
        None, a mask is diagnosed from the source data.
    max_search_distance : float, optional
        Maximum number of pixels searched in all directions for values
        to interpolate from.  Default: 100.
    smoothing_iterations : int, optional
        Number of 3x3 smoothing filter passes to run.  Default: 0.

    Returns
    -------
    numpy ndarray
        The filled raster array.
    """
    search_distance = float(max_search_distance)
    passes = int(smoothing_iterations)
    with rasterio.drivers():
        return _fillnodata(image, mask, search_distance, passes)
| bsd-3-clause |
devendermishrajio/nova | nova/tests/functional/api_sample_tests/test_instance_usage_audit_log.py | 16 | 2069 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class InstanceUsageAuditLogJsonTest(api_sample_base.ApiSampleTestBaseV21):
    """API-sample tests for the os-instance-usage-audit-log endpoints."""

    ADMIN_API = True
    extension_name = "os-instance-usage-audit-log"

    def _get_flags(self):
        """Extend the base flags with the legacy v2 audit-log extension."""
        flags = super(InstanceUsageAuditLogJsonTest, self)._get_flags()
        extensions = CONF.osapi_compute_extension[:]
        extensions.append(
            'nova.api.openstack.compute.'
            'contrib.instance_usage_audit_log.'
            'Instance_usage_audit_log')
        flags['osapi_compute_extension'] = extensions
        return flags

    def test_show_instance_usage_audit_log(self):
        """GET with a timestamp matches the single-log sample response."""
        quoted = urllib.quote('2012-07-05 10:00:00')
        resp = self._do_get('os-instance_usage_audit_log/%s' % quoted)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('inst-usage-audit-log-show-get-resp',
                              subs, resp, 200)

    def test_index_instance_usage_audit_log(self):
        """Plain GET matches the audit-log index sample response."""
        resp = self._do_get('os-instance_usage_audit_log')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('inst-usage-audit-log-index-get-resp',
                              subs, resp, 200)
| apache-2.0 |
briancurtin/python-openstacksdk | openstack/cluster/v1/cluster.py | 1 | 6541 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.cluster import cluster_service
from openstack import resource2 as resource
from openstack import utils
class Cluster(resource.Resource):
    resource_key = 'cluster'
    resources_key = 'clusters'
    base_path = '/clusters'
    service = cluster_service.ClusterService()

    # capabilities
    allow_create = True
    allow_get = True
    allow_update = True
    allow_delete = True
    allow_list = True
    patch_update = True

    _query_mapping = resource.QueryParameters(
        'name', 'status', 'sort', 'global_project')

    # Properties
    #: Name of the cluster.
    name = resource.Body('name')
    #: ID of the profile this cluster uses.
    profile_id = resource.Body('profile_id')
    #: ID of the user who created (owns) the cluster.
    user_id = resource.Body('user')
    #: ID of the project the cluster belongs to.
    project_id = resource.Body('project')
    #: Domain ID of the cluster owner.
    domain_id = resource.Body('domain')
    #: When the cluster was initialized.
    #: *Type: datetime object parsed from ISO 8601 formatted string*
    init_at = resource.Body('init_at')
    #: When the cluster was created.
    #: *Type: datetime object parsed from ISO 8601 formatted string*
    created_at = resource.Body('created_at')
    #: When the cluster was last updated.
    #: *Type: datetime object parsed from ISO 8601 formatted string*
    updated_at = resource.Body('updated_at')
    #: Inclusive lower bound on the cluster size.
    min_size = resource.Body('min_size', type=int)
    #: Inclusive upper bound on the cluster size; -1 means no upper limit.
    max_size = resource.Body('max_size', type=int)
    #: Capacity the cluster is created at (and scaled towards).
    desired_capacity = resource.Body('desired_capacity', type=int)
    #: Default timeout (in seconds) for cluster operations.
    timeout = resource.Body('timeout')
    #: String representation of the cluster status.
    status = resource.Body('status')
    #: Why the cluster is in its current status.
    status_reason = resource.Body('status_reason')
    #: Configuration dictionary for the cluster.
    config = resource.Body('config', type=dict)
    #: Key-value metadata attached to the cluster.
    metadata = resource.Body('metadata', type=dict)
    #: Runtime data associated with the cluster.
    data = resource.Body('data', type=dict)
    #: IDs of the nodes that are members of the cluster.
    node_ids = resource.Body('nodes')
    #: Name of the profile used by the cluster.
    profile_name = resource.Body('profile_name')
    #: Whether a cluster update should pertain to the profile only.
    is_profile_only = resource.Body('profile_only', type=bool)
    #: Dependency information of the cluster.
    dependents = resource.Body('dependents', type=dict)

    def action(self, session, body):
        """POST *body* to this cluster's actions endpoint, return the JSON."""
        url = utils.urljoin(self.base_path, self._get_id(self), 'actions')
        response = session.post(url, endpoint_filter=self.service, json=body)
        return response.json()

    def add_nodes(self, session, nodes):
        """Add the given existing nodes to the cluster."""
        return self.action(session, {'add_nodes': {'nodes': nodes}})

    def del_nodes(self, session, nodes, **params):
        """Remove the given nodes, forwarding any extra options."""
        payload = dict({'nodes': nodes}, **params)
        return self.action(session, {'del_nodes': payload})

    def replace_nodes(self, session, nodes):
        """Replace member nodes according to the *nodes* mapping."""
        return self.action(session, {'replace_nodes': {'nodes': nodes}})

    def scale_out(self, session, count=None):
        """Grow the cluster by *count* nodes (server default when None)."""
        return self.action(session, {'scale_out': {'count': count}})

    def scale_in(self, session, count=None):
        """Shrink the cluster by *count* nodes (server default when None)."""
        return self.action(session, {'scale_in': {'count': count}})

    def resize(self, session, **params):
        """Resize the cluster using the given adjustment parameters."""
        return self.action(session, {'resize': params})

    def policy_attach(self, session, policy_id, **params):
        """Attach the policy *policy_id*, forwarding any extra options."""
        payload = dict({'policy_id': policy_id}, **params)
        return self.action(session, {'policy_attach': payload})

    def policy_detach(self, session, policy_id):
        """Detach the policy *policy_id* from the cluster."""
        return self.action(session,
                           {'policy_detach': {'policy_id': policy_id}})

    def policy_update(self, session, policy_id, **params):
        """Update properties of the attached policy *policy_id*."""
        payload = dict({'policy_id': policy_id}, **params)
        return self.action(session, {'policy_update': payload})

    def check(self, session, **params):
        """Trigger a health check on the cluster."""
        return self.action(session, {'check': params})

    def recover(self, session, **params):
        """Trigger a recovery operation on the cluster."""
        return self.action(session, {'recover': params})

    def op(self, session, operation, **params):
        """Perform an operation on the cluster.

        :param session: A session object used for sending request.
        :param operation: A string representing the operation to be performed.
        :param dict params: An optional dict providing the parameters for the
                            operation.
        :returns: A dictionary containing the action ID.
        """
        url = utils.urljoin(self.base_path, self.id, 'ops')
        response = session.post(url, endpoint_filter=self.service,
                                json={operation: params})
        return response.json()
| apache-2.0 |
vpistis/soapfish | soapfish/soap12.py | 1 | 2999 | # -*- coding: utf-8 -*-
from . import namespaces as ns
from . import soap11
from . import xsd
# SOAP 1.2 envelope and WSDL binding namespaces, plus the media type.
ENVELOPE_NAMESPACE = ns.soap12_envelope
BINDING_NAMESPACE = ns.wsdl_soap12
CONTENT_TYPE = 'application/soap+xml'
NAME = 'soap12'
# --- Functions ---------------------------------------------------------------
def determine_soap_action(request):
    """Extract the SOAP action from a SOAP 1.2 Content-Type header.

    SOAP 1.2 carries the action as a media-type parameter, e.g.
    ``application/soap+xml; charset=utf-8; action="urn:op"``.  Returns
    the action string with surrounding quotes removed, or None if no
    action parameter is present.
    """
    content_types = request.environ.get('CONTENT_TYPE', '').split(';')
    for content_type in content_types:
        if content_type.strip(' ').startswith('action='):
            # Split on the first '=' only, so action URIs that themselves
            # contain '=' (e.g. query strings) are not truncated.
            action = content_type.split('=', 1)[1]
            return action.replace('"', '')
    return None
def build_http_request_headers(soapAction):
    """Return HTTP headers carrying the SOAP 1.2 content type and action."""
    content_type = '{0};action="{1}"'.format(CONTENT_TYPE, soapAction)
    return {'content-type': content_type}
def get_error_response(code, message, actor=None, header=None):
    """Serialize a SOAP 1.2 Fault envelope for the given error details."""
    fault_envelope = Envelope.error_response(code, message, actor=actor,
                                             header=header)
    return fault_envelope
def parse_fault_message(fault):
    """Return ``(code, reason, role)`` extracted from a SOAP 1.2 Fault."""
    code = fault.Code.Value
    reason = fault.Reason.Text
    role = fault.Role
    return code, reason, role
class Header(soap11.Header):
    """SOAP 1.2 envelope Header; inherits its structure from SOAP 1.1."""
    pass
class Code(xsd.ComplexType):
    """SOAP 1.2 Fault Code element."""
    # SOAP 1.2 uses Sender/Receiver where SOAP 1.1 had Client/Server.
    CLIENT = 'ns0:Sender'
    SERVER = 'ns0:Receiver'
    Value = xsd.Element(xsd.String)
class LanguageString(xsd.String):
    """String type rendered with an ``xml:lang`` attribute (fixed to "en")."""
    def render(self, parent, value, namespace, elementFormDefault):
        parent.text = self.xmlvalue(value)
        # Language is hard-coded to English here.
        parent.set('{%s}lang' % ns.xml, 'en')
class Reason(xsd.ComplexType):
    """SOAP 1.2 Fault Reason element (human-readable Text child)."""
    Text = xsd.Element(LanguageString)
class Fault(xsd.ComplexType):
    '''
    SOAP Envelope Fault: Code and Reason plus an optional Role.
    '''
    Code = xsd.Element(Code)
    Reason = xsd.Element(Reason)
    Role = xsd.Element(xsd.String, minOccurs=0)
class Body(soap11.Body):
    '''
    SOAP Envelope Body: either a payload message or a Fault.
    '''
    message = xsd.ClassNamedElement(xsd.NamedType, minOccurs=0)
    Fault = xsd.Element(Fault, minOccurs=0)
class Envelope(xsd.ComplexType):
    '''
    SOAP Envelope: optional Header plus mandatory Body.
    '''
    Header = xsd.Element(Header, nillable=True)
    Body = xsd.Element(Body)

    @classmethod
    def response(cls, tagname, return_object, header=None):
        # Serialize a success envelope wrapping *return_object* under
        # *tagname*.  The Body is assigned first and then mutated: the
        # xsd.Element descriptors may manage storage, so the ordering is
        # presumably deliberate -- do not reorder without checking xsd.
        envelope = cls()
        if header is not None:
            envelope.Header = header
        envelope.Body = Body()
        envelope.Body.message = xsd.NamedType(name=tagname, value=return_object)
        return envelope.xml('Envelope', namespace=ENVELOPE_NAMESPACE,
                            elementFormDefault=xsd.ElementFormDefault.QUALIFIED)

    @classmethod
    def error_response(cls, code, message, header=None, actor=None):
        # Serialize a Fault envelope carrying *code*/*message* and an
        # optional Role (*actor*).
        envelope = cls()
        if header is not None:
            envelope.Header = header
        envelope.Body = Body()
        code = Code(Value=code)
        reason = Reason(Text=message)
        envelope.Body.Fault = Fault(Code=code, Reason=reason, Role=actor)
        return envelope.xml('Envelope', namespace=ENVELOPE_NAMESPACE,
                            elementFormDefault=xsd.ElementFormDefault.QUALIFIED)
# XSD schema describing the SOAP 1.2 envelope and its child types.
SCHEMA = xsd.Schema(
    targetNamespace=ENVELOPE_NAMESPACE,
    elementFormDefault=xsd.ElementFormDefault.QUALIFIED,
    complexTypes=[Header, Body, Envelope, Code, Reason, Fault],
)
| bsd-3-clause |
zhenglaizhang/python | renzongxian/0006/0006.py | 40 | 1367 | # Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-12-08
# Python 3.4
"""
第 0006 题:你有一个目录,放了你一个月的日记,都是 txt,为了避免分词的问题,假设内容都是英文,请统计出你认为每篇日记最重要的词。
"""
import os
import sys
import re
def important_word(target_file):
    """Print the two most frequent words of *target_file*.

    Words are maximal runs of word characters with digits excluded;
    counting is case-sensitive.  Files with fewer than two distinct
    words no longer raise IndexError.
    """
    # ``with`` guarantees the file is closed even if reading fails.
    with open(target_file, 'r') as file_object:
        file_content = file_object.read()
    # Use '+' (one or more) instead of the original '*': a pattern that
    # can match the empty string splits between every character on
    # Python 3.7+ and produced bogus '' tokens.  Also drop the empty
    # leading/trailing tokens re.split can still emit.
    word_list = [w for w in re.split(r'[\W\d]+', file_content) if w]
    word_dict = {}
    for word in word_list:
        word_dict[word] = word_dict.get(word, 0) + 1
    sort = sorted(word_dict.items(), key=lambda e: e[1], reverse=True)
    if len(sort) >= 1:
        print("The most word in '%s' is '%s', it appears %s times"
              % (target_file, sort[0][0], sort[0][1]))
    if len(sort) >= 2:
        print("The second most word in '%s' is '%s', it appears %s times"
              % (target_file, sort[1][0], sort[1][1]))
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0006.py $dir_path'")
else:
for dir_path in sys.argv[1:]:
for file_name in os.listdir(dir_path):
file_path = os.path.join(dir_path, file_name)
important_word(file_path) | mit |
pas256/troposphere | examples/DynamoDB_Table.py | 2 | 2188 | # Converted from DynamoDB_Table.template located at:
# http://aws.amazon.com/cloudformation/aws-cloudformation-templates/
from troposphere import Output, Parameter, Ref, Template
from troposphere.dynamodb import (KeySchema, AttributeDefinition,
ProvisionedThroughput)
from troposphere.dynamodb import Table
# Build the CloudFormation template object.
t = Template()

t.add_description("AWS CloudFormation Sample Template: This template "
                  "demonstrates the creation of a DynamoDB table.")

# --- Input parameters: hash key name/type and provisioned throughput -------

hashkeyname = t.add_parameter(Parameter(
    "HashKeyElementName",
    Description="HashType PrimaryKey Name",
    Type="String",
    AllowedPattern="[a-zA-Z0-9]*",
    MinLength="1",
    MaxLength="2048",
    ConstraintDescription="must contain only alphanumberic characters"
))

hashkeytype = t.add_parameter(Parameter(
    "HashKeyElementType",
    Description="HashType PrimaryKey Type",
    Type="String",
    Default="S",
    AllowedPattern="[S|N]",
    MinLength="1",
    MaxLength="1",
    ConstraintDescription="must be either S or N"
))

readunits = t.add_parameter(Parameter(
    "ReadCapacityUnits",
    Description="Provisioned read throughput",
    Type="Number",
    Default="5",
    MinValue="5",
    MaxValue="10000",
    ConstraintDescription="should be between 5 and 10000"
))

writeunits = t.add_parameter(Parameter(
    "WriteCapacityUnits",
    Description="Provisioned write throughput",
    Type="Number",
    Default="10",
    MinValue="5",
    MaxValue="10000",
    ConstraintDescription="should be between 5 and 10000"
))

# --- Resources: the DynamoDB table wired to the parameters above -----------

myDynamoDB = t.add_resource(Table(
    "myDynamoDBTable",
    AttributeDefinitions=[
        AttributeDefinition(
            AttributeName=Ref(hashkeyname),
            AttributeType=Ref(hashkeytype)
        ),
    ],
    KeySchema=[
        KeySchema(
            AttributeName=Ref(hashkeyname),
            KeyType="HASH"
        )
    ],
    ProvisionedThroughput=ProvisionedThroughput(
        ReadCapacityUnits=Ref(readunits),
        WriteCapacityUnits=Ref(writeunits)
    )
))

# --- Outputs ---------------------------------------------------------------

t.add_output(Output(
    "TableName",
    Value=Ref(myDynamoDB),
    Description="Table name of the newly create DynamoDB table",
))

# Emit the rendered CloudFormation JSON to stdout.
print(t.to_json())
| bsd-2-clause |
alu0100207385/dsi_3Django | django/db/backends/postgresql_psycopg2/introspection.py | 105 | 5005 | from __future__ import unicode_literals
from django.db.backends import BaseDatabaseIntrospection, FieldInfo
from django.utils.encoding import force_text
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """PostgreSQL (psycopg2) implementation of database introspection."""

    # Maps type codes to Django Field types.
    # Keys are PostgreSQL type OIDs as reported by cursor.description.
    data_types_reverse = {
        16: 'BooleanField',
        17: 'BinaryField',
        20: 'BigIntegerField',
        21: 'SmallIntegerField',
        23: 'IntegerField',
        25: 'TextField',
        700: 'FloatField',
        701: 'FloatField',
        869: 'GenericIPAddressField',
        1042: 'CharField', # blank-padded
        1043: 'CharField',
        1082: 'DateField',
        1083: 'TimeField',
        1114: 'DateTimeField',
        1184: 'DateTimeField',
        1266: 'TimeField',
        1700: 'DecimalField',
    }

    # Table names excluded from get_table_list() results.
    ignored_tables = []

    def get_table_list(self, cursor):
        "Returns a list of table names in the current database."
        # relkind 'r' = ordinary table, 'v' = view; system schemas skipped.
        cursor.execute("""
            SELECT c.relname
            FROM pg_catalog.pg_class c
            LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
            WHERE c.relkind IN ('r', 'v', '')
                AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
                AND pg_catalog.pg_table_is_visible(c.oid)""")
        return [row[0] for row in cursor.fetchall() if row[0] not in self.ignored_tables]

    def get_table_description(self, cursor, table_name):
        "Returns a description of the table, with the DB-API cursor.description interface."
        # As cursor.description does not return reliably the nullable property,
        # we have to query the information_schema (#7783)
        cursor.execute("""
            SELECT column_name, is_nullable
            FROM information_schema.columns
            WHERE table_name = %s""", [table_name])
        null_map = dict(cursor.fetchall())
        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
        return [FieldInfo(*((force_text(line[0]),) + line[1:6] + (null_map[force_text(line[0])]=='YES',)))
                for line in cursor.description]

    def get_relations(self, cursor, table_name):
        """
        Returns a dictionary of {field_index: (field_index_other_table, other_table)}
        representing all relationships to the given table. Indexes are 0-based.
        """
        # contype 'f' restricts the scan to foreign-key constraints.
        cursor.execute("""
            SELECT con.conkey, con.confkey, c2.relname
            FROM pg_constraint con, pg_class c1, pg_class c2
            WHERE c1.oid = con.conrelid
                AND c2.oid = con.confrelid
                AND c1.relname = %s
                AND con.contype = 'f'""", [table_name])
        relations = {}
        for row in cursor.fetchall():
            # row[0] and row[1] are single-item lists, so grab the single item.
            relations[row[0][0] - 1] = (row[1][0] - 1, row[2])
        return relations

    def get_key_columns(self, cursor, table_name):
        # Returns (column, referenced_table, referenced_column) triples for
        # every foreign-key column of table_name.
        key_columns = []
        cursor.execute("""
            SELECT kcu.column_name, ccu.table_name AS referenced_table, ccu.column_name AS referenced_column
            FROM information_schema.constraint_column_usage ccu
            LEFT JOIN information_schema.key_column_usage kcu
                ON ccu.constraint_catalog = kcu.constraint_catalog
                    AND ccu.constraint_schema = kcu.constraint_schema
                    AND ccu.constraint_name = kcu.constraint_name
            LEFT JOIN information_schema.table_constraints tc
                ON ccu.constraint_catalog = tc.constraint_catalog
                    AND ccu.constraint_schema = tc.constraint_schema
                    AND ccu.constraint_name = tc.constraint_name
            WHERE kcu.table_name = %s AND tc.constraint_type = 'FOREIGN KEY'""", [table_name])
        key_columns.extend(cursor.fetchall())
        return key_columns

    def get_indexes(self, cursor, table_name):
        # This query retrieves each index on the given table, including the
        # first associated field name
        cursor.execute("""
            SELECT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
            FROM pg_catalog.pg_class c, pg_catalog.pg_class c2,
                pg_catalog.pg_index idx, pg_catalog.pg_attribute attr
            WHERE c.oid = idx.indrelid
                AND idx.indexrelid = c2.oid
                AND attr.attrelid = c.oid
                AND attr.attnum = idx.indkey[0]
                AND c.relname = %s""", [table_name])
        indexes = {}
        for row in cursor.fetchall():
            # row[1] (idx.indkey) is stored in the DB as an array. It comes out as
            # a string of space-separated integers. This designates the field
            # indexes (1-based) of the fields that have indexes on the table.
            # Here, we skip any indexes across multiple fields.
            if ' ' in row[1]:
                continue
            indexes[row[0]] = {'primary_key': row[3], 'unique': row[2]}
        return indexes
| bsd-3-clause |
trea-uy/django-survey-sample | sampleSurveys/tests/test_4logic.py | 1 | 8876 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import Select
from django.test import LiveServerTestCase
from selenium import webdriver
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.test import Client
from dynamicForms.fields import PUBLISHED, DRAFT
from dynamicForms.models import Form,Version,FormEntry,FieldEntry
from dynamicForms.views import FillForm
from selenium.webdriver.common.alert import Alert
from selenium.webdriver.firefox.webdriver import WebDriver
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import Select
import time
import json
class New_logic(LiveServerTestCase):
    """Browser-driven (Selenium) test for form field logic.

    Builds a form with a numeric "Age:" field, an e-mail field shown when
    age >= 18 and a CI field shown when age <= 17, publishes it, and checks
    the JSON stored for the published version.

    NOTE(review): assumes Firefox is installed and the live server answers
    at http://localhost:8081/ -- confirm against the test runner setup.
    """

    def setUp(self):
        # Fresh browser session plus a user the test can log in with.
        self.driver=webdriver.Firefox()
        self.driver.maximize_window()
        self.driver.implicitly_wait(10)
        self.user = User.objects.create_user('test', 'email', 'test')
        self.user.save()

    def test_login(self):
        """Create the 'FormLogic' form through the UI and verify its JSON."""
        base_url = "http://localhost:8081/surveys/"
        # login web
        self.driver.get(base_url + "login/")
        elem = self.driver.find_element_by_name("username")
        elem.send_keys("test")
        elem = self.driver.find_element_by_name("password")
        elem.send_keys("test")
        elem.send_keys(Keys.RETURN)
        #click in new form
        self.driver.find_element_by_link_text('New form').click()
        #put the title
        elem = self.driver.find_element_by_name('title')
        elem.send_keys("FormLogic")
        #---------------------------------------------------------------------
        # Field 1: numeric "Age:" field, added from the palette.
        #click in palette and Number button
        self.driver.find_element_by_link_text('Palette').click()
        self.driver.find_element_by_id('NumberField').click()
        time.sleep(1)
        #click in field add
        self.driver.find_element_by_xpath("//ul/li[1]/div/div/label").click()
        #select field in properties and put the question
        elem = self.driver.find_element_by_xpath("//div[@id='tab2']/div/div/div[2]/div/input")
        elem.send_keys("Age:")
        ##--------------------------------------------------------------------
        # Field 2: e-mail field, shown only for adults (logic rule below).
        #click in palette and Email button
        self.driver.find_element_by_link_text('Palette').click()
        self.driver.find_element_by_id('EmailField').click()
        self.driver.execute_script("window.scrollBy(0,-100);")
        time.sleep(1)
        #click in field add
        self.driver.find_element_by_xpath("//ul/li[2]/div/div/label").click()
        #select field in properties and put the question
        elem = self.driver.find_element_by_xpath("//div[@id='tab2']/div/div/div[2]/div[1]/input")
        elem.send_keys("Sign in E-mail ADULT:")
        #LOGIC EMAIL MAYOR
        # Rule: show this field when Age >= 18.
        #Click en Config
        self.driver.find_element_by_link_text('Config').click()
        #Click in field logic
        self.driver.find_element_by_xpath("//div[@id='tab3']/div[2]/button").click()
        time.sleep(1)
        #Click in add logic button
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div[2]/button")
        elem.send_keys(Keys.RETURN)
        #Select field Sign in Age:
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[1]/select")
        elem.click()
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.ENTER)
        # select condition greater_than_or_equal
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[2]/select")
        elem.click()
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.ENTER)
        #put number
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[3]/input")
        elem.send_keys("18")
        #Click Apply
        self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[5]/button[1]").click()
        ##--------------------------------------------------------------------
        # Field 3: CI field, shown only for minors (logic rule below).
        #click in palette and CI button
        time.sleep(1)
        elem = self.driver.find_element_by_link_text('Palette')
        elem.send_keys(Keys.RETURN)
        self.driver.find_element_by_id('CIField').click()
        time.sleep(1)
        #click in field add
        self.driver.find_element_by_xpath("//ul/li[3]/div/div/label").click()
        time.sleep(1)
        #select field in properties and put the question
        elem = self.driver.find_element_by_xpath("//div[@id='tab2']/div/div/div[2]/div[1]/input")
        time.sleep(2)
        elem.send_keys("Sign in CI UNDER-AGE")
        # LOGIC CI MENOR
        # Rule: show this field when Age <= 17.
        #Click en Config
        self.driver.find_element_by_link_text('Config').click()
        #Click in field logic
        self.driver.find_element_by_xpath("//div[@id='tab3']/div[2]/button").click()
        time.sleep(1)
        #Click in add logic button
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div[2]/button")
        elem.send_keys(Keys.RETURN)
        #Select field Sign in Age:
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[1]/select")
        elem.click()
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.ENTER)
        # select condition less_than_or_equal
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[2]/select")
        elem.click()
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.DOWN)
        elem.send_keys(Keys.ENTER)
        #put number
        elem = self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[3]/div/div[3]/input")
        elem.send_keys("17")
        #Click Apply
        self.driver.find_element_by_xpath("//div[@id='myLogicModal']/div/div/div[2]/div[5]/button[1]").click()
        #publish
        self.driver.find_element_by_xpath("//form/div[2]/div[2]/button[1]").send_keys(Keys.RETURN)
        Alert(self.driver).accept()
        time.sleep(10)
        # The published form must now exist with a version whose JSON matches
        # the exact field/logic layout built through the UI above.
        form=Form.objects.get(title='FormLogic')
        vers=Version.objects.get(form=form)
        js='{"pages": [{"fields": [{"text": "Age:", "dependencies": {"fields": ["2", "3"], "pages": []}, "tooltip": "", "required": false, "validations": {"max_number": null, "min_number": null}, "answer": [], "field_type": "NumberField", "field_id": 1}, {"text": "Sign in E-mail ADULT:", "dependencies": {"fields": [], "pages": []}, "tooltip": "", "required": false, "validations": {"max_len_text": 255}, "answer": [], "field_type": "EmailField", "field_id": 2}, {"text": "Sign in CI UNDER-AGE", "dependencies": {"fields": [], "pages": []}, "tooltip": "", "required": false, "validations": {}, "answer": [], "field_type": "CIField", "field_id": 3}], "subTitle": ""}], "after_submit": {"mailSender": "", "mailSubject": "", "message": "Thank you. You successfully filled the form!", "redirect": "http://", "sendMail": false, "action": "Show Message", "mailText": "", "mailRecipient": ""}, "logic": {"fields": {"2": {"action": "All", "operation": "Show", "conditions": [{"field": 1, "operatorsList": ["greater_than", "greater_than_or_equal", "equal", "not_equal", "less_than_or_equal", "less_than"], "comparator": "greater_than_or_equal", "value": "18", "field_type": "NumberField", "operandKind": "input"}]}, "3": {"action": "All", "operation": "Show", "conditions": [{"field": 1, "operatorsList": ["greater_than", "greater_than_or_equal", "equal", "not_equal", "less_than_or_equal", "less_than"], "comparator": "less_than_or_equal", "value": "17", "field_type": "NumberField", "operandKind": "input"}]}}, "pages": {}}}'
        js = json.loads(js)
        # NOTE(review): assertEquals is the deprecated alias of assertEqual.
        self.assertEquals(json.loads(vers.json),js)

    def tearDown(self):
        # Release the browser started in setUp.
        self.driver.close()
| apache-2.0 |
worsht/antlr4 | runtime/Python2/src/antlr4/LL1Analyzer.py | 14 | 8968 | #
# [The "BSD license"]
# Copyright (c) 2012 Terence Parr
# Copyright (c) 2012 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#/
from antlr4.IntervalSet import IntervalSet, Interval
from antlr4.Token import Token
from antlr4.PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
from antlr4.atn.ATNConfig import ATNConfig
from antlr4.atn.ATNState import ATNState, RuleStopState
from antlr4.atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
class LL1Analyzer (object):

    #* Special value added to the lookahead sets to indicate that we hit
    #  a predicate during analysis if {@code seeThruPreds==false}.
    #/
    HIT_PRED = Token.INVALID_TYPE

    def __init__(self, atn):
        # ATN whose states this analyzer computes lookahead for.
        self.atn = atn

    #*
    # Calculates the SLL(1) expected lookahead set for each outgoing transition
    # of an {@link ATNState}. The returned array has one element for each
    # outgoing transition in {@code s}. If the closure from transition
    # <em>i</em> leads to a semantic predicate before matching a symbol, the
    # element at index <em>i</em> of the result will be {@code null}.
    #
    # @param s the ATN state
    # @return the expected symbols for each outgoing transition of {@code s}.
    #/
    def getDecisionLookahead(self, s):
        if s is None:
            return None
        count = len(s.transitions)
        # BUGFIX: "[] * count" always evaluates to the empty list, so the
        # indexed assignments below raised IndexError whenever the state had
        # any outgoing transitions.  Pre-size with None placeholders instead.
        look = [None] * count
        for alt in range(0, count):
            # BUGFIX: _LOOK() fills the result via addOne/addRange/addSet,
            # which a plain set() does not provide -- use an IntervalSet,
            # mirroring LOOK() below.
            look[alt] = IntervalSet()
            lookBusy = set()
            seeThruPreds = False  # fail to get lookahead upon pred
            # BUGFIX: ATN states expose a "transitions" list (used throughout
            # this class); there is no transition(i) accessor here.
            self._LOOK(s.transitions[alt].target, None, PredictionContext.EMPTY,
                       look[alt], lookBusy, set(), seeThruPreds, False)
            # Wipe out lookahead for this alternative if we found nothing
            # or we had a predicate when we !seeThruPreds
            if len(look[alt]) == 0 or self.HIT_PRED in look[alt]:
                look[alt] = None
        return look

    #*
    # Compute set of tokens that can follow {@code s} in the ATN in the
    # specified {@code ctx}.
    #
    # <p>If {@code ctx} is {@code null} and the end of the rule containing
    # {@code s} is reached, {@link Token#EPSILON} is added to the result set.
    # If {@code ctx} is not {@code null} and the end of the outermost rule is
    # reached, {@link Token#EOF} is added to the result set.</p>
    #
    # @param s the ATN state
    # @param stopState the ATN state to stop at. This can be a
    # {@link BlockEndState} to detect epsilon paths through a closure.
    # @param ctx the complete parser context, or {@code null} if the context
    # should be ignored
    #
    # @return The set of tokens that can follow {@code s} in the ATN in the
    # specified {@code ctx}.
    #/
    def LOOK(self, s, stopState=None, ctx=None):
        r = IntervalSet()
        seeThruPreds = True  # ignore preds; get all lookahead
        lookContext = PredictionContextFromRuleContext(s.atn, ctx) if ctx is not None else None
        self._LOOK(s, stopState, lookContext, r, set(), set(), seeThruPreds, True)
        return r

    #*
    # Compute set of tokens that can follow {@code s} in the ATN in the
    # specified {@code ctx}.
    #
    # <p>If {@code ctx} is {@code null} and {@code stopState} or the end of the
    # rule containing {@code s} is reached, {@link Token#EPSILON} is added to
    # the result set. If {@code ctx} is not {@code null} and {@code addEOF} is
    # {@code true} and {@code stopState} or the end of the outermost rule is
    # reached, {@link Token#EOF} is added to the result set.</p>
    #
    # @param s the ATN state.
    # @param stopState the ATN state to stop at. This can be a
    # {@link BlockEndState} to detect epsilon paths through a closure.
    # @param ctx The outer context, or {@code null} if the outer context should
    # not be used.
    # @param look The result lookahead set.
    # @param lookBusy A set used for preventing epsilon closures in the ATN
    # from causing a stack overflow. Outside code should pass
    # {@code new HashSet<ATNConfig>} for this argument.
    # @param calledRuleStack A set used for preventing left recursion in the
    # ATN from causing a stack overflow. Outside code should pass
    # {@code new BitSet()} for this argument.
    # @param seeThruPreds {@code true} to true semantic predicates as
    # implicitly {@code true} and "see through them", otherwise {@code false}
    # to treat semantic predicates as opaque and add {@link #HIT_PRED} to the
    # result if one is encountered.
    # @param addEOF Add {@link Token#EOF} to the result if the end of the
    # outermost context is reached. This parameter has no effect if {@code ctx}
    # is {@code null}.
    #/
    def _LOOK(self, s, stopState, ctx, look, lookBusy, \
              calledRuleStack, seeThruPreds, addEOF):
        c = ATNConfig(s, 0, ctx)
        if c in lookBusy:
            return
        lookBusy.add(c)
        if s == stopState:
            if ctx is None:
                look.addOne(Token.EPSILON)
                return
            elif ctx.isEmpty() and addEOF:
                look.addOne(Token.EOF)
                return
        if isinstance(s, RuleStopState):
            if ctx is None:
                look.addOne(Token.EPSILON)
                return
            elif ctx.isEmpty() and addEOF:
                look.addOne(Token.EOF)
                return
            if ctx != PredictionContext.EMPTY:
                # run thru all possible stack tops in ctx
                for i in range(0, len(ctx)):
                    returnState = self.atn.states[ctx.getReturnState(i)]
                    removed = returnState.ruleIndex in calledRuleStack
                    try:
                        calledRuleStack.discard(returnState.ruleIndex)
                        self._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
                    finally:
                        # restore the stack entry we temporarily removed
                        if removed:
                            calledRuleStack.add(returnState.ruleIndex)
                return
        for t in s.transitions:
            if type(t) == RuleTransition:
                if t.target.ruleIndex in calledRuleStack:
                    continue  # avoid left-recursion blowup
                newContext = SingletonPredictionContext.create(ctx, t.followState.stateNumber)
                try:
                    calledRuleStack.add(t.target.ruleIndex)
                    self._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
                finally:
                    calledRuleStack.remove(t.target.ruleIndex)
            elif isinstance(t, AbstractPredicateTransition):
                if seeThruPreds:
                    self._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
                else:
                    look.addOne(self.HIT_PRED)
            elif t.isEpsilon:
                self._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
            elif type(t) == WildcardTransition:
                look.addRange(Interval(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType + 1))
            else:
                # atom/set/range transitions: add their label (renamed from
                # "set", which shadowed the builtin).
                label = t.label
                if label is not None:
                    if isinstance(t, NotSetTransition):
                        label = label.complement(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType)
                    look.addSet(label)
| bsd-3-clause |
etetoolkit/ete | ete3/tools/ete_build_lib/configcheck.py | 3 | 18183 | # #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://etetoolkit.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2015).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC
# Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit may be available in the documentation.
#
# More info at http://etetoolkit.org. Contact: huerta@embl.de
#
#
# #END_LICENSE#############################################################
from __future__ import absolute_import
from __future__ import print_function
import os
from ...utils import print_table
from .configobj import ConfigObj
from .errors import ConfigError
from .utils import colorify
from .apps import APP2CLASS, OPTION2APPTYPE, APPTYPES
import six
from six.moves import map
def build_supermatrix_workflow(wname):
    """Expand a 'cogselector-concatenator-treebuilder' shortcut name into a
    full supermatrix workflow config block keyed by *wname*."""
    links = ["@" + token for token in wname.split("-")]
    if len(links) != 3:
        raise ConfigError("Invalid supermatrix workflow: %s" % wname)
    cog_selector, alg_concatenator, treebuilder = links
    # The same tree builder is used for both amino-acid and nucleotide trees.
    return {
        wname: {
            "_app": "supermatrix",
            "_cog_selector": cog_selector,
            "_alg_concatenator": alg_concatenator,
            "_aa_tree_builder": treebuilder,
            "_nt_tree_builder": treebuilder,
            "_appset": "@builtin_apps",
        }
    }
def build_genetree_workflow(wname):
    """Expand an 'aligner-trimmer-modeltester-treebuilder' shortcut name into
    a full genetree workflow config block keyed by *wname*.

    The literal token "none" (any case) disables the corresponding step.
    """
    def _link(token):
        # "none" disables a step; anything else becomes an '@block' link.
        return "none" if token.lower() == "none" else "@" + token

    links = [_link(token) for token in wname.split("-")]
    if len(links) != 4:
        raise ConfigError("Invalid genetree workflow: %s" % wname)
    aligner, trimmer, modeltester, treebuilder = links
    # Identical pipeline for amino-acid and nucleotide data.
    return {
        wname: {
            "_app": "genetree",
            "_aa_aligner": aligner,
            "_aa_alg_cleaner": trimmer,
            "_aa_model_tester": modeltester,
            "_aa_tree_builder": treebuilder,
            "_nt_aligner": aligner,
            "_nt_alg_cleaner": trimmer,
            "_nt_model_tester": modeltester,
            "_nt_tree_builder": treebuilder,
            "_appset": "@builtin_apps",
        }
    }
def list_workflows(config, target_type=None):
    """Print the registered shortcut workflow names as formatted tables.

    config: parsed config mapping; target_type: 'supermatrix', 'genetree'
    or None to print both sections.
    """
    if not target_type or target_type == 'supermatrix':
        print()
        # One row per meta-workflow: (name, human description, column count).
        # NOTE(review): "Worflow" typo below is user-visible printed output.
        avail_meta = [(k, config["workflow_desc"].get(k, ""), len(v)) for k,v in six.iteritems(config.get('supermatrix_meta_workflow', {}))]
        print_table(avail_meta, fix_col_width=[45, 60, 10], header=["Worflow name", "Description", "threads"], title="Supermatrix shortcut workflow names", row_line=True)
    if not target_type or target_type == 'genetree':
        print()
        avail_meta = [(k, config["workflow_desc"].get(k, ""), len(v)) for k,v in six.iteritems(config.get('genetree_meta_workflow', {}))]
        print_table(avail_meta, fix_col_width=[45, 60, 10], header=["Worflow name", "Description", "threads"], title="GeneTree shortcut workflow names", row_line=True)
def list_apps(config, target_apps = None):
    """Print, for each application type, the config blocks that implement it.

    target_apps: optional set of names used to restrict both the app types
    and the concrete applications listed.
    """
    for appname, validapps in APPTYPES.items():
        if target_apps:
            if appname not in target_apps and target_apps & validapps:
                # The type itself was not requested, but some of its concrete
                # applications were: narrow the listing to those.
                validapps = target_apps & validapps
            elif appname in target_apps:
                # The whole type was requested: keep every valid app.
                pass
            else:
                continue
        # One row per config block whose "_app" belongs to this type.
        avail_blocks = [[blockname, block["_app"], block.get("_desc", "")] for blockname, block in config.items() if block.get("_app") in validapps]
        print_table(avail_blocks, header=["name", "app type", "desc."], max_col_width=70, title=appname, row_line=True)
        print()
def block_detail(block_name, config, color=True):
    """Print the full configuration of *block_name*, recursively expanding
    every '@referenced' sub-block it links to.

    block_name may be a plain config block or a genetree meta-workflow name.
    Raises ValueError when the name cannot be resolved.
    """
    blocks_to_show = {}
    iterable_types = set([set, list, tuple, frozenset])
    if block_name not in config:
        # Not a direct block: try resolving it as a genetree meta-workflow
        # whose entries ('@name') point at the actual blocks.
        try:
            next_block = [x.lstrip('@') for x in config.get('genetree_meta_workflow', {})[block_name]]
            metaworkflow = True
        except Exception as e:
            print(e)
            raise ValueError('block name not found [%s]' %block_name)
    else:
        metaworkflow = False
        next_block = [block_name]
    # Walk every block reachable through '@' links, remembering the order
    # (pos) in which each block was first visited.
    pos = 0
    while next_block:
        block = next_block.pop()
        blocks_to_show[block] = pos
        for k1, v1 in six.iteritems(config[block]):
            if type(v1) in iterable_types:
                for v2 in v1:
                    if isinstance(v2, str) and v2.startswith('@'):
                        next_block.append(v2[1:])
            elif isinstance(v1, str) and v1.startswith('@'):
                next_block.append(v1[1:])
        pos += 1
    # Header: the meta-workflow definition itself, when applicable.
    if metaworkflow and color:
        print(colorify('[genetree_meta_workflow]', 'yellow'))
        print("%s = %s" %(block_name, ', '.join(config["genetree_meta_workflow"][block_name])))
        print()
    elif metaworkflow:
        print('[genetree_meta_workflow]')
        print("%s = %s" %(block_name, ', '.join(config["genetree_meta_workflow"][block_name])))
        print()
    # Body: every collected block, in visit order, one INI-style section each.
    for b, pos in sorted(list(blocks_to_show.items()), key=lambda x: x[1]):
        if b == "builtin_apps":
            continue
        if color:
            print(colorify('[%s]' %b, 'green'))
        else:
            print('[%s]' %b)
        for k,v in sorted(config[b].items()):
            if k == "_inherits":
                continue
            if type(v) in iterable_types:
                # Render list values with a trailing comma, INI style.
                v = ', '.join(map(str, v))+','
            if color:
                if k == '_app':
                    print(colorify('% 35s = %s' %(k, v), "lblue"))
                else:
                    print('%s = %s' %(colorify("% 35s" %k, "orange"), v))
            else:
                print('% 40s = %s' %(k, v))
        print()
def parse_block(blockname, conf):
    """Validate and type-cast every attribute of conf[blockname] in place.

    Each value is run through the checker registered in CHECKERS for
    (block '_app' type, attribute); '@' links are verified to point at valid
    blocks.  Raises ConfigError on any violation or missing required attr.
    """
    blocktype = conf[blockname].get('_app', 'unknown')
    for attr, v in list(conf[blockname].items()):
        # check_type returns the (possibly cast) value, stored back in place.
        conf[blockname][attr] = check_type(blocktype, attr, v)
        if isinstance(conf[blockname][attr], list):
            for i in conf[blockname][attr]:
                check_block_link(conf, blockname, i, attr)
        else:
            check_block_link(conf, blockname, conf[blockname][attr], attr)
    # Check for missing attributes
    for tag, tester in six.iteritems(CHECKERS):
        if tag[0] == blocktype and (tester[2] and tag[1] not in conf[blockname]):
            raise ConfigError('[%s] attribute expected in block [%s]' %(tag[1], blockname))
def check_config(fname):
    """Load the config file *fname*, expand meta-workflow shortcuts and
    '_inherits' bases, validate every block, and return the ConfigObj.

    Raises ConfigError on any structural or type problem.
    """
    conf = ConfigObj(fname, list_values=True)

    # expand meta_workflows
    for meta_name, meta_wf in conf["genetree_meta_workflow"].items():
        for wkname in meta_wf:
            conf.update(build_genetree_workflow(wkname.lstrip("@")))
    for meta_name, meta_wf in conf["supermatrix_meta_workflow"].items():
        for wkname in meta_wf:
            conf.update(build_supermatrix_workflow(wkname.lstrip("@")))

    # expand inherits options
    for k, v in list(conf.items()):
        if '_inherits' in v:
            base = v['_inherits']
            try:
                new_dict = dict(conf[base])
            except KeyError:
                raise ConfigError('[%s] config block is referred in [%s] but not present in config file' %(base, k))
            # Base values first, then the block's own keys override them.
            new_dict.update(v)
            conf[k] = new_dict

    # check blocks
    for blockname in list(conf.keys()):
        parse_block(blockname, conf)

    # Check that the number of columns in main workflow definition is the same in all attributes
    for flow_name in six.iterkeys(conf):
        if conf[flow_name].get("_app", "") != "main":
            continue
        npr_config = [len(v) for k, v in six.iteritems(conf[flow_name])
                      if type(v) == list and k != "target_levels"]
        if len(set(npr_config)) != 1:
            raise ConfigError("List values in [%s] should all have the same length" %flow_name)
    return conf
def check_type(blocktype, attr, v):
    """Run the checker registered for (blocktype, attr) on *v*.

    Returns the (possibly type-cast) value; values with no registered
    checker pass through unchanged.
    """
    try:
        tester, kargs, _required = CHECKERS[(blocktype, attr)]
    except KeyError:
        return v
    return tester(v, **kargs)
def check_block_link(conf, parent, v, attr_name):
    """Validate that *v*, when it is an '@block' reference, points at an
    existing config block whose app type is allowed for *attr_name*."""
    if not (isinstance(v, str) and v.startswith('@')):
        return  # plain values are not links; nothing to validate
    target = v[1:]
    if target not in conf:
        raise ConfigError('[%s] config block referred in [%s] but not found in config' %(v, parent))
    if attr_name in OPTION2APPTYPE and conf[target]["_app"] not in OPTION2APPTYPE[attr_name]:
        raise ConfigError('[%s] is not valid [%s] application block' %(v, attr_name))
def is_file(value):
    """Return *value* unchanged if it names an existing regular file."""
    if not os.path.isfile(value):
        raise ConfigError("Not valid file")
    return value
def is_dir(value):
    """Return *value* unchanged if it names an existing directory.

    Raises ConfigError otherwise.
    """
    if os.path.isdir(value):
        return value
    else:
        # BUGFIX: the message previously said "Not valid file" (copy-paste
        # from is_file); report a directory problem instead.
        raise ConfigError("Not valid directory")
def check_number(value, cast, minv=0, maxv=None):
    """Cast *value* with *cast* (e.g. int or float) and verify it lies in
    [minv, maxv].  A bound of None disables that side of the check."""
    try:
        number = cast(value)
    except ValueError:
        raise ConfigError("Expected [%s] number. Found [%s]" %(cast, value))
    too_small = minv is not None and number < cast(minv)
    too_big = maxv is not None and number > cast(maxv)
    if too_small or too_big:
        shown_min = minv if minv is not None else "any"
        shown_max = maxv if maxv is not None else "any"
        raise ConfigError("[%s] not in the range (%s,%s)" %
                          (value, shown_min, shown_max))
    return number
def is_set(value):
    """Convert a config list into a set (duplicates collapse)."""
    if isinstance(value, list):
        return set(value)
    raise ConfigError("Expected a list of values. Found [%s]" %value)
def is_appset_entry(value):
    """Validate an application-set entry of the form [appname, maxcpus].

    Returns [appname, maxcpus] with maxcpus cast to int; raises ConfigError
    on any malformed entry.
    """
    if not isinstance(value, list) or len(value) != 2:
        raise ConfigError("unexpected application format [%s]. Expected [appname, maxcpus] format" %value)
    try:
        # BUGFIX: the cpu count is the second element (index 1); the previous
        # value[2] always raised IndexError for the 2-item lists accepted
        # by the length check above.
        cores = int(value[1])
    except ValueError:
        raise ConfigError("unexpected application format [%s]. Expected [appname, maxcpus] format (maxcpus as integer)" %value)
    return [value[0], cores]
def is_float_list(value, minv=0, maxv=None):
    """Validate a list whose items are all floats within [minv, maxv]."""
    is_list(value)
    return [check_number(item, float, minv, maxv) for item in value]
def is_integer_list(value, minv=0, maxv=None):
    # NOTE(review): this definition is shadowed by a second is_integer_list
    # defined later in this module, so it is dead code at import time --
    # consider removing one of the two.
    """Validate a list whose items are all integers within [minv, maxv]."""
    is_list(value)
    typed_value = []
    for v in value:
        typed_value.append(check_number(v, int, minv, maxv))
    return typed_value
def is_float(value, minv=None, maxv=None):
    """Cast *value* to float, optionally range-checked against [minv, maxv]."""
    return check_number(value, float, minv=minv, maxv=maxv)
def is_integer(value, minv=None, maxv=None):
    """Cast *value* to int, optionally range-checked against [minv, maxv]."""
    return check_number(value, int, minv=minv, maxv=maxv)
def is_list(value):
    """Ensure *value* is a list and return it unchanged."""
    if isinstance(value, list):
        return value
    raise ConfigError("[%s] is not a list" %value)
def is_app_link(value, allow_none=True):
    """Validate an '@block' link; the literal 'none' is accepted when
    allow_none is True (step disabled)."""
    disabled = allow_none and value == 'none'
    if disabled or value.startswith('@'):
        return value
    raise ConfigError('[%s] is not a valid block link' %value)
def is_app_list(value, allow_none=True):
    """Validate that *value* is a list of '@block' links ('none' entries
    allowed by default)."""
    for entry in is_list(value):
        is_app_link(entry, allow_none=allow_none)
    return value
def is_boolean(value):
    """Parse a config boolean: 1/true/yes -> True, 0/false/no -> False
    (case-insensitive, after str() conversion)."""
    text = str(value).lower()
    if text in ("1", "true", "yes"):
        return True
    if text in ("0", "false", "no"):
        return False
    raise ConfigError('[%s] is not a boolean value' %(value))
def is_integer_list(value, maxv=None, minv=None):
    # NOTE(review): redefines (and shadows) the earlier is_integer_list in
    # this module; note also the swapped (maxv, minv) parameter order and the
    # different defaults (minv=None here vs minv=0 above).
    """Validate a list whose items are all integers within [minv, maxv]."""
    is_list(value)
    return [is_integer(v, maxv=maxv, minv=minv) for v in value]
def is_correlative_integer_list(value, minv=None, maxv=None):
    """Validate a list of strictly increasing positive integers, each within
    [minv, maxv] when bounds are given."""
    is_list(value)
    typed_value = []
    # last_value starts at 0, so the first entry must be >= 1.
    last_value = 0
    for v in value:
        # BUGFIX: the caller-supplied bounds were previously discarded
        # (is_integer was always called with minv=None, maxv=None).
        cv = is_integer(v, minv=minv, maxv=maxv)
        typed_value.append(cv)
        if cv <= last_value:
            raise ConfigError("[%s] Numeric values are not correlative" %value)
        last_value = cv
    return typed_value
def is_text(value):
    """Ensure *value* is a plain string and return it unchanged."""
    if not isinstance(value, str):
        raise ConfigError("[%s] is not a valid text string" %value)
    return value
def is_percent(value):
    """Validate a percentage string such as '0.4%' (trailing '%' optional).

    Returns the original, untouched string.
    """
    numeric_part = value.rstrip('%')
    try:
        is_float(numeric_part)
    except ConfigError:
        raise ConfigError('[%s] should a percentage value (i.e. 0.4%%)' %value)
    return value
def is_integer_or_percent(value):
    """Accept an integer (returned as int) or a percentage string
    (returned unchanged)."""
    try:
        return is_integer(value)
    except ConfigError:
        pass
    try:
        is_percent(value)
    except ConfigError:
        raise ConfigError('[%s] should be an integer or a percentage (i.e. 15 or 0.4%%)' %value)
    return value
def is_choice(value, choices):
    """Ensure *value* is one of *choices* and return it unchanged."""
    if value not in choices:
        raise ConfigError('[%s] should be one of %s' %(value, choices))
    return value
def is_raxml_bootstrap(value):
    """Validate a raxml bootstrap setting: an integer (returned as int),
    or the literal strings 'alrt' / 'alrt_phyml' (returned unchanged)."""
    try:
        return is_integer(value)
    # BUGFIX: is_integer signals failure with ConfigError (check_number
    # converts the ValueError), so the previous "except ValueError" made the
    # 'alrt' fallback below unreachable.
    except ConfigError:
        if value == 'alrt' or value == 'alrt_phyml':
            return value
        else:
            raise ConfigError('[%s] bootstrap value should an integer, "alrt" or "phyml_alrt"' %(value))
# Registry consumed by check_type()/parse_block(): maps
# (app_block_type, attribute_name) -> (checker_fn, checker_kwargs, required)
# where required=True means the attribute must be present in every block of
# that type.
CHECKERS = {
    # (app_name, attr_name): (checker_fn, args, required_attr)
    ("main", "_npr"): (is_app_list, {}, True),
    ("main", "_workflow"): (is_app_list, {"allow_none":False}, True),
    ("main", "_appset"): (is_app_link, {"allow_none":False}, True),

    ("npr", "_max_iters"): (is_integer, {"minv":1}, True),
    ("npr", "_switch_aa_similarity"): (is_float, {"minv":0, "maxv":1}, True),
    ("npr", "_max_seq_similarity"): (is_float, {"minv":0, "maxv":1}, True),
    ("npr", "_min_branch_support"): (is_float, {"minv":0, "maxv":1}, True),
    ("npr", "_min_npr_size"): (is_integer, {"minv":3}, True),
    ("npr", "_tree_splitter"): (is_app_link, {}, True),
    ("npr", "_target_levels"): (is_list, {}, False),

    ("genetree", "_aa_aligner"): (is_app_link, {}, True),
    ("genetree", "_nt_aligner"): (is_app_link, {}, True),
    ("genetree", "_aa_alg_cleaner"): (is_app_link, {}, True),
    ("genetree", "_nt_alg_cleaner"): (is_app_link, {}, True),
    ("genetree", "_aa_model_tester"): (is_app_link, {}, True),
    ("genetree", "_nt_model_tester"): (is_app_link, {}, True),
    ("genetree", "_aa_tree_builder"): (is_app_link, {}, True),
    ("genetree", "_nt_tree_builder"): (is_app_link, {}, True),
    ("genetree", "_appset"): (is_app_link, {"allow_none":False}, True),

    ("supermatrix", "_cog_selector"): (is_app_link, {}, True),
    ("supermatrix", "_alg_concatenator"): (is_app_link, {}, True),
    ("supermatrix", "_aa_tree_builder"): (is_app_link, {}, True),
    ("supermatrix", "_nt_tree_builder"): (is_app_link, {}, True),
    # NOTE(review): duplicate key -- ("genetree", "_appset") already appears
    # above; this entry silently overwrites it (identical value, so harmless,
    # but one of the two should probably be ("supermatrix", "_appset")).
    ("genetree", "_appset"): (is_app_link, {"allow_none":False}, True),

    ("concatalg", "_default_aa_model"): (is_text, {}, True),
    ("concatalg", "_default_nt_model"): (is_text, {}, True),
    #("concatalg", "_workflow"): (is_app_link, {"allow_none":False}, True),

    ("cogselector", "_species_missing_factor"): (is_float, {"minv":0, "maxv":1}, True),
    ("cogselector", "_max_cogs"): (is_integer, {"minv":1}, True),

    ("treesplitter", "_max_outgroup_size"): (is_integer_or_percent, {}, True),
    ("treesplitter", "_min_outgroup_support"): (is_float, {"minv":0, "maxv":1}, True),
    ("treesplitter", "_outgroup_topology_dist"): (is_boolean, {}, True),
    ("treesplitter", "_first_split"): (is_text, {}, True),

    ("metaaligner", "_aligners"): (is_app_list, {}, True),
    ("metaaligner", "_alg_trimming"): (is_boolean, {}, True),

    ("prottest", "_lk_mode"): (is_choice, {"choices":set(['phyml', 'raxml'])}, True),
    ("prottest", "_models"): (is_list, {}, True),

    ("pmodeltest", "_aa_models"): (is_text, {}, True),
    ("pmodeltest", "_nt_models"): (is_text, {}, True),

    ("raxml", "_aa_model"): (is_text, {}, True),
    ("raxml", "_method"): (is_choice, {"choices":set(['GAMMA', 'CAT'])}, True),
    ("raxml", "_bootstrap"): (is_raxml_bootstrap, {}, True),
    ("raxml", "_model_suffix"): (is_text, {}, True),

    ("raxml-sse", "_aa_model"): (is_text, {}, True),
    ("raxml-sse", "_method"): (is_choice, {"choices":set(['GAMMA', 'CAT'])}, True),
    ("raxml-sse", "_alrt_calculation"): (is_choice, {"choices":set(['phyml', 'raxml'])}, True),

    ("raxml-avx", "_aa_model"): (is_text, {}, True),
    ("raxml-avx", "_method"): (is_choice, {"choices":set(['GAMMA', 'CAT'])}, True),
    ("raxml-avx", "_alrt_calculation"): (is_choice, {"choices":set(['phyml', 'raxml'])}, True),

    # Executable registry entries: each value is [appname, maxcpus].
    ("appset", "muscle"): (is_appset_entry, {}, True),
    ("appset", "mafft"): (is_appset_entry, {}, True),
    ("appset", "clustalo"): (is_appset_entry, {}, True),
    ("appset", "trimal"): (is_appset_entry, {}, True),
    ("appset", "readal"): (is_appset_entry, {}, True),
    ("appset", "tcoffee"): (is_appset_entry, {}, True),
    ("appset", "phyml"): (is_appset_entry, {}, True),
    ("appset", "raxml-pthreads"): (is_appset_entry, {}, True),
    ("appset", "raxml"): (is_appset_entry, {}, True),
    # ("appset", "raxml-pthreads-sse3"): (is_appset_entry, {}, True),
    # ("appset", "raxml-sse3"): (is_appset_entry, {}, True),
    # ("appset", "raxml-pthreads-avx"): (is_appset_entry, {}, True),
    # ("appset", "raxml-avx"): (is_appset_entry, {}, True),
    # ("appset", "raxml-pthreads-avx2"): (is_appset_entry, {}, True),
    # ("appset", "raxml-avx2"): (is_appset_entry, {}, True),
    ("appset", "dialigntx"): (is_appset_entry, {}, True),
    ("appset", "fasttree"): (is_appset_entry, {}, True),
    ("appset", "statal"): (is_appset_entry, {}, True),
    }
| gpl-3.0 |
pilou-/ansible | test/units/module_utils/network/aci/test_aci.py | 22 | 13863 | # -*- coding: utf-8 -*-
# Copyright 2017 Dag Wieers <dag@wieers.com>
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
from units.compat import unittest
from ansible.module_utils.network.aci.aci import ACIModule
from ansible.module_utils.six import PY2, PY3
from ansible.module_utils._text import to_native
import pytest
class AltModule():
    """Minimal stand-in for an AnsibleModule: exposes only the .params
    mapping that ACIModule reads."""
    # Connection/state parameters a real module would receive from Ansible.
    params = {
        'hostname': 'dummy',
        'port': 123,
        'protocol': 'https',
        'state': 'present',
    }
class AltACIModule(ACIModule):
    """ACIModule variant whose __init__ skips all argument parsing and wires
    in the dummy AltModule instead."""
    def __init__(self):
        # Deliberately do NOT call super().__init__(); just seed the
        # attributes the tests touch.
        self.result = {'changed': False}
        self.module = AltModule
        self.params = AltModule.params
# Module-level fixture shared by every test below.
aci = AltACIModule()

try:
    from lxml import etree
    # xmljson requires Python 2.7+, so only import it there.
    if sys.version_info >= (2, 7):
        from xmljson import cobra
except ImportError:
    # Optional XML dependencies missing: skip the whole module.
    pytestmark = pytest.mark.skip("ACI Ansible modules require the lxml and xmljson Python libraries")
class AciRest(unittest.TestCase):
    """Tests for ACIModule.response_json()/response_xml() parsing of APIC replies.

    Changes from the previous revision: fixed the ``self.maxDiffi`` typo in
    test_empty_response (it silently created an unused attribute instead of
    disabling the assertion diff limit), removed a trailing comma that turned
    an expected error text into a one-element tuple, and dropped the unused
    ``json_result``/``xml_result`` locals.
    """

    def test_invalid_aci_login(self):
        """A 401 reply must populate aci.error, aci.imdata and aci.totalCount."""
        self.maxDiff = None

        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )

        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]

        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_valid_aci_login(self):
        """A successful aaaLogin reply must be converted to the expected dict tree."""
        self.maxDiff = None

        imdata = [{
            'aaaLogin': {
                'attributes': {
                    'token': 'ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem',  # NOQA
                    'siteFingerprint': 'NdxD72K/uXaUK0wn',
                    'refreshTimeoutSeconds': '600',
                    'maximumLifetimeSeconds': '86400',
                    'guiIdleTimeoutSeconds': '1200',
                    'restTimeoutSeconds': '90',
                    'creationTime': '1500134817',
                    'firstLoginTime': '1500134817',
                    'userName': 'admin',
                    'remoteUser': 'false',
                    'unixUserId': '15374',
                    'sessionId': 'o7hObsqNTfCmDGcZI5c4ng==',
                    'lastName': '',
                    'firstName': '',
                    'version': '2.0(2f)',
                    'buildTime': 'Sat Aug 20 23:07:07 PDT 2016',
                    'node': 'topology/pod-1/node-1',
                },
                'children': [{
                    'aaaUserDomain': {
                        'attributes': {
                            'name': 'all',
                            'rolesR': 'admin',
                            'rolesW': 'admin',
                        },
                        'children': [{
                            'aaaReadRoles': {
                                'attributes': {},
                            },
                        }, {
                            'aaaWriteRoles': {
                                'attributes': {},
                                'children': [{
                                    'role': {
                                        'attributes': {
                                            'name': 'admin',
                                        },
                                    },
                                }],
                            },
                        }],
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-common',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-infra',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-mgmt',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }],
            },
        }]

        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"aaaLogin":{"attributes":{"token":"ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem","siteFingerprint":"NdxD72K/uXaUK0wn","refreshTimeoutSeconds":"600","maximumLifetimeSeconds":"86400","guiIdleTimeoutSeconds":"1200","restTimeoutSeconds":"90","creationTime":"1500134817","firstLoginTime":"1500134817","userName":"admin","remoteUser":"false","unixUserId":"15374","sessionId":"o7hObsqNTfCmDGcZI5c4ng==","lastName":"","firstName":"","version":"2.0(2f)","buildTime":"Sat Aug 20 23:07:07 PDT 2016","node":"topology/pod-1/node-1"},"children":[{"aaaUserDomain":{"attributes":{"name":"all","rolesR":"admin","rolesW":"admin"},"children":[{"aaaReadRoles":{"attributes":{}}},{"aaaWriteRoles":{"attributes":{},"children":[{"role":{"attributes":{"name":"admin"}}}]}}]}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-common","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-infra","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-mgmt","readPrivileges":"admin","writePrivileges":"admin"}}}]}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">\n<aaaLogin token="ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem" siteFingerprint="NdxD72K/uXaUK0wn" refreshTimeoutSeconds="600" maximumLifetimeSeconds="86400" guiIdleTimeoutSeconds="1200" restTimeoutSeconds="90" creationTime="1500134817" firstLoginTime="1500134817" userName="admin" remoteUser="false" unixUserId="15374" sessionId="o7hObsqNTfCmDGcZI5c4ng==" lastName="" firstName="" version="2.0(2f)" buildTime="Sat Aug 20 23:07:07 PDT 2016" node="topology/pod-1/node-1">\n<aaaUserDomain name="all" rolesR="admin" rolesW="admin">\n<aaaReadRoles/>\n<aaaWriteRoles>\n<role name="admin"/>\n</aaaWriteRoles>\n</aaaUserDomain>\n<DnDomainMapEntry dn="uni/tn-common" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-infra" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-mgmt" readPrivileges="admin" writePrivileges="admin"/>\n</aaaLogin></imdata>\n'''  # NOQA
        aci.response_xml(xml_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_invalid_input(self):
        """Same fixture as test_invalid_aci_login: a 401 error reply parses cleanly."""
        self.maxDiff = None

        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )

        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]

        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_empty_response(self):
        """Empty payloads must produce a code -1 parse error and keep the raw input."""
        # FIX: was ``self.maxDiffi = None`` (typo) which silently created an
        # unused attribute instead of disabling the diff-length limit.
        self.maxDiff = None

        if PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 1 (char 0)"

        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''

        json_response = ''
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

        # Python 2.7+ is needed for xmljson; the exact lxml error text varies
        # by lxml and Python version, hence the version ladder below.
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            # FIX: a trailing comma here used to turn error_text into a tuple,
            # which could never equal the string stored by response_xml().
            error_text = "Unable to parse output as XML, see 'raw' output. None"
        elif etree.LXML_VERSION < (4, 0, 0, 0):
            error_text = to_native(u"Unable to parse output as XML, see 'raw' output. None (line 0)", errors='surrogate_or_strict')
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (line 1)"
        elif sys.version_info >= (3, 8):
            error_text = "Unable to parse output as XML, see 'raw' output. None (line 0)"
        else:
            error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (<string>, line 1)"

        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''

        xml_response = ''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

    def test_invalid_response(self):
        """Truncated payloads must produce a code -1 parse error and keep the raw input."""
        self.maxDiff = None

        if sys.version_info < (2, 7):
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting object: line 1 column 8 (char 8)"
        elif PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 9 (char 8)"

        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '{ "aaa":'

        json_response = '{ "aaa":'
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 5"  # NOQA
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (line 1)"  # NOQA
        else:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (<string>, line 1)"  # NOQA

        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '<aaa '

        xml_response = '<aaa '
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)
| gpl-3.0 |
austin1howard/txt2tags | targets/vimwiki.py | 4 | 3150 | """
A Vimwiki target.
https://code.google.com/p/vimwiki/
"""
from targets import _

# Human-readable target name shown in txt2tags' target listing.
NAME = _('Vimwiki document')

# Broad family of this target; 'wiki' enables the generic wiki handling.
TYPE = 'wiki'

# Document preamble template; the %(HEADERn)s placeholders are filled with
# the three header lines of the source document.
HEADER = """\
%%title %(HEADER1)s
## by %(HEADER2)s in %(HEADER3)s
%%toc %(HEADER1)s
"""

# Markup translation table: txt2tags element name -> Vimwiki markup.
# '\a' marks where the element's text is substituted.  Entries left
# commented out have no Vimwiki equivalent (or are deliberately disabled).
# Leading/trailing spaces inside values (e.g. ' *') are significant.
TAGS = {
    'title1'             : '= \a =',
    'title2'             : '== \a ==',
    'title3'             : '=== \a ===',
    'title4'             : '==== \a ====',
    'title5'             : '===== \a =====',

    'blockVerbOpen'      : '{{{',
    'blockVerbClose'     : '}}}',
    'blockQuoteOpen'     : '{{{',
    'blockQuoteClose'    : '}}}',

    'fontMonoOpen'       : '`',
    'fontMonoClose'      : '`',
    'fontBoldOpen'       : ' *',
    'fontBoldClose'      : '* ',
    'fontItalicOpen'     : ' _',
    'fontItalicClose'    : '_ ',
    #'fontUnderlineOpen'  : '<u>',
    #'fontUnderlineClose' : '</u>',
    'fontStrikeOpen'     : ' ~~',
    'fontStrikeClose'    : '~~ ',

    'listItemOpen'       : '- ',
    'listItemLine'       : '\t',
    'numlistItemOpen'    : '# ',
    'numlistItemLine'    : '\t',

    'bar1'               : '----',

    'url'                : '[\a]',
    'urlMark'            : '[\a \a]',
    'email'              : 'mailto:\a',
    'emailMark'          : '[mailto:\a \a]',
    'img'                : '[\a]',
    #'_imgAlignLeft'      : '|left',
    #'_imgAlignCenter'    : '|center',
    #'_imgAlignRight'     : '|right',

    'tableRowOpen'       : '| ',
    'tableRowClose'      : ' |',
    #'tableTitleRowOpen'  : '|-\n! ',
    'tableCellSep'       : ' | ',
    #'tableTitleCellSep'  : ' | ',
    #'_tableBorder'       : ' border="1"',
    #'_tableAlignCenter'  : ' align="center"',

    'comment'            : '%% \a',
    'TOC'                : '%toc',
}

# Feature switches consumed by the txt2tags core; 1 enables the behaviour,
# commented-out flags are intentionally left off for this target.
RULES = {
    'linkable':1,
    'tableable':1,
    #'spacedlistitem':1,
    #'tablecellstrip':1,
    #'autotocwithbars':1,
    #'spacedlistitemopen':1,
    #'spacednumlistitemopen':1,
    #'deflisttextstrip':1,
    'autonumberlist':1,
    'autonumbertitle':1,
    'imgalignable':1,
    'keeplistindent':1,
    'blanksaroundpara':1,
    'blanksaroundverb':1,
    # 'blanksaroundquote':1,
    #'blanksaroundlist':1,
    #'blanksaroundnumlist':1,
    #'blanksarounddeflist':1,
    'blanksaroundtable':1,
    'blanksaroundbar':1,
    'blanksaroundtitle':1,
    'blanksaroundnumtitle':1,
}
| gpl-2.0 |
duyetdev/openerp-6.1.1 | openerp/addons/edi/__init__.py | 9 | 1370 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging

import models
import edi_service
from models.edi import EDIMixin, edi_document

# web: the controllers are optional — they import successfully only when the
# openerp-web addon runs embedded in the same process.  Missing them is not
# fatal, but EDI's web endpoints will be unavailable.
try:
    import controllers
except ImportError:
    logging.getLogger('init.load').warn(
        """Could not load openerp-web section of EDI, EDI will not behave correctly
To fix, launch openerp-web in embedded mode""")

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fw1121/Pandoras-Toolbox-for-Bioinformatics | src/SPAdes/ext/src/samtools/misc/varfilter.py | 80 | 5783 | #!/software/bin/python
# Author: lh3, converted to python and modified to add -C option by Aylwyn Scally
#
# About:
# varfilter.py is a port of Heng's samtools.pl varFilter script into
# python, with an additional -C INT option. This option sets a minimum
# consensus score, above which the script will output a pileup line
# wherever it _could have_ called a variant, even if none is actually
# called (i.e. hom-ref positions). This is important if you want to
# subsequently merge the calls with those for another individual to get a
# synoptic view of calls at each site. Without this option, and in all
# other respects, it behaves like samtools.pl varFilter.
#
# Aylwyn Scally as6@sanger.ac.uk
# Filtration code:
#
# C low CNS quality (hom-ref only)
# d low depth
# D high depth
# W too many SNPs in a window (SNP only)
# G close to a high-quality indel (SNP only)
# Q low RMS mapping quality (SNP only)
# g close to another indel with higher quality (indel only)
# s low SNP quality (SNP only)
# i low indel quality (indel only)
import sys
import getopt
def usage():
    # Print command-line help.  Python 2 print statement: this script
    # predates Python 3 support.
    print '''usage: varfilter.py [options] [cns-pileup]
Options: -Q INT minimum RMS mapping quality for SNPs
-q INT minimum RMS mapping quality for gaps
-d INT minimum read depth
-D INT maximum read depth
-S INT minimum SNP quality
-i INT minimum indel quality
-C INT minimum consensus quality for hom-ref sites
-G INT min indel score for nearby SNP filtering
-w INT SNP within INT bp around a gap to be filtered
-W INT window size for filtering dense SNPs
-N INT max number of SNPs in a window
-l INT window size for filtering adjacent gaps
-p print filtered variants'''
def varFilter_aux(first, is_print):
    """Flush one staged pileup record.

    ``first`` is a staging entry laid out as
    ``[score, filter_code, deletion_len, is_snp, <pileup fields...>]``.
    Records with filter_code 0 pass the filters and go to stdout; filtered
    records are optionally echoed to stderr (``is_print``), prefixed with a
    one-letter reason code.
    """
    try:
        if first[1] == 0:
            sys.stdout.write("\t".join(first[4:]) + "\n")
        elif is_print:
            # The filter_code indexes the reason-letter table "UQdDWGgsiCX"
            # (see the code table in the header comment of this script).
            sys.stderr.write("\t".join(["UQdDWGgsiCX"[first[1]]] + first[4:]) + "\n")
    except IOError:
        # Downstream pipe closed (e.g. piped into head): exit quietly.
        sys.exit()
# Default filter thresholds; each is overridable by the matching option below.
mindepth = 3          # -d: minimum read depth
maxdepth = 100        # -D: maximum read depth
gapgapwin = 30        # -l: window for filtering adjacent gaps
minsnpmapq = 25       # -Q: min RMS mapping quality for SNPs
mingapmapq = 10       # -q: min RMS mapping quality for gaps
minindelscore = 25    # -G: min indel score for nearby-SNP filtering
scorefactor = 100     # weight of flanking indel qualities in the indel score
snpgapwin = 10        # -w: SNPs within this many bp of a gap are filtered
densesnpwin = 10      # -W: window size for dense-SNP filtering
densesnps = 2         # -N: max SNPs tolerated per window
printfilt = False     # -p: echo filtered records to stderr
minsnpq = 0           # -S: minimum SNP quality
minindelq = 0         # -i: minimum indel quality
mincnsq = 0           # -C: minimum consensus quality for hom-ref sites

try:
    options, args = getopt.gnu_getopt(sys.argv[1:], 'pq:d:D:l:Q:w:W:N:G:S:i:C:', [])
except getopt.GetoptError:
    usage()
    sys.exit(2)

for (oflag, oarg) in options:
    if oflag == '-d': mindepth = int(oarg)
    if oflag == '-D': maxdepth = int(oarg)
    if oflag == '-l': gapgapwin = int(oarg)
    if oflag == '-Q': minsnpmapq = int(oarg)
    if oflag == '-q': mingapmapq = int(oarg)
    if oflag == '-G': minindelscore = int(oarg)
    # NOTE(review): '-s' is handled here but is absent from the getopt spec
    # above ('s:' is not declared), so this branch is unreachable — confirm
    # whether scorefactor was meant to be user-tunable.
    if oflag == '-s': scorefactor = int(oarg)
    if oflag == '-w': snpgapwin = int(oarg)
    if oflag == '-W': densesnpwin = int(oarg)
    if oflag == '-C': mincnsq = int(oarg)
    if oflag == '-N': densesnps = int(oarg)
    if oflag == '-p': printfilt = True
    if oflag == '-S': minsnpq = int(oarg)
    if oflag == '-i': minindelq = int(oarg)

# Read the pileup from a file argument if given, else from stdin.
if len(args) < 1:
    inp = sys.stdin
else:
    inp = open(args[0])
# calculate the window size: a record may be affected by any filter whose
# window reaches it, so it must be staged until all such windows have passed.
max_dist = max(gapgapwin, snpgapwin, densesnpwin)

# `staging` holds [score, filter_code, deletion_len, is_snp, <pileup fields>]
# entries; varFilter_aux() flushes them in input order.
staging = []
for t in (line.strip().split() for line in inp):
    # score: -1 for SNPs/hom-ref, >= 0 for indels; flt: 0 means "passes".
    (flt, score) = (0, -1)
    # non-var sites
    if t[3] == '*/*':
        continue
    is_snp = t[2].upper() != t[3].upper()
    # Without -C, hom-ref sites are not emitted at all.
    if not (is_snp or mincnsq):
        continue
    # clear the out-of-range elements
    while staging:
        # Still on the same chromosome and the first element's window still affects this position?
        if staging[0][4] == t[0] and int(staging[0][5]) + staging[0][2] + max_dist >= int(t[1]):
            break
        varFilter_aux(staging.pop(0), printfilt)
    # first a simple depth filter (codes: d = low depth, D = high depth)
    if int(t[7]) < mindepth:
        flt = 2
    elif int(t[7]) > maxdepth:
        flt = 3
    # quality filters (i = low indel qual, s = low SNP qual, C = low cns qual)
    if t[2] == '*':  # an indel
        if minindelq and minindelq > int(t[5]):
            flt = 8
    elif is_snp:
        if minsnpq and minsnpq > int(t[5]):
            flt = 7
    else:
        if mincnsq and mincnsq > int(t[4]):
            flt = 9
    # site dependent filters
    dlen = 0
    if flt == 0:
        if t[2] == '*':  # an indel
            # If deletion, remember the length of the deletion
            (a, b) = t[3].split('/')
            alen = len(a) - 1
            blen = len(b) - 1
            if alen > blen:
                if a[0] == '-': dlen = alen
            elif b[0] == '-': dlen = blen
            if int(t[6]) < mingapmapq:
                flt = 1
            # filtering SNPs: a high-quality indel filters nearby staged SNPs
            if int(t[5]) >= minindelscore:
                for x in (y for y in staging if y[3]):
                    # Is it a SNP and is it outside the SNP filter window?
                    if x[0] >= 0 or int(x[5]) + x[2] + snpgapwin < int(t[1]):
                        continue
                    if x[1] == 0:
                        x[1] = 5
            # calculate the filtering score (different from indel quality)
            score = int(t[5])
            if t[8] != '*':
                score += scorefactor * int(t[10])
            if t[9] != '*':
                score += scorefactor * int(t[11])
            # check the staging list for indel filtering: of two nearby
            # indels, only the higher-scoring one survives (code g).
            for x in (y for y in staging if y[3]):
                # Is it a SNP and is it outside the gap filter window
                if x[0] < 0 or int(x[5]) + x[2] + gapgapwin < int(t[1]):
                    continue
                if x[0] < score:
                    x[1] = 6
                else:
                    flt = 6
                    break
        else:  # a SNP or hom-ref
            if int(t[6]) < minsnpmapq:
                flt = 1
            # check adjacent SNPs: count still-passing SNPs in the window
            k = 1
            for x in (y for y in staging if y[3]):
                if x[0] < 0 and int(x[5]) + x[2] + densesnpwin >= int(t[1]) and (x[1] == 0 or x[1] == 4 or x[1] == 5):
                    k += 1
            # filtering is necessary (code W: too many SNPs in a window)
            if k > densesnps:
                flt = 4
                for x in (y for y in staging if y[3]):
                    if x[0] < 0 and int(x[5]) + x[2] + densesnpwin >= int(t[1]) and x[1] == 0:
                        x[1] = 4
            else:  # then check gap filter (code G: close to a good indel)
                for x in (y for y in staging if y[3]):
                    if x[0] < 0 or int(x[5]) + x[2] + snpgapwin < int(t[1]):
                        continue
                    if x[0] >= minindelscore:
                        flt = 5
                        break
    staging.append([score, flt, dlen, is_snp] + t)
# output the last few elements in the staging list
while staging:
    varFilter_aux(staging.pop(0), printfilt)
| gpl-3.0 |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/pygments/styles/borland.py | 31 | 1562 | # -*- coding: utf-8 -*-
"""
pygments.styles.borland
~~~~~~~~~~~~~~~~~~~~~~~
Style similar to the style used in the Borland IDEs.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class BorlandStyle(Style):
    """
    Style similar to the style used in the borland IDEs.
    """

    # No extra style applied to unlisted tokens.
    default_style = ''

    # Token type -> Pygments style definition string
    # (color hex codes plus optional 'bold'/'italic'/'bg:' modifiers).
    styles = {
        Whitespace:             '#bbbbbb',

        Comment:                'italic #008800',
        Comment.Preproc:        'noitalic #008080',
        Comment.Special:        'noitalic bold',

        String:                 '#0000FF',
        String.Char:            '#800080',
        Number:                 '#0000FF',
        Keyword:                'bold #000080',
        Operator.Word:          'bold',
        Name.Tag:               'bold #000080',
        Name.Attribute:         '#FF0000',

        Generic.Heading:        '#999999',
        Generic.Subheading:     '#aaaaaa',
        Generic.Deleted:        'bg:#ffdddd #000000',
        Generic.Inserted:       'bg:#ddffdd #000000',
        Generic.Error:          '#aa0000',
        Generic.Emph:           'italic',
        Generic.Strong:         'bold',
        Generic.Prompt:         '#555555',
        Generic.Output:         '#888888',
        Generic.Traceback:      '#aa0000',

        Error:                  'bg:#e3d2d2 #a61717'
    }
| bsd-3-clause |
lssfau/walberla | python/waLBerla_docs/ipython/ipython-tutorials/material/matplotlib_setup.py | 1 | 3753 | import matplotlib
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML
from tempfile import NamedTemporaryFile
import base64
from IPython import get_ipython
# Module import side effect: switch the running IPython kernel to inline
# matplotlib rendering.
ipython = get_ipython()
ipython.magic("matplotlib inline")  # Show plots as images embedded in iPython notebook
def setMplFigureSize():
    """Enlarge the default matplotlib figure size for notebook display."""
    matplotlib.rcParams.update({'figure.figsize': (15.0, 12.0)})
VIDEO_TAG = """<video controls width="80%">
<source src="data:video/x-m4v;base64,{0}" type="video/mp4">
Your browser does not support the video tag.
</video>"""
def __anim_to_html(anim, fps):
    """Encode *anim* as an H.264 mp4 and return an HTML <video> snippet.

    The base64-encoded video is cached on the animation object
    (``_encoded_video``) so redisplaying does not re-run the encoder.
    """
    if not hasattr(anim, '_encoded_video'):
        # NOTE(review): re-opening a NamedTemporaryFile by name while it is
        # still open works on POSIX but not on Windows — confirm target OS.
        with NamedTemporaryFile(suffix='.mp4') as f:
            anim.save(f.name, fps=fps, extra_args=['-vcodec', 'libx264', '-pix_fmt',
                                                   'yuv420p', '-profile:v', 'baseline', '-level', '3.0'])
            video = open(f.name, "rb").read()
        anim._encoded_video = base64.b64encode(video).decode('ascii')

    return VIDEO_TAG.format(anim._encoded_video)
def makeImshowAnimation(grid, gridUpdateFunction, frames=90, **kwargs):
    """Build a FuncAnimation that redraws *grid* via imshow every frame.

    NOTE(review): every frame receives the same initial ``grid`` object, so
    the animation only advances if ``gridUpdateFunction`` mutates its
    argument in place; a purely functional update would redraw the same
    state each frame — confirm against callers.
    """
    from functools import partial
    fig = plt.figure()
    im = plt.imshow(grid, interpolation='none')

    def updatefig(*args, **kwargs):
        image = kwargs['image']
        image = gridUpdateFunction(image)
        im.set_array(image)
        # FuncAnimation expects an iterable of changed artists.
        return im,

    return animation.FuncAnimation(fig, partial(updatefig, image=grid), frames=frames)
# ------- Version 1: Embed the animation as HTML5 video --------- ----------------------------------
def displayAsHtmlVideo(anim, fps=30, show=True, **kwargs):
    """Display *anim* as an embedded HTML5 video (mode 'video').

    Closes the animation's own figure so it is not shown twice; Ctrl-C
    during encoding is swallowed (returns None).
    """
    try:
        plt.close(anim._fig)
        res = __anim_to_html(anim, fps)
        if show:
            return HTML(res)
        else:
            return HTML("")
    except KeyboardInterrupt:
        pass
# ------- Version 2: Animation is shown in extra matplotlib window ----------------------------------
def displayInExtraWindow(animation, *args, **kwargs):
    """Display the animation in a separate GUI window (mode 'window')."""
    fig = plt.gcf()
    try:
        # Best effort: raise the window to the foreground; not every
        # matplotlib backend exposes this, so failures are ignored.
        fig.canvas.manager.window.raise_()
    except Exception:
        pass
    plt.show()
# ------- Version 3: Animation is shown in images that are updated directly in website --------------
def displayAsHtmlImage(animation, show=True, iterations=10000, *args, **kwargs):
    """Display the animation by repeatedly replacing an inline image
    (mode 'imageupdate').

    Steps the animation ``iterations`` times; each step redraws the current
    figure and clears the previous output.  Ctrl-C stops the loop cleanly.
    """
    from IPython import display

    try:
        if show:
            fig = plt.gcf()
        if show:
            animation._init_draw()
        for i in range(iterations):
            if show:
                display.display(fig)
            animation._step()
            if show:
                # wait=True avoids flicker: clear only once new output exists.
                display.clear_output(wait=True)
    except KeyboardInterrupt:
        display.clear_output(wait=False)
# Dispatcher
# Dispatcher state: set_display_mode() installs the concrete display
# function; until then no animation can be displayed.
animation_display_mode = 'imageupdate'
display_animation_func = None


def disp(*args, **kwargs):
    """Display an animation using the currently selected display mode.

    Raises RuntimeError if set_display_mode() has not been called yet.
    """
    if display_animation_func is None:
        # FIX: previously this did ``raise ("...")`` — raising a plain
        # string is a TypeError in Python 3; raise a real exception.
        raise RuntimeError("Call set_display_mode first")
    return display_animation_func(*args, **kwargs)
def set_display_mode(mode):
    """Select how animations are shown.

    Supported modes:
      'imageupdate' - inline images replaced in place (default)
      'video'       - embedded HTML5 video
      'window'      - separate Qt window
    Also switches the IPython matplotlib backend magic accordingly.
    """
    from IPython import get_ipython
    ipython = get_ipython()
    global animation_display_mode
    global display_animation_func
    animation_display_mode = mode
    if animation_display_mode == 'video':
        ipython.magic("matplotlib inline")
        display_animation_func = displayAsHtmlVideo
    elif animation_display_mode == 'window':
        ipython.magic("matplotlib qt")
        display_animation_func = displayInExtraWindow
    elif animation_display_mode == 'imageupdate':
        ipython.magic("matplotlib inline")
        display_animation_func = displayAsHtmlImage
    else:
        raise Exception("Unknown mode. Available modes 'imageupdate', 'video' and 'window' ")
# Module import side effects: default to in-notebook image updates and a
# large default figure size.
set_display_mode('imageupdate')
setMplFigureSize()
| gpl-3.0 |
openstack/tacker | tacker/tests/unit/api/v1/test_resource_helper.py | 2 | 3758 | # Copyright (c) 2014-2018 China Mobile (SuZhou) Software Technology Co.,Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from tacker import manager
from tacker.api.v1.resource_helper import build_plural_mappings
from tacker.api.v1.resource_helper import build_resource_info
from tacker.tests import base
class ResourceHelperTestCase(base.BaseTestCase):
    """Unit tests for tacker.api.v1.resource_helper."""

    def test_build_plural_mappings(self):
        """Regular plurals ending in 's' map back to their singular form."""
        special_mappings = {}
        resource_map = {
            'vims': {
                'id': {
                    'allow_post': False,
                    'allow_put': False,
                }
            },
            'vnffgs': {
                'id': {
                    'allow_post': False,
                    'allow_put': False,
                }
            },
        }
        expected_res = {'vnffgs': 'vnffg', 'vims': 'vim'}
        result = build_plural_mappings(special_mappings, resource_map)
        self.assertEqual(expected_res, result)

    def test_build_plural_mappings_with_suffix_y(self):
        """Plurals ending in 'ies' map back to the '-y' singular form."""
        special_mappings = {}
        resource_map = {
            'policies': {
                'id': {
                    'allow_post': False,
                }
            },
            'vnffgs': {
                'id': {
                    'allow_post': False,
                    'allow_put': False,
                }
            },
        }
        expected_res = {'vnffgs': 'vnffg', 'policies': 'policy'}
        result = build_plural_mappings(special_mappings, resource_map)
        self.assertEqual(expected_res, result)

    @mock.patch.object(manager.TackerManager, "get_service_plugins")
    def test_build_resource_info(self, mock_get_service_plugins):
        """build_resource_info honours path prefix, action_map and name translation."""
        mock_get_service_plugins.return_value = {"DUMMY": ""}
        plural_mappings = {'test_vnffgs': 'test_vnffg', 'policies': 'policy'}
        resource_map = {
            'policies': {
                'id': {
                    'allow_post': False,
                }
            },
            'test_vnffgs': {
                'id': {
                    'allow_post': False,
                },
            }
        }
        action_map = {'policy': ['do', 'undo']}

        result = build_resource_info(plural_mappings, resource_map, "DUMMY")
        self.assertEqual(2, len(result))
        self.assertEqual("/dummy_svc", result[0].path_prefix)
        self.assertEqual("/dummy_svc", result[1].path_prefix)
        self.assertEqual({'id': {'allow_post': False}}, result[0].attr_map)
        self.assertEqual({'id': {'allow_post': False}}, result[1].attr_map)

        result = build_resource_info(plural_mappings, resource_map, "DUMMY",
                                     action_map=action_map)
        for res in result:
            # FIX: previously this stored ``x in (...)`` in a temporary and
            # asserted assertEqual(tmp, True); assertIn expresses the same
            # check and reports the offending value on failure.
            self.assertIn(res.member_actions, ({}, ['do', 'undo']))

        result = build_resource_info(plural_mappings, resource_map, "DUMMY",
                                     action_map=action_map,
                                     translate_name=True)
        for res in result:
            # translate_name=True replaces underscores with dashes.
            self.assertIn(res.collection, ('policies', 'test-vnffgs'))
| apache-2.0 |
sve-odoo/odoo | addons/lunch/report/order.py | 377 | 2637 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
from openerp.osv import osv
class order(report_sxw.rml_parse):
    """RML parser for the lunch order report.

    Exposes per-user helpers to the report template and keeps a running
    ``net_total`` across all users rendered so far.
    """

    def get_lines(self, user, objects):
        """Return the order lines of *objects* that belong to *user*."""
        # FIX: this filter used to be duplicated verbatim in get_total();
        # it is now the single source of truth.
        return [obj for obj in objects if user.id == obj.user_id.id]

    def get_total(self, user, objects):
        """Return the price total of *user*'s lines and add it to the
        running net total."""
        total = sum(line.price for line in self.get_lines(user, objects))
        self.net_total += total
        return total

    def get_nettotal(self):
        """Return the net total accumulated by previous get_total() calls."""
        return self.net_total

    def get_users(self, objects):
        """Return the distinct users of *objects*, in first-seen order."""
        users = []
        for obj in objects:
            if obj.user_id not in users:
                users.append(obj.user_id)
        return users

    def get_note(self, objects):
        """Return the note of every order line."""
        return [obj.note for obj in objects]

    def __init__(self, cr, uid, name, context):
        super(order, self).__init__(cr, uid, name, context)
        self.net_total = 0.0
        # Make the helpers above callable from the report template.
        self.localcontext.update({
            'time': time,
            'get_lines': self.get_lines,
            'get_users': self.get_users,
            'get_total': self.get_total,
            'get_nettotal': self.get_nettotal,
            'get_note': self.get_note,
        })
class report_lunchorder(osv.AbstractModel):
    """Registers the 'lunch.report_lunchorder' QWeb report, rendered through
    the legacy ``order`` rml_parse wrapper above."""
    _name = 'report.lunch.report_lunchorder'
    _inherit = 'report.abstract_report'
    _template = 'lunch.report_lunchorder'
    _wrapped_report_class = order
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
zerosum0x0/koadic | core/stager.py | 1 | 9463 | import core.plugin
import core.server
import core.payload
import random
import string
import socket
import uuid
class StagerWizard(core.plugin.Plugin):
    """Interactive wizard that configures and spawns an HTTP(S) stager.

    ``run()`` either reuses an existing listener on SRVPORT or starts a new
    one, then registers a Stager instance under the chosen ENDPOINT.
    """

    WORKLOAD = 'NONE'

    def __init__(self, shell):
        self.port = 9999
        super(StagerWizard, self).__init__(shell)

        # Best-effort detection of an outward-facing local IP: connecting a
        # UDP socket selects a source address without sending any packet.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        hostname = '0.0.0.0'
        try:
            s.connect(('8.8.8.8', 80))
            hostname = s.getsockname()[0]
        except:
            pass
        finally:
            s.close()

        # general, non-hidden, non-advanced options
        self.options.register('SRVHOST', hostname, 'Where the stager should call home', alias = 'LHOST')
        self.options.register('SRVPORT', self.port, 'The port to listen for stagers on', alias = 'LPORT')
        self.options.register('EXPIRES', '', 'MM/DD/YYYY to stop calling home', required = False)
        self.options.register('KEYPATH', '', 'Private key for TLS communications', required = False, file = True)
        self.options.register('CERTPATH', '', 'Certificate for TLS communications', required = False, file = True)
        self.options.register('ENDPOINT', self.random_string(5), 'URL path for callhome operations', required = True)
        self.options.register('MODULE', '', 'Module to run once zombie is staged', required = False)
        self.options.register('ONESHOT', 'false', 'oneshot', boolean = True)
        self.options.register('AUTOFWD', 'true', 'automatically fix forwarded connection URLs', boolean=True, required=True)

        # names of query string properties; the initial assignment makes both
        # names equal, so the while loop below always regenerates SESSIONNAME
        # until the two differ.
        jobname = sessionname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
        while sessionname == jobname:
            sessionname = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))

        self.options.register('JOBNAME', jobname, 'name for jobkey cookie', advanced = True)
        self.options.register('SESSIONNAME', sessionname, 'name for session cookie', advanced = True)
        self.options.register('OBFUSCATE', 'xor', 'obfuscate payloads with defined technique (\'\', xor) (blank = no obfuscation)', advanced = True, enum = ['', 'xor'])

        # query strings
        self.options.register('_JOBPATH_', '', 'the job path', hidden = True)
        self.options.register('_SESSIONPATH_', '', 'the session path', hidden = True)

        # script payload file paths
        self.options.register('_STDLIB_', self.stdlib, 'path to stdlib file', hidden = True)
        self.options.register('_STAGETEMPLATE_', self.stagetemplate, 'path to stage template file', hidden = True)
        self.options.register('_STAGE_', self.stage, 'stage worker', hidden = True)
        self.options.register('_STAGECMD_', self.stagecmd, 'path to stage file', hidden = True)
        self.options.register('_FORKCMD_', self.forkcmd, 'path to fork file', hidden = True)
        self.options.register('_FORKTEMPLATE_', self.forktemplate, 'path to fork template file', hidden = True)
        self.options.register('_WORKLOAD_', self.workload, 'workload type', hidden = True)

        # other things
        self.options.register("SESSIONKEY", "", "unique key for a session", hidden=True)
        self.options.register("JOBKEY", "", "unique key for a job", hidden=True)
        self.options.register("URL", "", "url to the stager", hidden=True)
        self.options.register('CLASSICMODE', '', ';)', hidden = True, enum=['true', 'false'])
        self.options.register('_EXPIREEPOCH_', '', 'time to expire', hidden = True)
        self.options.register('_MODULEOPTIONS_', '', 'options for module on run', hidden = True)
        self.options.register('ENDPOINTTYPE', '', 'filetype to append to endpoint if needed', hidden = True)
        self.options.register('FENDPOINT', '', 'final endpoint', hidden = True)

    def run(self):
        """Validate options, ensure a listener exists, and spawn the stager."""
        if self.options.get('ONESHOT') == 'true' and not self.options.get('MODULE'):
            self.shell.print_error('A ONESHOT Zombie needs a MODULE')
            return

        if self.options.get('CLASSICMODE') == 'true':
            self.options.set('ENDPOINT', self.random_string(4000))

        srvport = int(str(self.options.get('SRVPORT')).strip())
        endpoint = self.options.get('ENDPOINT').strip()

        # if srvport in servers, then we already have a server running
        if srvport in self.shell.servers:
            if endpoint in self.shell.stagers[srvport]:
                self.shell.print_error("There is already a stager listening on that endpoint")
            else:
                self.spawn_stager(srvport, endpoint);
        # if not, then we need to start a server
        else:
            keypath = self.options.get('KEYPATH').strip()
            certpath = self.options.get('CERTPATH').strip()
            if self.start_server(srvport, keypath, certpath):
                self.shell.stagers[srvport] = {}
                self.spawn_stager(srvport, endpoint);

    def spawn_stager(self, srvport, endpoint):
        """Create a Stager from a snapshot of the current options and
        register it on the listener's endpoint map."""
        import copy
        # deepcopy so later wizard edits do not mutate the live stager.
        new_stager = Stager(self.shell, copy.deepcopy(self.options))
        self.shell.stagers[srvport][endpoint] = new_stager
        self.shell.play_sound('STAGER')
        self.shell.print_good(f"Spawned a stager at {new_stager.options.get('URL')}")
        self.shell.print_command(new_stager.get_payload_data().decode())

    def start_server(self, port, keypath, certpath):
        """Start the HTTP(S) listener on *port*; return True on success."""
        try:
            # NOTE(review): only ``core.server`` is imported at the top of
            # this file; ``core.handler`` appears to come in transitively —
            # confirm, otherwise this raises AttributeError.
            server = core.server.Server(port, core.handler.Handler, keypath, certpath, self.shell, self.options)
            self.shell.servers[port] = server
            server.start()
            return True
        except OSError as e:
            port = str(self.options.get('SRVPORT'))
            # errno values are Linux-specific (98 = EADDRINUSE, 13 = EACCES).
            if e.errno == 98:
                self.shell.print_error('Port %s is already bound!' % (port))
            elif e.errno == 13:
                self.shell.print_error('Port %s bind permission denied!' % (port))
            else:
                raise
            return False
        except Exception as ex:
            import traceback
            template = 'An exception of type {0} occured. Arguments:\n{1!r}'
            message = template.format(type(ex).__name__, ex.args)
            self.shell.print_error(message)
            traceback.print_exc()
            return False
        except:
            # Catches BaseException (e.g. KeyboardInterrupt) not handled above.
            self.shell.print_error('Failed to spawn stager')
            raise
            # NOTE(review): unreachable — ``raise`` above always exits.
            return False
class Stager():
    """One staging endpoint: holds the resolved options, the generated
    payload, and the URL the payload is served from.
    """

    def __init__(self, shell, options):
        self.shell = shell
        self.options = options
        self.killed = False
        self.module = self.shell.state
        if self.options.get('EXPIRES'):
            from datetime import datetime
            import time
            # EXPIRES is a mm/dd/YYYY date; convert to epoch milliseconds.
            dtime = datetime.strptime(self.options.get('EXPIRES'), '%m/%d/%Y')
            etime = int(round((dtime - datetime.utcfromtimestamp(0)).total_seconds() * 1000))
            if etime < int(round(time.time() * 1000)):
                self.shell.print_error('Expiration date cannot be today or in the past')
                # BUG FIX: the original did `return False` here, but a
                # non-None return from __init__ raises TypeError at
                # instantiation time.  A bare return aborts initialization
                # after reporting the error.
                return
            self.options.set('_EXPIREEPOCH_', etime)
        else:
            # No expiry requested: use a random sentinel epoch far in the future.
            self.options.set('_EXPIREEPOCH_', str(random.randint(100000000000000, 999999999999999)))
        keyt = self.options.get('KEYPATH')
        cert = self.options.get('CERTPATH')
        # Serve over HTTPS only when both a key and a certificate are configured.
        self.is_https = False
        if cert and keyt:
            self.is_https = True
        self.options.set('SRVHOST', self.options.get('SRVHOST').strip())
        self.options.set('SRVPORT', int(str(self.options.get('SRVPORT')).strip()))
        self.options.set('ENDPOINT', self.options.get('ENDPOINT').strip())
        self.options.set('FENDPOINT', self.options.get('ENDPOINT') + self.options.get('ENDPOINTTYPE'))
        # Escape backslashes and double quotes so the fork command survives
        # being embedded in a quoted string.
        self.options.set('_FORKCMD_', self.options.get('_FORKCMD_').decode().replace('\\', '\\\\').replace('\"', '\\\"').encode())
        self.options.set('URL', self._build_url())
        if self.options.get('MODULE'):
            import copy
            module = self.options.get("MODULE")
            # Allow a bare module name; resolve it to the full plugin path.
            if '/' not in module:
                module = [k for k in self.shell.plugins if k.lower().split('/')[-1] == module.lower()][0]
                self.options.set("MODULE", module)
            plugin = self.shell.plugins[module]
            options = copy.deepcopy(plugin.options)
            self.options.set('_MODULEOPTIONS_', options)
        stage_cmd = self.options.get("_STAGECMD_")
        payload_cmd = core.loader.apply_options(stage_cmd, self.options)
        self.payload = core.payload.Payload(payload_cmd)
        self.WORKLOAD = self.options.get('_WORKLOAD_')
        self.endpoint = self.options.get('ENDPOINT')

    def _build_url(self):
        """Build the stager URL, resolving 0.0.0.0 to an outbound-routable
        local address via a throwaway UDP socket.
        """
        hostname = self.options.get("SRVHOST")
        if hostname == '0.0.0.0':
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            try:
                # No packet is sent for UDP connect(); it only selects the
                # local interface that would reach 8.8.8.8.
                s.connect(('8.8.8.8', 80))
                hostname = s.getsockname()[0]
            finally:
                s.close()
        self.hostname = hostname
        self.port = str(self.options.get("SRVPORT"))
        prefix = "https" if self.is_https else "http"
        url = prefix + "://" + self.hostname + ':' + self.port
        endpoint = self.options.get("FENDPOINT").strip()
        url += "/" + endpoint
        return url

    def get_payload_data(self):
        """Return the raw payload bytes served to connecting zombies."""
        return self.payload.data

    def get_payload_id(self):
        """Return the unique identifier of the generated payload."""
        return self.payload.id
| apache-2.0 |
ashhher3/invenio | modules/bibsword/lib/bibsword_webinterface.py | 31 | 22937 | '''
Forward to ArXiv.org source code
'''
## This file is part of Invenio.
## Copyright (C) 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
import os
from invenio.access_control_engine import acc_authorize_action
from invenio.config import CFG_SITE_URL, CFG_TMPDIR
from invenio.webuser import page_not_authorized, collect_user_info
from invenio.bibsword_client import perform_display_sub_status, \
perform_display_server_list, \
perform_display_collection_list, \
perform_display_category_list, \
perform_display_metadata, \
perform_submit_record, \
perform_display_server_infos, \
list_remote_servers
from invenio.webpage import page
from invenio.messages import gettext_set_language
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.websubmit_functions.Get_Recid import \
get_existing_records_for_reportnumber
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibsword_config import CFG_MARC_REPORT_NUMBER, CFG_MARC_ADDITIONAL_REPORT_NUMBER
class WebInterfaceSword(WebInterfaceDirectory):
""" Handle /bibsword set of pages."""
_exports = ['', 'remoteserverinfos']
def __init__(self, reqid=None):
    '''
    Initialize the handler.

    @param reqid: optional identifier of a pre-selected record/request;
        stored for later use by the page handlers.
    '''
    self.reqid = reqid
def __call__(self, req, form):
    """
    Main dispatcher for the /bibsword pages.

    Reads the submitted form, authenticates the user, and walks the
    4-step SWORD export workflow (server -> collection -> category ->
    metadata -> submission) based on the 'status' and 'submit' form
    fields.  Returns the rendered page for the current step.
    """
    errors = []
    warnings = []
    body = ''
    error_messages = []

    #***********************************************************************
    # Get values from the form
    #***********************************************************************
    argd = wash_urlargd(form, {
        'ln': (str, ''),
        # information of the state of the form submission
        'status': (str, ''),
        'submit': (str, ''),
        'last_row': (str, ''),
        'first_row': (str, ''),
        'offset': (int, ''),
        'total_rows': (str, ''),
        # mandatory informations
        'id_record': (str, ''),
        'recid': (int, 0),
        'id_remote_server': (str, ''),
        'id_collection': (str, ''),
        'id_primary': (str, ''),
        'id_categories': (list, []),
        'id': (str, ''),
        'title': (str, ''),
        'summary': (str, ''),
        'author_name': (str, ''),
        'author_email': (str, ''),
        'contributor_name': (list, []),
        'contributor_email': (list, []),
        'contributor_affiliation': (list, []),
        # optional informations
        'comment': (str, ''),
        'doi': (str, ''),
        'type': (str, ''),
        'journal_refs': (list, []),
        'report_nos': (list, []),
        'media': (list, []),
        'new_media': (str, ''),
        'filename': (str, '')
    })

    # set language for i18n text auto generation
    _ = gettext_set_language(argd['ln'])

    # authentication
    (auth_code, auth_message) = self.check_credential(req)
    if auth_code != 0:
        return page_not_authorized(req=req, referer='/bibsword',
                                   text=auth_message, navtrail='')
    user_info = collect_user_info(req)

    # Build contributor tuples {name, email and affiliation(s)}.
    # The flat affiliation list uses the sentinel value 'next' to mark the
    # end of one contributor's affiliations.
    contributors = []
    contributor_id = 0
    affiliation_id = 0
    for name in argd['contributor_name']:
        contributor = {}
        contributor['name'] = name
        contributor['email'] = argd['contributor_email'][contributor_id]
        contributor['affiliation'] = []
        is_last_affiliation = False
        while not is_last_affiliation and \
                affiliation_id < len(argd['contributor_affiliation']):
            if argd['contributor_affiliation'][affiliation_id] == 'next':
                is_last_affiliation = True
            elif argd['contributor_affiliation'][affiliation_id] != '':
                contributor['affiliation'].append(
                    argd['contributor_affiliation'][affiliation_id])
            affiliation_id += 1
        contributors.append(contributor)
        contributor_id += 1
    argd['contributors'] = contributors

    # get the uploaded file(s) (if there is one)
    for key, formfields in form.items():
        if key == "new_media" and hasattr(formfields, "filename") and formfields.filename:
            filename = formfields.filename
            # Write in binary mode and close deterministically: the
            # uploaded media may be arbitrary binary content (PDF, ...).
            with open(os.path.join(CFG_TMPDIR, filename), "wb") as fp:
                fp.write(formfields.file.read())
            argd['media'].append(os.path.join(CFG_TMPDIR, filename))
            argd['filename'] = os.path.join(CFG_TMPDIR, filename)

    # Prepare navtrail
    navtrail = '''<a class="navtrail" ''' \
               '''href="%(CFG_SITE_URL)s/help/admin">Admin Area</a>''' \
               % {'CFG_SITE_URL': CFG_SITE_URL}

    title = _("BibSword Admin Interface")

    #***********************************************************************
    # Display admin main page
    #***********************************************************************
    # NOTE(review): argd['recid'] is an int, so `!= ''` is always true;
    # kept as-is to preserve the original dispatch behavior.
    if argd['status'] == '' and argd['recid'] != '' and argd['id_remote_server'] != '':
        remote_servers = list_remote_servers(argd['id_remote_server'])
        if len(remote_servers) == 0:
            error_messages.append("No corresponding remote server could be found")
            (body, errors, warnings) = perform_display_server_list(
                error_messages,
                argd['id_record'])
        else:
            title = _("Export with BibSword: Step 2/4")
            navtrail += ''' > <a class="navtrail" ''' \
                        '''href="%(CFG_SITE_URL)s/bibsword">''' \
                        '''SWORD Interface</a>''' % \
                        {'CFG_SITE_URL': CFG_SITE_URL}
            (body, errors, warnings) = perform_display_collection_list(
                argd['id_remote_server'],
                argd['id_record'],
                argd['recid'],
                error_messages)

    elif argd['status'] == '' or argd['submit'] == "Cancel":
        (body, errors, warnings) = perform_display_sub_status()

    elif argd['status'] == 'display_submission':
        # Paging controls of the submission status table.
        if argd['submit'] == 'Refresh all':
            (body, errors, warnings) = \
                perform_display_sub_status(1, argd['offset'], "refresh_all")
        elif argd['submit'] == 'Select':
            first_row = 1
            (body, errors, warnings) = \
                perform_display_sub_status(first_row, argd['offset'])
        elif argd['submit'] == 'Next':
            first_row = int(argd['last_row']) + 1
            (body, errors, warnings) = \
                perform_display_sub_status(first_row, argd['offset'])
        elif argd['submit'] == 'Prev':
            first_row = int(argd['first_row']) - int(argd['offset'])
            (body, errors, warnings) = \
                perform_display_sub_status(first_row, argd['offset'])
        elif argd['submit'] == 'First':
            (body, errors, warnings) = \
                perform_display_sub_status(1, argd['offset'])
        elif argd['submit'] == 'Last':
            first_row = int(argd['total_rows']) - int(argd['offset']) + 1
            (body, errors, warnings) = \
                perform_display_sub_status(first_row, argd['offset'])

    #***********************************************************************
    # Select remote server
    #***********************************************************************
    # when the user validated the metadata, display
    elif argd['submit'] == 'New submission':
        title = _("Export with BibSword: Step 1/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        (body, errors, warnings) = \
            perform_display_server_list(error_messages)

    # check if the user has selected a remote server
    elif argd['status'] == 'select_server':
        title = _("Export with BibSword: Step 1/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}

        # check if given id_record exist and convert it in recid
        if argd['recid'] != 0:
            report_numbers = get_fieldvalues(argd['recid'], CFG_MARC_REPORT_NUMBER)
            report_numbers.extend(get_fieldvalues(argd['recid'], CFG_MARC_ADDITIONAL_REPORT_NUMBER))
            if report_numbers:
                argd['id_record'] = report_numbers[0]
        elif argd['id_record'] == '':
            error_messages.append("You must specify a report number")
        else:
            recids = \
                get_existing_records_for_reportnumber(argd['id_record'])
            if len(recids) == 0:
                error_messages.append(
                    "No document found with the given report number")
            elif len(recids) > 1:
                error_messages.append(
                    "Several documents have been found with given the report number")
            else:
                argd['recid'] = recids[0]

        if argd['id_remote_server'] in ['0', '']:
            error_messages.append("No remote server was selected")

        if not argd['id_remote_server'] in ['0', '']:
            # get the server's name and host
            remote_servers = list_remote_servers(argd['id_remote_server'])
            if len(remote_servers) == 0:
                error_messages.append("No corresponding remote server could be found")
                argd['id_remote_server'] = '0'

        if argd['id_remote_server'] in ['0', ''] or argd['recid'] == 0:
            (body, errors, warnings) = perform_display_server_list(
                error_messages,
                argd['id_record'])
        else:
            title = _("Export with BibSword: Step 2/4")
            (body, errors, warnings) = perform_display_collection_list(
                argd['id_remote_server'],
                argd['id_record'],
                argd['recid'],
                error_messages)

    #***********************************************************************
    # Select collection
    #***********************************************************************
    # check if the user wants to change the remote server
    elif argd['submit'] == 'Modify server':
        title = _("Export with BibSword: Step 1/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        (body, errors, warnings) = \
            perform_display_server_list(error_messages, argd['id_record'])

    # check if the user has selected a collection
    elif argd['status'] == 'select_collection':
        title = _("Export with BibSword: Step 2/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        if argd['id_collection'] == '0':
            error_messages.append("No collection was selected")
            (body, errors, warnings) = perform_display_collection_list(
                argd['id_remote_server'],
                argd['id_record'],
                argd['recid'],
                error_messages)
        else:
            title = _("Export with BibSword: Step 3/4")
            (body, errors, warnings) = perform_display_category_list(
                argd['id_remote_server'],
                argd['id_collection'],
                argd['id_record'],
                argd['recid'],
                error_messages)

    #***********************************************************************
    # Select primary
    #***********************************************************************
    # check if the user wants to change the collection
    elif argd['submit'] == 'Modify collection':
        title = _("Export with BibSword: Step 2/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        (body, errors, warnings) = perform_display_collection_list(
            argd['id_remote_server'],
            argd['id_record'],
            argd['recid'],
            error_messages)

    # check if the user has selected a primary category
    elif argd['status'] == 'select_primary_category':
        title = _("Export with BibSword: Step 3/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        if argd['id_primary'] == '0':
            error_messages.append("No primary category selected")
            (body, errors, warnings) = perform_display_category_list(
                argd['id_remote_server'],
                argd['id_collection'],
                argd['id_record'],
                argd['recid'],
                error_messages)
        else:
            title = _("Export with BibSword: Step 4/4")
            (body, errors, warnings) = perform_display_metadata(
                user_info,
                str(argd['id_remote_server']),
                str(argd['id_collection']),
                str(argd['id_primary']),
                argd['id_categories'],
                argd['id_record'],
                argd['recid'],
                error_messages)

    #***********************************************************************
    # Check record media and metadata
    #***********************************************************************
    # check if the user wants to change the category
    elif argd['submit'] == 'Modify destination':
        title = _("Export with BibSword: Step 3/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        (body, errors, warnings) = perform_display_category_list(
            argd['id_remote_server'],
            argd['id_collection'],
            argd['id_record'],
            argd['recid'],
            error_messages)

    # check if the metadata are complete and well-formed
    elif argd['status'] == 'check_submission':
        title = _("Export with BibSword: Step 4/4")
        navtrail += ''' > <a class="navtrail" ''' \
                    '''href="%(CFG_SITE_URL)s/bibsword">''' \
                    '''SWORD Interface</a>''' % \
                    {'CFG_SITE_URL': CFG_SITE_URL}
        if argd['submit'] == "Upload":
            error_messages.append("Media loaded")
        if argd['id'] == '':
            error_messages.append("Id is missing")
        if argd['title'] == '':
            error_messages.append("Title is missing")
        if argd['summary'] == '':
            error_messages.append("summary is missing")
        elif len(argd['summary']) < 25:
            error_messages.append("summary must have at least 25 character")
        if argd['author_name'] == '':
            error_messages.append("No submitter name specified")
        if argd['author_email'] == '':
            error_messages.append("No submitter email specified")
        if len(argd['contributors']) == 0:
            error_messages.append("No author specified")

        if len(error_messages) > 0:
            # Validation failed: redisplay the metadata form with errors.
            (body, errors, warnings) = perform_display_metadata(
                user_info,
                str(argd['id_remote_server']),
                str(argd['id_collection']),
                str(argd['id_primary']),
                argd['id_categories'],
                argd['id_record'],
                argd['recid'],
                error_messages,
                argd)
        else:
            # Everything checks out: perform the actual SWORD submission.
            title = _("Export with BibSword: Acknowledgement")
            navtrail += ''' > <a class="navtrail" ''' \
                        '''href="%(CFG_SITE_URL)s/bibsword">''' \
                        '''SWORD Interface</a>''' % \
                        {'CFG_SITE_URL': CFG_SITE_URL}
            (body, errors, warnings) = perform_submit_record(
                user_info,
                str(argd['id_remote_server']),
                str(argd['id_collection']),
                str(argd['id_primary']),
                argd['id_categories'],
                argd['recid'],
                argd)

    # return of all the updated informations to be displayed
    return page(title=title,
                body=body,
                navtrail=navtrail,
                #uid = uid,
                lastupdated=__lastupdated__,
                req=req,
                language=argd['ln'],
                #errors = errors,
                warnings=warnings,
                navmenuid="yourmessages")
def remoteserverinfos(self, req, form):
    '''
    Handle the /bibsword/remoteserverinfos call.

    Authenticates the user, then renders an information page about the
    remote SWORD server identified by the ``id`` form parameter.
    '''
    argd = wash_urlargd(form, {
        'ln': (str, ''),
        'id': (str, '')
    })

    # authentication: only users allowed to run the bibsword client
    (auth_code, auth_message) = self.check_credential(req)
    if auth_code != 0:
        return page_not_authorized(req=req, referer='/bibsword',
                                   text=auth_message, navtrail='')

    body = perform_display_server_infos(argd['id'])

    navtrail = ''' > <a class="navtrail" ''' \
               '''href="%(CFG_SITE_URL)s/bibsword">''' \
               '''SWORD Interface</a>''' % \
               {'CFG_SITE_URL': CFG_SITE_URL}

    # return of all the updated informations to be displayed
    return page(title='Remote server infos',
                body=body,
                navtrail=navtrail,
                #uid = uid,
                lastupdated=__lastupdated__,
                req=req,
                language=argd['ln'],
                errors='',
                warnings='',
                navmenuid="yourmessages")
def check_credential(self, req):
    '''
    Check whether the requesting user may run the BibSword client.

    Returns the ``(auth_code, auth_message)`` pair produced by the
    access-control engine for the 'runbibswordclient' action
    (``auth_code == 0`` means access is granted).
    '''
    code, message = acc_authorize_action(req, 'runbibswordclient')
    return (code, message)
index = __call__
| gpl-2.0 |
zubair-arbi/edx-platform | pavelib/quality.py | 2 | 18682 | """
Check code quality using pep8, pylint, and diff_quality.
"""
from paver.easy import sh, task, cmdopts, needs, BuildFailure
import os
import re
from .utils.envs import Env
ALL_SYSTEMS = 'lms,cms,common,openedx,pavelib'
def top_python_dirs(dirname):
    """
    Find the directories to start from in order to find all the Python files in `dirname`.

    `dirname` itself is included when it is a package (has __init__.py);
    for the conventional 'djangoapps' and 'lib' children that are NOT
    packages themselves, each contained directory name is included.
    """
    roots = []
    if os.path.exists(os.path.join(dirname, "__init__.py")):
        roots.append(dirname)
    for child in ('djangoapps', 'lib'):
        child_path = os.path.join(dirname, child)
        child_init = os.path.join(child_path, "__init__.py")
        if os.path.exists(child_path) and not os.path.exists(child_init):
            roots.extend(
                entry for entry in os.listdir(child_path)
                if os.path.isdir(os.path.join(child_path, entry))
            )
    return roots
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
("system=", "s", "System to act on"),
])
def find_fixme(options):
"""
Run pylint on system code, only looking for fixme items.
"""
num_fixme = 0
systems = getattr(options, 'system', ALL_SYSTEMS).split(',')
for system in systems:
# Directory to put the pylint report in.
# This makes the folder if it doesn't already exist.
report_dir = (Env.REPORT_DIR / system).makedirs_p()
apps_list = ' '.join(top_python_dirs(system))
pythonpath_prefix = (
"PYTHONPATH={system}:{system}/lib"
"common/djangoapps:common/lib".format(
system=system
)
)
sh(
"{pythonpath_prefix} pylint --disable R,C,W,E --enable=fixme "
"--msg-template={msg_template} {apps} "
"| tee {report_dir}/pylint_fixme.report".format(
pythonpath_prefix=pythonpath_prefix,
msg_template='"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"',
apps=apps_list,
report_dir=report_dir
)
)
num_fixme += _count_pylint_violations(
"{report_dir}/pylint_fixme.report".format(report_dir=report_dir))
print "Number of pylint fixmes: " + str(num_fixme)
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
    ("system=", "s", "System to act on"),
    ("errors", "e", "Check for errors only"),
    ("limit=", "l", "limit for number of acceptable violations"),
])
def run_pylint(options):
    """
    Run pylint on system code. When violations limit is passed in,
    fail the task if too many violations are found.
    """
    num_violations = 0
    # -1 means "no limit": the final `a > b > -1` chained comparison is
    # then never true.
    violations_limit = int(getattr(options, 'limit', -1))
    errors = getattr(options, 'errors', False)
    systems = getattr(options, 'system', ALL_SYSTEMS).split(',')

    # Make sure the metrics subdirectory exists
    Env.METRICS_DIR.makedirs_p()

    for system in systems:
        # Directory to put the pylint report in.
        # This makes the folder if it doesn't already exist.
        report_dir = (Env.REPORT_DIR / system).makedirs_p()

        flags = []
        if errors:
            flags.append("--errors-only")

        apps_list = ' '.join(top_python_dirs(system))

        pythonpath_prefix = (
            "PYTHONPATH={system}:{system}/djangoapps:{system}/"
            "lib:common/djangoapps:common/lib".format(
                system=system
            )
        )

        # Run pylint and tee the raw output into the per-system report file.
        sh(
            "{pythonpath_prefix} pylint {flags} --msg-template={msg_template} {apps} | "
            "tee {report_dir}/pylint.report".format(
                pythonpath_prefix=pythonpath_prefix,
                flags=" ".join(flags),
                msg_template='"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"',
                apps=apps_list,
                report_dir=report_dir
            )
        )

        num_violations += _count_pylint_violations(
            "{report_dir}/pylint.report".format(report_dir=report_dir))

    # Print number of violations to log
    violations_count_str = "Number of pylint violations: " + str(num_violations)
    print violations_count_str

    # Also write the number of violations to a file
    with open(Env.METRICS_DIR / "pylint", "w") as f:
        f.write(violations_count_str)

    # Fail when the number of violations is greater than the limit
    if num_violations > violations_limit > -1:
        raise Exception("Failed. Too many pylint violations. "
                        "The limit is {violations_limit}.".format(violations_limit=violations_limit))
def _count_pylint_violations(report_file):
"""
Parses a pylint report line-by-line and determines the number of violations reported
"""
num_violations_report = 0
# An example string:
# common/lib/xmodule/xmodule/tests/test_conditional.py:21: [C0111(missing-docstring), DummySystem] Missing docstring
# More examples can be found in the unit tests for this method
pylint_pattern = re.compile(r".(\d+):\ \[(\D\d+.+\]).")
for line in open(report_file):
violation_list_for_line = pylint_pattern.split(line)
# If the string is parsed into four parts, then we've found a violation. Example of split parts:
# test file, line number, violation name, violation details
if len(violation_list_for_line) == 4:
num_violations_report += 1
return num_violations_report
def _get_pep8_violations():
    """
    Runs pep8. Returns a tuple of (number_of_violations, violations_string)
    where violations_string is a string of all pep8 violations found, separated
    by new lines.
    """
    # Start from a clean report directory so stale results never leak in.
    report_dir = (Env.REPORT_DIR / 'pep8')
    report_dir.rmtree(ignore_errors=True)
    report_dir.makedirs_p()

    # Make sure the metrics subdirectory exists
    Env.METRICS_DIR.makedirs_p()

    # `tee -a` appends, but the directory was just recreated so the
    # report starts empty anyway.
    sh('pep8 . | tee {report_dir}/pep8.report -a'.format(report_dir=report_dir))

    count, violations_list = _pep8_violations(
        "{report_dir}/pep8.report".format(report_dir=report_dir)
    )

    return (count, violations_list)
def _pep8_violations(report_file):
"""
Returns a tuple of (num_violations, violations_list) for all
pep8 violations in the given report_file.
"""
with open(report_file) as f:
violations_list = f.readlines()
num_lines = len(violations_list)
return num_lines, violations_list
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
    ("system=", "s", "System to act on"),
])
def run_pep8(options):  # pylint: disable=unused-argument
    """
    Run pep8 on system code.
    Fail the task if any violations are found.
    """
    (count, violations_list) = _get_pep8_violations()
    violations_list = ''.join(violations_list)

    # Print number of violations to log
    violations_count_str = "Number of pep8 violations: {count}".format(count=count)
    print violations_count_str
    print violations_list

    # Also write the number of violations to a file
    with open(Env.METRICS_DIR / "pep8", "w") as f:
        f.write(violations_count_str + '\n\n')
        f.write(violations_list)

    # Fail if any violations are found: the repo is expected to be
    # pep8-clean, so any violation is a regression.
    if count:
        failure_string = "Too many pep8 violations. " + violations_count_str
        failure_string += "\n\nViolations:\n{violations_list}".format(violations_list=violations_list)
        raise Exception(failure_string)
@task
@needs('pavelib.prereqs.install_python_prereqs')
def run_complexity():
    """
    Uses radon to examine cyclomatic complexity.
    For additional details on radon, see http://radon.readthedocs.org/
    """
    system_string = 'cms/ lms/ common/ openedx/'
    complexity_report_dir = (Env.REPORT_DIR / "complexity")
    complexity_report = complexity_report_dir / "python_complexity.log"

    # Ensure directory structure is in place: metrics dir, and an empty complexity report dir.
    Env.METRICS_DIR.makedirs_p()
    _prepare_report_dir(complexity_report_dir)

    print "--> Calculating cyclomatic complexity of python files..."
    try:
        sh(
            "radon cc {system_string} --total-average > {complexity_report}".format(
                system_string=system_string,
                complexity_report=complexity_report
            )
        )
        # The overall score is on the last line of the radon output.
        complexity_metric = _get_count_from_last_line(complexity_report, "python_complexity")
        _write_metric(
            complexity_metric,
            (Env.METRICS_DIR / "python_complexity")
        )
        print "--> Python cyclomatic complexity report complete."
        print "radon cyclomatic complexity score: {metric}".format(metric=str(complexity_metric))

    except BuildFailure:
        # Complexity is informational only; do not fail the build over it.
        print "ERROR: Unable to calculate python-only code-complexity."
@task
@needs('pavelib.prereqs.install_node_prereqs')
@cmdopts([
    ("limit=", "l", "limit for number of acceptable violations"),
])
def run_jshint(options):
    """
    Runs jshint on static asset directories.

    Fails when the violation count exceeds the (optional) limit.
    """
    # -1 means "no limit" (the chained comparison below never triggers).
    violations_limit = int(getattr(options, 'limit', -1))

    jshint_report_dir = (Env.REPORT_DIR / "jshint")
    jshint_report = jshint_report_dir / "jshint.report"
    _prepare_report_dir(jshint_report_dir)

    # jshint exits non-zero when it finds violations; ignore that so we
    # can count them ourselves from the report.
    sh(
        "jshint {root} --config .jshintrc >> {jshint_report}".format(
            root=Env.REPO_ROOT, jshint_report=jshint_report
        ),
        ignore_error=True
    )

    try:
        num_violations = int(_get_count_from_last_line(jshint_report, "jshint"))
    except TypeError:
        # _get_count_from_last_line returns None when no count was parsed;
        # int(None) raises TypeError.
        raise BuildFailure(
            "Error. Number of jshint violations could not be found in {jshint_report}".format(
                jshint_report=jshint_report
            )
        )

    # Record the metric
    _write_metric(num_violations, (Env.METRICS_DIR / "jshint"))

    # Fail if number of violations is greater than the limit
    if num_violations > violations_limit > -1:
        raise Exception(
            "JSHint Failed. Too many violations ({count}).\nThe limit is {violations_limit}.".format(
                count=num_violations, violations_limit=violations_limit
            )
        )
def _write_metric(metric, filename):
"""
Write a given metric to a given file
Used for things like reports/metrics/jshint, which will simply tell you the number of
jshint violations found
"""
with open(filename, "w") as metric_file:
metric_file.write(str(metric))
def _prepare_report_dir(dir_name):
    """
    Sets a given directory to a created, but empty state.

    ``dir_name`` is a path object (as produced by ``Env.REPORT_DIR / ...``),
    hence the ``rmtree_p``/``mkdir_p`` helpers, which ignore a missing /
    already-existing directory respectively.
    """
    dir_name.rmtree_p()
    dir_name.mkdir_p()
def _get_last_report_line(filename):
"""
Returns the last line of a given file. Used for getting output from quality output files.
"""
file_not_found_message = "The following log file could not be found: {file}".format(file=filename)
if os.path.isfile(filename):
with open(filename, 'r') as report_file:
lines = report_file.readlines()
return lines[len(lines) - 1]
else:
# Raise a build error if the file is not found
raise BuildFailure(file_not_found_message)
def _get_count_from_last_line(filename, file_type):
    """
    Return the number found in the last line of the file, as a float,
    or None when no number could be parsed.
    """
    last_line = _get_last_report_line(filename)
    # BUG FIX: the original used `file_type is "python_complexity"`, an
    # identity comparison on a string literal that only works by accident
    # of CPython string interning; equality is what is meant.
    if file_type == "python_complexity":
        # Example of the last line of a complexity report: "Average complexity: A (1.93953443446)"
        regex = r'\d+.\d+'
    else:
        # Example of the last line of a jshint report (for example): "3482 errors"
        regex = r'^\d+'

    try:
        return float(re.search(regex, last_line).group(0))
    # An AttributeError will occur if the regex finds no matches.
    # A ValueError will occur if the returned regex cannot be cast as a float.
    except (AttributeError, ValueError):
        return None
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
    ("compare-branch=", "b", "Branch to compare against, defaults to origin/master"),
    ("percentage=", "p", "fail if diff-quality is below this percentage"),
])
def run_quality(options):
    """
    Build the html diff quality reports, and print the reports to the console.
    :param: b, the branch to compare against, defaults to origin/master
    :param: p, diff-quality will fail if the quality percentage calculated is
    below this percentage. For example, if p is set to 80, and diff-quality finds
    quality of the branch vs the compare branch is less than 80%, then this task will fail.
    This threshold would be applied to both pep8 and pylint.
    """
    # Directory to put the diff reports in.
    # This makes the folder if it doesn't already exist.
    dquality_dir = (Env.REPORT_DIR / "diff_quality").makedirs_p()

    # Save the pass variable. It will be set to false later if failures are detected.
    diff_quality_percentage_pass = True

    def _pep8_output(count, violations_list, is_html=False):
        """
        Given a count & list of pep8 violations, pretty-print the pep8 output.
        If `is_html`, will print out with HTML markup.
        """
        if is_html:
            lines = ['<body>\n']
            sep = '-------------<br/>\n'
            title = "<h1>Quality Report: pep8</h1>\n"
            violations_bullets = ''.join(
                ['<li>{violation}</li><br/>\n'.format(violation=violation) for violation in violations_list]
            )
            violations_str = '<ul>\n{bullets}</ul>\n'.format(bullets=violations_bullets)
            violations_count_str = "<b>Violations</b>: {count}<br/>\n"
            fail_line = "<b>FAILURE</b>: pep8 count should be 0<br/>\n"
        else:
            lines = []
            sep = '-------------\n'
            title = "Quality Report: pep8\n"
            violations_str = ''.join(violations_list)
            violations_count_str = "Violations: {count}\n"
            fail_line = "FAILURE: pep8 count should be 0\n"
        violations_count_str = violations_count_str.format(count=count)

        lines.extend([sep, title, sep, violations_str, sep, violations_count_str])
        if count > 0:
            lines.append(fail_line)
        lines.append(sep + '\n')
        if is_html:
            lines.append('</body>')

        return ''.join(lines)

    # Run pep8 directly since we have 0 violations on master
    (count, violations_list) = _get_pep8_violations()

    # Print number of violations to log
    print _pep8_output(count, violations_list)

    # Also write the number of violations to a file
    with open(dquality_dir / "diff_quality_pep8.html", "w") as f:
        f.write(_pep8_output(count, violations_list, is_html=True))

    if count > 0:
        diff_quality_percentage_pass = False

    # ----- Set up for diff-quality pylint call -----
    # Set the string, if needed, to be used for the diff-quality --compare-branch switch.
    compare_branch = getattr(options, 'compare_branch', None)
    compare_branch_string = u''
    if compare_branch:
        compare_branch_string = u'--compare-branch={0}'.format(compare_branch)

    # Set the string, if needed, to be used for the diff-quality --fail-under switch.
    diff_threshold = int(getattr(options, 'percentage', -1))
    percentage_string = u''
    if diff_threshold > -1:
        percentage_string = u'--fail-under={0}'.format(diff_threshold)

    # Generate diff-quality html report for pylint, and print to console
    # If pylint reports exist, use those
    # Otherwise, `diff-quality` will call pylint itself
    pylint_files = get_violations_reports("pylint")
    pylint_reports = u' '.join(pylint_files)

    jshint_files = get_violations_reports("jshint")
    jshint_reports = u' '.join(jshint_files)

    pythonpath_prefix = (
        "PYTHONPATH=$PYTHONPATH:lms:lms/djangoapps:lms/lib:cms:cms/djangoapps:cms/lib:"
        "common:common/djangoapps:common/lib"
    )

    # run diff-quality for pylint.
    if not run_diff_quality(
            violations_type="pylint",
            prefix=pythonpath_prefix,
            reports=pylint_reports,
            percentage_string=percentage_string,
            branch_string=compare_branch_string,
            dquality_dir=dquality_dir
    ):
        diff_quality_percentage_pass = False

    # run diff-quality for jshint.
    if not run_diff_quality(
            violations_type="jshint",
            prefix=pythonpath_prefix,
            reports=jshint_reports,
            percentage_string=percentage_string,
            branch_string=compare_branch_string,
            dquality_dir=dquality_dir
    ):
        diff_quality_percentage_pass = False

    # If one of the quality runs fails, then paver exits with an error when it is finished
    if not diff_quality_percentage_pass:
        raise BuildFailure("Diff-quality failure(s).")
def run_diff_quality(
        violations_type=None, prefix=None, reports=None, percentage_string=None, branch_string=None, dquality_dir=None
):
    """
    This executes the diff-quality commandline tool for the given violation type (e.g., pylint, jshint).

    Arguments:
        violations_type: value for diff-quality's --violations switch.
        prefix: environment prefix (e.g. a PYTHONPATH assignment) prepended
            to the shell command.
        reports: space-separated list of pre-generated report files; may be
            empty, in which case diff-quality invokes the linter itself.
        percentage_string: pre-built --fail-under switch, or empty string.
        branch_string: pre-built --compare-branch switch, or empty string.
        dquality_dir: directory where the HTML report is written.

    Returns True on success. If diff-quality fails due to quality issues
    (threshold not met), this method returns False; any other failure is
    re-raised as a BuildFailure.
    """
    try:
        sh(
            "{pythonpath_prefix} diff-quality --violations={type} "
            "{reports} {percentage_string} {compare_branch_string} "
            "--html-report {dquality_dir}/diff_quality_{type}.html ".format(
                type=violations_type,
                pythonpath_prefix=prefix,
                reports=reports,
                percentage_string=percentage_string,
                compare_branch_string=branch_string,
                dquality_dir=dquality_dir,
            )
        )
        return True
    # Use the py2.6+/py3-compatible "as" binding instead of the legacy
    # "except BuildFailure, error_message" comma syntax.
    except BuildFailure as error_message:
        if is_percentage_failure(error_message):
            return False
        else:
            raise BuildFailure(error_message)
def is_percentage_failure(error_message):
    """
    When diff-quality is run with a threshold percentage, it ends with an exit code of 1. This bubbles up to
    paver with a subprocess return code error. If the subprocess exits with anything other than 1, raise
    a paver exception.

    ``error_message`` may be either the failure text or the BuildFailure
    exception itself (as passed by run_diff_quality), so coerce it to a
    string before searching it; membership testing on an exception object
    would raise TypeError.
    """
    return "Subprocess return code: 1" in str(error_message)
def get_violations_reports(violations_type):
    """
    Finds violations reports files by naming convention (e.g., all "pep8.report" files)

    Returns a list of full paths to every "<violations_type>.report" file
    found anywhere under Env.REPORT_DIR.
    """
    # Build the target filename once instead of re-formatting per file, and
    # walk the report directory directly: os.path.join() with a single
    # argument was a no-op.
    target = "{violations_type}.report".format(violations_type=violations_type)
    violations_files = []
    for subdir, _dirs, files in os.walk(Env.REPORT_DIR):
        for f in files:
            if f == target:
                violations_files.append(os.path.join(subdir, f))
    return violations_files
| agpl-3.0 |
jeanmask/opps | opps/db/models/fields/jsonf.py | 5 | 5423 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
import json
from django.db import models
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.forms import fields
from django.forms.util import ValidationError
from south.modelsinspector import add_introspection_rules
from .subclassing import SubfieldBase
class JSONFormFieldBase(object):
    """Mixin shared by the JSON form fields: deserializes string input and
    reports cleaning problems as JSON validation errors."""

    def to_python(self, value):
        """Parse string input as JSON; pass any other value through."""
        if not isinstance(value, six.string_types):
            return value
        try:
            return json.loads(value)
        except ValueError:
            raise ValidationError(_("Enter valid JSON"))

    def clean(self, value):
        """Return None for empty optional input; otherwise delegate to the
        parent field, surfacing TypeErrors as invalid JSON."""
        if not (value or self.required):
            return None
        # Trap cleaning errors & bubble them up as JSON errors
        try:
            return super(JSONFormFieldBase, self).clean(value)
        except TypeError:
            raise ValidationError(_("Enter valid JSON"))
class JSONFormField(JSONFormFieldBase, fields.Field):
    """Form field that validates its input as JSON (any JSON value)."""
    pass
class JSONCharFormField(JSONFormFieldBase, fields.CharField):
    """CharField-based form field that validates its input as JSON."""
    pass
class JSONFieldBase(six.with_metaclass(SubfieldBase, models.Field)):
    """Common implementation shared by JSONField and JSONCharField: stores
    arbitrary JSON-serializable Python objects as JSON text in the DB."""
    def __init__(self, *args, **kwargs):
        # Keyword arguments forwarded to json.dumps()/json.loads(); the
        # defaults produce compact output and serialize Django types
        # (datetimes, Decimals, ...) via DjangoJSONEncoder.
        self.dump_kwargs = kwargs.pop('dump_kwargs', {
            'cls': DjangoJSONEncoder,
            'separators': (',', ':')
        })
        self.load_kwargs = kwargs.pop('load_kwargs', {})
        super(JSONFieldBase, self).__init__(*args, **kwargs)
    def pre_init(self, value, obj):
        """Convert a string value to JSON only if it needs
        to be deserialized.
        SubfieldBase metaclass has been modified to call
        this method instead of
        to_python so that we can check the obj state and
        determine if it needs to be
        deserialized"""
        if obj._state.adding:
            # Make sure the primary key actually exists on the object before
            # checking if it's empty. This is a special case for
            # South datamigrations
            # see: https://github.com/bradjasper/django-jsonfield/issues/52
            if not hasattr(obj, "pk") or obj.pk is not None:
                if isinstance(value, six.string_types):
                    try:
                        return json.loads(value, **self.load_kwargs)
                    except ValueError:
                        raise ValidationError(_("Enter valid JSON"))
        return value
    def to_python(self, value):
        """The SubfieldBase metaclass calls pre_init instead of to_python,
        however to_python
        is still necessary for Django's deserializer"""
        return value
    def get_db_prep_value(self, value, connection, prepared=False):
        """Convert JSON object to a string"""
        if self.null and value is None:
            return None
        return json.dumps(value, **self.dump_kwargs)
    def value_to_string(self, obj):
        # Serialization hook (e.g. dumpdata): reuse the DB representation.
        value = self._get_val_from_obj(obj)
        return self.get_db_prep_value(value, None)
    def value_from_object(self, obj):
        """Return the JSON text displayed for *obj*'s value in forms."""
        value = super(JSONFieldBase, self).value_from_object(obj)
        if self.null and value is None:
            return None
        return self.dumps_for_display(value)
    def dumps_for_display(self, value):
        # Subclasses may override this to pretty-print (see JSONField).
        return json.dumps(value, **self.dump_kwargs)
    def formfield(self, **kwargs):
        """Use the JSON-aware form field class unless overridden."""
        if "form_class" not in kwargs:
            kwargs["form_class"] = self.form_class
        field = super(JSONFieldBase, self).formfield(**kwargs)
        if not field.help_text:
            field.help_text = "Enter valid JSON"
        return field
    def get_default(self):
        """
        Returns the default value for this field.
        The default implementation on models.Field calls force_unicode
        on the default, which means you can't set arbitrary Python
        objects as the default. To fix this, we just return the value
        without calling force_unicode on it. Note that if you set a
        callable as a default, the field will still call it. It will
        *not* try to pickle and encode it.
        """
        if self.has_default():
            if callable(self.default):
                return self.default()
            # Deep-copy so a mutable default is never shared between rows.
            return copy.deepcopy(self.default)
        # If the field doesn't have a default, then we punt to models.Field.
        return super(JSONFieldBase, self).get_default()
    def db_type(self, connection):
        # Use PostgreSQL's native json column type when available (>= 9.4).
        if connection.vendor == 'postgresql' and\
                connection.pg_version >= 90400:
            return 'json'
        else:
            return super(JSONFieldBase, self).db_type(connection)
class JSONField(JSONFieldBase, models.TextField):
    """Generic TextField-backed model field that serializes/unserializes
    JSON objects."""
    form_class = JSONFormField

    def dumps_for_display(self, value):
        """Pretty-print *value* with a two-space indent, letting any
        indent in dump_kwargs take precedence."""
        options = dict({"indent": 2}, **self.dump_kwargs)
        return json.dumps(value, **options)
class JSONCharField(JSONFieldBase, models.CharField):
    """JSONCharField is a generic textfield that serializes/unserializes
    JSON objects,
    stored in the database like a CharField, which enables it to be used
    e.g. in unique keys"""
    # Use the CharField-based form widget/validation.
    form_class = JSONCharFormField
# Tell South how to introspect these custom fields for legacy migrations.
# Raw strings avoid the invalid "\." escape-sequence (DeprecationWarning on
# modern Pythons); the resulting regex value is byte-identical.
add_introspection_rules([], [r"^opps\.db\.models\.fields\.jsonf\."
                             r"(JSONField|JSONCharField)"])
| mit |
wenottingham/ansible | lib/ansible/cli/doc.py | 3 | 12788 | # (c) 2014, James Tanner <tanner.jc@gmail.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
# http://docs.ansible.com/playbooks_vault.html for more details.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import traceback
import textwrap
from ansible.compat.six import iteritems, string_types
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.plugins import module_loader, action_loader
from ansible.cli import CLI
from ansible.utils import module_docs
try:
    # Reuse the process-wide display object when running under the ansible
    # entry point; fall back to a private Display for standalone imports.
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()
class DocCLI(CLI):
    # BUGFIX: the docstring previously said "Vault command line class".
    """ ansible-doc command line class: displays module documentation """

    def __init__(self, args):
        super(DocCLI, self).__init__(args)
        # Module names collected by find_modules() for the --list output.
        self.module_list = []

    def parse(self):
        """Set up the ansible-doc option parser and parse the arguments."""
        self.parser = CLI.base_parser(
            usage='usage: %prog [options] [module...]',
            epilog='Show Ansible module documentation',
            module_opts=True,
        )
        self.parser.add_option("-l", "--list", action="store_true", default=False, dest='list_dir',
                               help='List available modules')
        self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
                               help='Show playbook snippet for specified module(s)')
        super(DocCLI, self).parse()
        display.verbosity = self.options.verbosity

    def run(self):
        """List modules, or render docs/snippets for the requested modules.

        Returns 0 on success; raises AnsibleOptionsError/AnsibleError on
        bad arguments or undocumented modules.
        """
        super(DocCLI, self).run()

        if self.options.module_path is not None:
            for i in self.options.module_path.split(os.pathsep):
                module_loader.add_directory(i)

        # list modules
        if self.options.list_dir:
            paths = module_loader._get_paths()
            for path in paths:
                self.find_modules(path)
            self.pager(self.get_module_list_text())
            return 0

        if len(self.args) == 0:
            raise AnsibleOptionsError("Incorrect options passed")

        # process command line module list
        text = ''
        for module in self.args:
            try:
                # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = module_loader.find_plugin(module, mod_type='.py')
                if filename is None:
                    display.warning("module %s not found in %s\n" % (module, DocCLI.print_paths(module_loader)))
                    continue

                if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                    continue

                try:
                    doc, plainexamples, returndocs = module_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
                # BUGFIX: catch Exception, not everything -- a bare except
                # also swallows KeyboardInterrupt/SystemExit.
                except Exception:
                    display.vvv(traceback.format_exc())
                    display.error("module %s has a documentation error formatting or is missing documentation\nTo see exact traceback use -vvv" % module)
                    continue

                if doc is not None:
                    # is there corresponding action plugin?
                    if module in action_loader:
                        doc['action'] = True
                    else:
                        doc['action'] = False

                    all_keys = []
                    for (k, v) in iteritems(doc['options']):
                        all_keys.append(k)
                    all_keys = sorted(all_keys)
                    doc['option_keys'] = all_keys

                    doc['filename'] = filename
                    doc['docuri'] = doc['module'].replace('_', '-')
                    doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
                    doc['plainexamples'] = plainexamples
                    doc['returndocs'] = returndocs

                    if self.options.show_snippet:
                        text += self.get_snippet_text(doc)
                    else:
                        text += self.get_man_text(doc)
                else:
                    # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                    # probably a quoting issue.
                    raise AnsibleError("Parsing produced an empty object.")
            except Exception as e:
                display.vvv(traceback.format_exc())
                raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))

        if text:
            self.pager(text)
        return 0

    def find_modules(self, path):
        """Recursively collect candidate module names under *path* into
        self.module_list."""
        if os.path.isdir(path):
            for module in os.listdir(path):
                if module.startswith('.'):
                    continue
                # BUGFIX: test/recurse with the full path; the bare entry
                # name is only a valid path relative to the CWD.
                elif os.path.isdir(os.path.join(path, module)):
                    self.find_modules(os.path.join(path, module))
                    continue
                elif any(module.endswith(x) for x in C.BLACKLIST_EXTS):
                    continue
                elif module.startswith('__'):
                    continue
                elif module in C.IGNORE_FILES:
                    continue
                elif module.startswith('_'):
                    fullpath = '/'.join([path, module])
                    if os.path.islink(fullpath):  # avoids aliases
                        continue
                # BUGFIX: record every module that survives the filters
                # above, not only the deprecated ('_'-prefixed) ones.
                module = os.path.splitext(module)[0]  # removes the extension
                self.module_list.append(module)

    def get_module_list_text(self):
        """Return the formatted module listing used by --list, with
        deprecated modules grouped at the end."""
        columns = display.columns
        displace = max(len(x) for x in self.module_list)
        linelimit = columns - displace - 5
        text = []
        deprecated = []
        for module in sorted(set(self.module_list)):

            if module in module_docs.BLACKLIST_MODULES:
                continue

            # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = module_loader.find_plugin(module, mod_type='.py')

            if filename is None:
                continue
            if filename.endswith(".ps1"):
                continue
            if os.path.isdir(filename):
                continue

            try:
                doc, plainexamples, returndocs = module_docs.get_docstring(filename)
                desc = self.tty_ify(doc.get('short_description', '?')).strip()
                if len(desc) > linelimit:
                    desc = desc[:linelimit] + '...'

                if module.startswith('_'):  # Handle deprecated
                    deprecated.append("%-*s %-*.*s" % (displace, module[1:], linelimit, len(desc), desc))
                else:
                    text.append("%-*s %-*.*s" % (displace, module, linelimit, len(desc), desc))
            # BUGFIX: narrowed from a bare except (see run()).
            except Exception:
                raise AnsibleError("module %s has a documentation error formatting or is missing documentation\n" % module)

        if len(deprecated) > 0:
            text.append("\nDEPRECATED:")
            text.extend(deprecated)
        return "\n".join(text)

    @staticmethod
    def print_paths(finder):
        ''' Returns a string suitable for printing of the search path '''

        # Uses a list to get the order right
        ret = []
        for i in finder._get_paths():
            if i not in ret:
                ret.append(i)
        return os.pathsep.join(ret)

    def get_snippet_text(self, doc):
        """Return a ready-to-paste playbook task skeleton for the module
        described by *doc*."""
        text = []
        desc = CLI.tty_ify(doc['short_description'])
        text.append("- name: %s" % (desc))
        text.append("  action: %s" % (doc['module']))
        pad = 31
        subdent = " " * pad
        limit = display.columns - pad

        for o in sorted(doc['options'].keys()):
            opt = doc['options'][o]
            desc = CLI.tty_ify(" ".join(opt['description']))

            required = opt.get('required', False)
            if not isinstance(required, bool):
                # BUGFIX: raising a plain string is a TypeError; raise a
                # real exception type instead.
                raise AnsibleError("Incorrect value for 'Required', a boolean is needed.: %s" % required)
            if required:
                s = o + "="
            else:
                s = o

            text.append("      %-20s   # %s" % (s, textwrap.fill(desc, limit, subsequent_indent=subdent)))
        text.append('')
        return "\n".join(text)

    def get_man_text(self, doc):
        """Render the full man-page-style documentation for the module
        described by *doc*."""
        opt_indent = "        "
        text = []
        text.append("> %s\n" % doc['module'].upper())
        pad = display.columns * 0.20
        limit = max(display.columns - int(pad), 70)

        if isinstance(doc['description'], list):
            desc = " ".join(doc['description'])
        else:
            desc = doc['description']

        text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent="  ", subsequent_indent="  "))

        if 'deprecated' in doc and doc['deprecated'] is not None and len(doc['deprecated']) > 0:
            text.append("DEPRECATED: \n%s\n" % doc['deprecated'])

        if 'action' in doc and doc['action']:
            text.append("  * note: %s\n" % "This module has a corresponding action plugin.")

        if 'option_keys' in doc and len(doc['option_keys']) > 0:
            text.append("Options (= is mandatory):\n")

        for o in sorted(doc['option_keys']):
            opt = doc['options'][o]

            required = opt.get('required', False)
            if not isinstance(required, bool):
                # BUGFIX: raising a plain string is a TypeError; raise a
                # real exception type instead.
                raise AnsibleError("Incorrect value for 'Required', a boolean is needed.: %s" % required)
            if required:
                opt_leadin = "="
            else:
                opt_leadin = "-"

            text.append("%s %s" % (opt_leadin, o))

            if isinstance(opt['description'], list):
                for entry in opt['description']:
                    text.append(textwrap.fill(CLI.tty_ify(entry), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
            else:
                text.append(textwrap.fill(CLI.tty_ify(opt['description']), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))

            choices = ''
            if 'choices' in opt:
                choices = "(Choices: " + ", ".join(str(i) for i in opt['choices']) + ")"
            default = ''
            if 'default' in opt or not required:
                default = "[Default: " + str(opt.get('default', '(null)')) + "]"
            text.append(textwrap.fill(CLI.tty_ify(choices + default), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))

        if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
            text.append("Notes:")
            for note in doc['notes']:
                text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent="  * ", subsequent_indent=opt_indent))

        if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
            req = ", ".join(doc['requirements'])
            text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent="  ", subsequent_indent=opt_indent))

        if 'examples' in doc and len(doc['examples']) > 0:
            text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
            for ex in doc['examples']:
                text.append("%s\n" % (ex['code']))

        if 'plainexamples' in doc and doc['plainexamples'] is not None:
            text.append("EXAMPLES:")
            text.append(doc['plainexamples'])
        if 'returndocs' in doc and doc['returndocs'] is not None:
            text.append("RETURN VALUES:")
            text.append(doc['returndocs'])
        text.append('')

        maintainers = set()
        if 'author' in doc:
            if isinstance(doc['author'], string_types):
                maintainers.add(doc['author'])
            else:
                maintainers.update(doc['author'])

        if 'maintainers' in doc:
            # BUGFIX: this branch previously re-read doc['author']; use the
            # 'maintainers' entry it is actually testing for.
            if isinstance(doc['maintainers'], string_types):
                maintainers.add(doc['maintainers'])
            else:
                maintainers.update(doc['maintainers'])

        text.append('MAINTAINERS: ' + ', '.join(maintainers))
        text.append('')

        return "\n".join(text)
| gpl-3.0 |
cjaymes/pyscap | src/scap/model/xnl_2_0/NameDetailsType.py | 1 | 1408 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.Model import Model
import logging
logger = logging.getLogger(__name__)
class NameDetailsType(Model):
    """Model for the xNL 2.0 <NameDetails> element."""
    # Declarative mapping consumed by the scap.Model base class to
    # (de)serialize XML: 'list' entries accumulate repeated child elements,
    # 'in' entries bind a single child to the named attribute.
    MODEL_MAP = {
        'tag_name': 'NameDetails',
        'elements': [
            {'tag_name': 'NameLine', 'list': 'name_lines', 'class': 'NameLineType'},
            {'tag_name': 'PersonName', 'in': 'person_name', 'class': 'PersonNameElement'},
            {'tag_name': 'JointPersonName', 'in': 'joint_person_name', 'class': 'JointPersonNameElement'},
            {'tag_name': 'OrganisationNameDetails', 'in': 'organisation_name_details', 'class': 'OrganisationNameDetailsElement'},
        ],
        # '*' accepts arbitrary additional XML attributes.
        'attributes': {
            'PartyType': {},
            'Code': {},
            '*': {},
        },
    }
| gpl-3.0 |
michael-dev2rights/ansible | lib/ansible/modules/cloud/rackspace/rax_scaling_policy.py | 33 | 8750 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_scaling_policy
short_description: Manipulate Rackspace Cloud Autoscale Scaling Policy
description:
- Manipulate Rackspace Cloud Autoscale Scaling Policy
version_added: 1.7
options:
at:
description:
- The UTC time when this policy will be executed. The time must be
formatted according to C(yyyy-MM-dd'T'HH:mm:ss.SSS) such as
C(2013-05-19T08:07:08Z)
change:
description:
- The change, either as a number of servers or as a percentage, to make
in the scaling group. If this is a percentage, you must set
I(is_percent) to C(true) also.
cron:
description:
- The time when the policy will be executed, as a cron entry. For
example, if this is parameter is set to C(1 0 * * *)
cooldown:
description:
- The period of time, in seconds, that must pass before any scaling can
occur after the previous scaling. Must be an integer between 0 and
86400 (24 hrs).
desired_capacity:
description:
- The desired server capacity of the scaling the group; that is, how
many servers should be in the scaling group.
is_percent:
description:
- Whether the value in I(change) is a percent value
default: false
name:
description:
- Name to give the policy
required: true
policy_type:
description:
- The type of policy that will be executed for the current release.
choices:
- webhook
- schedule
required: true
scaling_group:
description:
- Name of the scaling group that this policy will be added to
required: true
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
author: "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
---
- hosts: localhost
gather_facts: false
connection: local
tasks:
- rax_scaling_policy:
credentials: ~/.raxpub
region: ORD
at: '2013-05-19T08:07:08Z'
change: 25
cooldown: 300
is_percent: true
name: ASG Test Policy - at
policy_type: schedule
scaling_group: ASG Test
register: asps_at
- rax_scaling_policy:
credentials: ~/.raxpub
region: ORD
cron: '1 0 * * *'
change: 25
cooldown: 300
is_percent: true
name: ASG Test Policy - cron
policy_type: schedule
scaling_group: ASG Test
register: asp_cron
- rax_scaling_policy:
credentials: ~/.raxpub
region: ORD
cooldown: 300
desired_capacity: 5
name: ASG Test Policy - webhook
policy_type: webhook
scaling_group: ASG Test
register: asp_webhook
'''
try:
    # pyrax is an optional dependency; record its availability so main()
    # can fail with a helpful message instead of an ImportError.
    import pyrax
    HAS_PYRAX = True
except ImportError:
    HAS_PYRAX = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.rax import (UUID, rax_argument_spec, rax_required_together, rax_to_dict,
setup_rax_module)
def rax_asp(module, at=None, change=0, cron=None, cooldown=300,
            desired_capacity=0, is_percent=False, name=None,
            policy_type=None, scaling_group=None, state='present'):
    """Create, update or delete a Rackspace Autoscale scaling policy.

    Looks up the scaling group (by UUID or by display name), then either
    ensures a policy called *name* exists with the requested settings
    (state='present') or removes it (state='absent'). Terminates through
    module.exit_json()/module.fail_json(); does not return normally.
    """
    changed = False

    au = pyrax.autoscale
    if not au:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    # Accept either the scaling group's UUID or its name.
    try:
        UUID(scaling_group)
    except ValueError:
        try:
            sg = au.find(name=scaling_group)
        except Exception as e:
            module.fail_json(msg='%s' % e.message)
    else:
        try:
            sg = au.get(scaling_group)
        except Exception as e:
            module.fail_json(msg='%s' % e.message)

    if state == 'present':
        # BUGFIX: materialize the filter result -- under Python 3, filter()
        # returns a lazy iterator that cannot be len()'d or truth-tested.
        policies = list(filter(lambda p: name == p.name, sg.list_policies()))
        if len(policies) > 1:
            module.fail_json(msg='No unique policy match found by name')
        if at:
            args = dict(at=at)
        elif cron:
            args = dict(cron=cron)
        else:
            args = None

        if not policies:
            try:
                policy = sg.add_policy(name, policy_type=policy_type,
                                       cooldown=cooldown, change=change,
                                       is_percent=is_percent,
                                       desired_capacity=desired_capacity,
                                       args=args)
                changed = True
            except Exception as e:
                module.fail_json(msg='%s' % e.message)
        else:
            policy = policies[0]
            kwargs = {}
            if policy_type != policy.type:
                kwargs['policy_type'] = policy_type

            if cooldown != policy.cooldown:
                kwargs['cooldown'] = cooldown

            if hasattr(policy, 'change') and change != policy.change:
                kwargs['change'] = change

            # Flip between absolute and percentage change as needed.
            if hasattr(policy, 'changePercent') and is_percent is False:
                kwargs['change'] = change
                kwargs['is_percent'] = False
            elif hasattr(policy, 'change') and is_percent is True:
                kwargs['change'] = change
                kwargs['is_percent'] = True

            # Flip between change-based and desired-capacity policies.
            if hasattr(policy, 'desiredCapacity') and change:
                kwargs['change'] = change
            elif ((hasattr(policy, 'change') or
                    hasattr(policy, 'changePercent')) and desired_capacity):
                kwargs['desired_capacity'] = desired_capacity

            if hasattr(policy, 'args') and args != policy.args:
                kwargs['args'] = args

            if kwargs:
                policy.update(**kwargs)
                changed = True

        policy.get()
        module.exit_json(changed=changed, autoscale_policy=rax_to_dict(policy))

    else:
        try:
            policies = list(filter(lambda p: name == p.name, sg.list_policies()))
            if len(policies) > 1:
                module.fail_json(msg='No unique policy match found by name')
            elif not policies:
                policy = {}
            else:
                # BUGFIX: 'policy' was referenced here before assignment;
                # bind it to the single match before deleting.
                policy = policies[0]
                policy.delete()
                changed = True
        except Exception as e:
            module.fail_json(msg='%s' % e.message)

        module.exit_json(changed=changed, autoscale_policy=rax_to_dict(policy))
def main():
    """Module entry point: build the argument spec, validate input and
    delegate to rax_asp()."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            at=dict(),
            change=dict(type='int'),
            cron=dict(),
            cooldown=dict(type='int', default=300),
            desired_capacity=dict(type='int'),
            is_percent=dict(type='bool', default=False),
            name=dict(required=True),
            policy_type=dict(required=True, choices=['webhook', 'schedule']),
            scaling_group=dict(required=True),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
        mutually_exclusive=[
            ['cron', 'at'],
            ['change', 'desired_capacity'],
        ]
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    params = module.params
    at = params.get('at')
    cron = params.get('cron')
    policy_type = params.get('policy_type')

    # Time-based triggers only make sense for schedule policies.
    if (at or cron) and policy_type == 'webhook':
        module.fail_json(msg='policy_type=schedule is required for a time '
                             'based policy')

    setup_rax_module(module, pyrax)

    rax_asp(module, at=at, change=params.get('change'), cron=cron,
            cooldown=params.get('cooldown'),
            desired_capacity=params.get('desired_capacity'),
            is_percent=params.get('is_percent'),
            name=params.get('name'), policy_type=policy_type,
            scaling_group=params.get('scaling_group'),
            state=params.get('state'))
# Ansible executes modules as scripts; dispatch to the entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
rjw57/vy | vyapp/plugins/codepad.py | 7 | 1473 | """
"""
import urllib
import urllib2
def post(data, lang, opt=False):
    """Submit *data* to codepad.org.

    *lang* is a file extension (without dot) mapped onto codepad's language
    names, defaulting to 'Plain Text'; *opt* toggles codepad's "Run code"
    option. Returns a (response, result_url) tuple, where result_url is
    the paste's final (redirected) location.
    """
    ext_to_name = {
        'c': 'C',
        'cpp': 'C++',
        'd': 'D',
        'hs': 'Haskell',
        'lua': 'Lua',
        'ocaml': 'OCaml',
        'php': 'PHP',
        'pl': 'Perl',
        'py': 'Python',
        'rb': 'Ruby',
        'scm': 'Scheme',
        'tcl': 'Tcl',
    }

    form = {
        'code': data,
        'lang': ext_to_name.get(lang, 'Plain Text'),
        'submit': 'Submit',
        'run': opt,
    }

    opener = urllib2.build_opener()
    opener.addheaders = [('User-agent', 'Mozilla/5.0')]
    response = opener.open('http://codepad.org', urllib.urlencode(form))
    return response, response.geturl()
def CPPaste():
    """Paste the contents of the active AreaVi buffer to codepad.org (with
    'run' enabled) and open the resulting page in a web browser."""
    import webbrowser
    from os.path import splitext
    from vyapp.areavi import AreaVi

    # The areavi in which the execute cmd even was
    # issued from.
    area = AreaVi.ACTIVE
    data = area.get('1.0', 'end')
    data = data.encode('utf-8')
    _, ext = splitext(area.filename)
    # BUGFIX: splitext() keeps the leading dot ('.py'), but post()'s
    # language map is keyed on bare extensions ('py'), so the detected
    # language was always falling back to 'Plain Text'. Strip the dot.
    pointer, new_url = post(data, ext.lstrip('.'), True)
    webbrowser.open(new_url)
from vyapp.plugins import ENV
# Expose the command in vy's plugin namespace so it can be invoked from
# the editor's command line.
ENV['CPPaste'] = CPPaste
| mit |
ContinuumIO/numpy | numpy/lib/tests/test_recfunctions.py | 89 | 30630 | from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.ma as ma
from numpy.ma.mrecords import MaskedRecords
from numpy.ma.testutils import assert_equal
from numpy.testing import TestCase, run_module_suite, assert_
from numpy.lib.recfunctions import (
drop_fields, rename_fields, get_fieldstructure, recursive_fill_fields,
find_duplicates, merge_arrays, append_fields, stack_arrays, join_by
)
# Shorthand aliases for recfunctions helpers that are not re-exported via
# the explicit import list above.
get_names = np.lib.recfunctions.get_names
get_names_flat = np.lib.recfunctions.get_names_flat
zip_descr = np.lib.recfunctions.zip_descr
class TestRecFunctions(TestCase):
    # Misc tests
    """Tests for the miscellaneous recfunctions helpers (zip_descr,
    drop/rename_fields, get_names*, get_fieldstructure, find_duplicates)."""
    def setUp(self):
        # Shared fixtures: x/y plain int arrays, z a flexible (structured)
        # dtype, w a nested structured dtype.
        x = np.array([1, 2, ])
        y = np.array([10, 20, 30])
        z = np.array([('A', 1.), ('B', 2.)],
                     dtype=[('A', '|S3'), ('B', float)])
        w = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
                     dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
        self.data = (w, x, y, z)
    def test_zip_descr(self):
        # Test zip_descr
        (w, x, y, z) = self.data
        # Std array
        test = zip_descr((x, x), flatten=True)
        assert_equal(test,
                     np.dtype([('', int), ('', int)]))
        test = zip_descr((x, x), flatten=False)
        assert_equal(test,
                     np.dtype([('', int), ('', int)]))
        # Std & flexible-dtype
        test = zip_descr((x, z), flatten=True)
        assert_equal(test,
                     np.dtype([('', int), ('A', '|S3'), ('B', float)]))
        test = zip_descr((x, z), flatten=False)
        assert_equal(test,
                     np.dtype([('', int),
                               ('', [('A', '|S3'), ('B', float)])]))
        # Standard & nested dtype
        test = zip_descr((x, w), flatten=True)
        assert_equal(test,
                     np.dtype([('', int),
                               ('a', int),
                               ('ba', float), ('bb', int)]))
        test = zip_descr((x, w), flatten=False)
        assert_equal(test,
                     np.dtype([('', int),
                               ('', [('a', int),
                                     ('b', [('ba', float), ('bb', int)])])]))
    def test_drop_fields(self):
        # Test drop_fields
        a = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
                     dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
        # A basic field
        test = drop_fields(a, 'a')
        control = np.array([((2, 3.0),), ((5, 6.0),)],
                           dtype=[('b', [('ba', float), ('bb', int)])])
        assert_equal(test, control)
        # Another basic field (but nesting two fields)
        test = drop_fields(a, 'b')
        control = np.array([(1,), (4,)], dtype=[('a', int)])
        assert_equal(test, control)
        # A nested sub-field
        test = drop_fields(a, ['ba', ])
        control = np.array([(1, (3.0,)), (4, (6.0,))],
                           dtype=[('a', int), ('b', [('bb', int)])])
        assert_equal(test, control)
        # All the nested sub-field from a field: zap that field
        test = drop_fields(a, ['ba', 'bb'])
        control = np.array([(1,), (4,)], dtype=[('a', int)])
        assert_equal(test, control)
        # Dropping every field yields None, not an empty array.
        test = drop_fields(a, ['a', 'b'])
        assert_(test is None)
    def test_rename_fields(self):
        # Test rename fields
        a = np.array([(1, (2, [3.0, 30.])), (4, (5, [6.0, 60.]))],
                     dtype=[('a', int),
                            ('b', [('ba', float), ('bb', (float, 2))])])
        test = rename_fields(a, {'a': 'A', 'bb': 'BB'})
        newdtype = [('A', int), ('b', [('ba', float), ('BB', (float, 2))])]
        control = a.view(newdtype)
        assert_equal(test.dtype, newdtype)
        assert_equal(test, control)
    def test_get_names(self):
        # Test get_names: nested fields appear as (name, subnames) tuples.
        ndtype = np.dtype([('A', '|S3'), ('B', float)])
        test = get_names(ndtype)
        assert_equal(test, ('A', 'B'))
        ndtype = np.dtype([('a', int), ('b', [('ba', float), ('bb', int)])])
        test = get_names(ndtype)
        assert_equal(test, ('a', ('b', ('ba', 'bb'))))
    def test_get_names_flat(self):
        # Test get_names_flat: nested names are flattened into one tuple.
        ndtype = np.dtype([('A', '|S3'), ('B', float)])
        test = get_names_flat(ndtype)
        assert_equal(test, ('A', 'B'))
        ndtype = np.dtype([('a', int), ('b', [('ba', float), ('bb', int)])])
        test = get_names_flat(ndtype)
        assert_equal(test, ('a', 'b', 'ba', 'bb'))
    def test_get_fieldstructure(self):
        # Test get_fieldstructure: maps each field to its list of parents.
        # No nested fields
        ndtype = np.dtype([('A', '|S3'), ('B', float)])
        test = get_fieldstructure(ndtype)
        assert_equal(test, {'A': [], 'B': []})
        # One 1-nested field
        ndtype = np.dtype([('A', int), ('B', [('BA', float), ('BB', '|S1')])])
        test = get_fieldstructure(ndtype)
        assert_equal(test, {'A': [], 'B': [], 'BA': ['B', ], 'BB': ['B']})
        # One 2-nested fields
        ndtype = np.dtype([('A', int),
                           ('B', [('BA', int),
                                  ('BB', [('BBA', int), ('BBB', int)])])])
        test = get_fieldstructure(ndtype)
        control = {'A': [], 'B': [], 'BA': ['B'], 'BB': ['B'],
                   'BBA': ['B', 'BB'], 'BBB': ['B', 'BB']}
        assert_equal(test, control)
    def test_find_duplicates(self):
        # Test find_duplicates
        a = ma.array([(2, (2., 'B')), (1, (2., 'B')), (2, (2., 'B')),
                      (1, (1., 'B')), (2, (2., 'B')), (2, (2., 'C'))],
                     mask=[(0, (0, 0)), (0, (0, 0)), (0, (0, 0)),
                           (0, (0, 0)), (1, (0, 0)), (0, (1, 0))],
                     dtype=[('A', int), ('B', [('BA', float), ('BB', '|S1')])])
        # Full-record comparison (masked entries excluded by default).
        test = find_duplicates(a, ignoremask=False, return_index=True)
        control = [0, 2]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
        test = find_duplicates(a, key='A', return_index=True)
        control = [0, 1, 2, 3, 5]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
        test = find_duplicates(a, key='B', return_index=True)
        control = [0, 1, 2, 4]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
        test = find_duplicates(a, key='BA', return_index=True)
        control = [0, 1, 2, 4]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
        test = find_duplicates(a, key='BB', return_index=True)
        control = [0, 1, 2, 3, 4]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
    def test_find_duplicates_ignoremask(self):
        # Test the ignoremask option of find_duplicates
        ndtype = [('a', int)]
        a = ma.array([1, 1, 1, 2, 2, 3, 3],
                     mask=[0, 0, 1, 0, 0, 0, 1]).view(ndtype)
        test = find_duplicates(a, ignoremask=True, return_index=True)
        control = [0, 1, 3, 4]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
        test = find_duplicates(a, ignoremask=False, return_index=True)
        control = [0, 1, 2, 3, 4, 6]
        assert_equal(sorted(test[-1]), control)
        assert_equal(test[0], a[test[-1]])
class TestRecursiveFillFields(TestCase):
    # Test recursive_fill_fields.
    """Verify that recursive_fill_fields copies *a*'s rows into *b* and
    leaves b's remaining rows at their fill/zero values."""
    def test_simple_flexible(self):
        # Test recursive_fill_fields on flexible-array
        a = np.array([(1, 10.), (2, 20.)], dtype=[('A', int), ('B', float)])
        b = np.zeros((3,), dtype=a.dtype)
        test = recursive_fill_fields(a, b)
        # The third row keeps b's zeros since a has only two rows.
        control = np.array([(1, 10.), (2, 20.), (0, 0.)],
                           dtype=[('A', int), ('B', float)])
        assert_equal(test, control)
    def test_masked_flexible(self):
        # Test recursive_fill_fields on masked flexible-array
        a = ma.array([(1, 10.), (2, 20.)], mask=[(0, 1), (1, 0)],
                     dtype=[('A', int), ('B', float)])
        b = ma.zeros((3,), dtype=a.dtype)
        test = recursive_fill_fields(a, b)
        # Masks are propagated along with the data; the extra row is unmasked.
        control = ma.array([(1, 10.), (2, 20.), (0, 0.)],
                           mask=[(0, 1), (1, 0), (0, 0)],
                           dtype=[('A', int), ('B', float)])
        assert_equal(test, control)
class TestMergeArrays(TestCase):
    # Test merge_arrays
    def setUp(self):
        # Fixture: x, y are plain 1-D ints of different lengths; z is a flat
        # structured array; w is a structured array with a nested field 'b'.
        x = np.array([1, 2, ])
        y = np.array([10, 20, 30])
        z = np.array(
            [('A', 1.), ('B', 2.)], dtype=[('A', '|S3'), ('B', float)])
        w = np.array(
            [(1, (2, 3.0)), (4, (5, 6.0))],
            dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
        self.data = (w, x, y, z)

    def test_solo(self):
        # Test merge_arrays on a single array.
        (_, x, _, z) = self.data
        test = merge_arrays(x)
        control = np.array([(1,), (2,)], dtype=[('f0', int)])
        assert_equal(test, control)
        test = merge_arrays((x,))
        assert_equal(test, control)
        test = merge_arrays(z, flatten=False)
        assert_equal(test, z)
        # z has no nested fields, so flattening is a no-op.
        test = merge_arrays(z, flatten=True)
        assert_equal(test, z)

    def test_solo_w_flatten(self):
        # Test merge_arrays on a single array w & w/o flattening
        w = self.data[0]
        test = merge_arrays(w, flatten=False)
        assert_equal(test, w)
        # Flattening promotes the nested 'b' subfields to top level.
        test = merge_arrays(w, flatten=True)
        control = np.array([(1, 2, 3.0), (4, 5, 6.0)],
                           dtype=[('a', int), ('ba', float), ('bb', int)])
        assert_equal(test, control)

    def test_standard(self):
        # Test standard & standard
        # Test merge arrays
        (_, x, y, _) = self.data
        # x is shorter than y; the missing slot is filled with -1.
        test = merge_arrays((x, y), usemask=False)
        control = np.array([(1, 10), (2, 20), (-1, 30)],
                           dtype=[('f0', int), ('f1', int)])
        assert_equal(test, control)
        test = merge_arrays((x, y), usemask=True)
        control = ma.array([(1, 10), (2, 20), (-1, 30)],
                           mask=[(0, 0), (0, 0), (1, 0)],
                           dtype=[('f0', int), ('f1', int)])
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)

    def test_flatten(self):
        # Test standard & flexible
        (_, x, _, z) = self.data
        test = merge_arrays((x, z), flatten=True)
        control = np.array([(1, 'A', 1.), (2, 'B', 2.)],
                           dtype=[('f0', int), ('A', '|S3'), ('B', float)])
        assert_equal(test, control)
        test = merge_arrays((x, z), flatten=False)
        control = np.array([(1, ('A', 1.)), (2, ('B', 2.))],
                           dtype=[('f0', int),
                                  ('f1', [('A', '|S3'), ('B', float)])])
        assert_equal(test, control)

    def test_flatten_wflexible(self):
        # Test flatten standard & nested
        (w, x, _, _) = self.data
        test = merge_arrays((x, w), flatten=True)
        control = np.array([(1, 1, 2, 3.0), (2, 4, 5, 6.0)],
                           dtype=[('f0', int),
                                  ('a', int), ('ba', float), ('bb', int)])
        assert_equal(test, control)
        test = merge_arrays((x, w), flatten=False)
        controldtype = [('f0', int),
                        ('f1', [('a', int),
                                ('b', [('ba', float), ('bb', int)])])]
        control = np.array([(1., (1, (2, 3.0))), (2, (4, (5, 6.0)))],
                           dtype=controldtype)
        assert_equal(test, control)

    def test_wmasked_arrays(self):
        # Test merge_arrays masked arrays
        (_, x, _, _) = self.data
        mx = ma.array([1, 2, 3], mask=[1, 0, 0])
        test = merge_arrays((x, mx), usemask=True)
        control = ma.array([(1, 1), (2, 2), (-1, 3)],
                           mask=[(0, 1), (0, 0), (1, 0)],
                           dtype=[('f0', int), ('f1', int)])
        assert_equal(test, control)
        test = merge_arrays((x, mx), usemask=True, asrecarray=True)
        assert_equal(test, control)
        assert_(isinstance(test, MaskedRecords))

    def test_w_singlefield(self):
        # Test single field
        test = merge_arrays((np.array([1, 2]).view([('a', int)]),
                             np.array([10., 20., 30.])),)
        control = ma.array([(1, 10.), (2, 20.), (-1, 30.)],
                           mask=[(0, 0), (0, 0), (1, 0)],
                           dtype=[('a', int), ('f1', float)])
        assert_equal(test, control)

    def test_w_shorter_flex(self):
        # Test merge_arrays w/ a shorter flexndarray.
        z = self.data[-1]
        # Fixme, this test looks incomplete and broken
        #test = merge_arrays((z, np.array([10, 20, 30]).view([('C', int)])))
        #control = np.array([('A', 1., 10), ('B', 2., 20), ('-1', -1, 20)],
        #                   dtype=[('A', '|S3'), ('B', float), ('C', int)])
        #assert_equal(test, control)
        # Hack to avoid pyflakes warnings about unused variables
        merge_arrays((z, np.array([10, 20, 30]).view([('C', int)])))
        np.array([('A', 1., 10), ('B', 2., 20), ('-1', -1, 20)],
                 dtype=[('A', '|S3'), ('B', float), ('C', int)])

    def test_singlerecord(self):
        # Test merging the first (scalar) record of each input array.
        (_, x, y, z) = self.data
        test = merge_arrays((x[0], y[0], z[0]), usemask=False)
        control = np.array([(1, 10, ('A', 1))],
                           dtype=[('f0', int),
                                  ('f1', int),
                                  ('f2', [('A', '|S3'), ('B', float)])])
        assert_equal(test, control)
class TestAppendFields(TestCase):
    # Test append_fields
    def setUp(self):
        # Same fixture shape as TestMergeArrays: plain, flat-structured and
        # nested-structured inputs.
        x = np.array([1, 2, ])
        y = np.array([10, 20, 30])
        z = np.array(
            [('A', 1.), ('B', 2.)], dtype=[('A', '|S3'), ('B', float)])
        w = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
                     dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
        self.data = (w, x, y, z)

    def test_append_single(self):
        # Test simple case
        (_, x, _, _) = self.data
        # The base (length 2) is padded with a masked -1 to match the new
        # length-3 column.
        test = append_fields(x, 'A', data=[10, 20, 30])
        control = ma.array([(1, 10), (2, 20), (-1, 30)],
                           mask=[(0, 0), (0, 0), (1, 0)],
                           dtype=[('f0', int), ('A', int)],)
        assert_equal(test, control)

    def test_append_double(self):
        # Test simple case
        (_, x, _, _) = self.data
        test = append_fields(x, ('A', 'B'), data=[[10, 20, 30], [100, 200]])
        control = ma.array([(1, 10, 100), (2, 20, 200), (-1, 30, -1)],
                           mask=[(0, 0, 0), (0, 0, 0), (1, 0, 1)],
                           dtype=[('f0', int), ('A', int), ('B', int)],)
        assert_equal(test, control)

    def test_append_on_flex(self):
        # Test append_fields on flexible type arrays
        z = self.data[-1]
        test = append_fields(z, 'C', data=[10, 20, 30])
        control = ma.array([('A', 1., 10), ('B', 2., 20), (-1, -1., 30)],
                           mask=[(0, 0, 0), (0, 0, 0), (1, 1, 0)],
                           dtype=[('A', '|S3'), ('B', float), ('C', int)],)
        assert_equal(test, control)

    def test_append_on_nested(self):
        # Test append_fields on nested fields
        w = self.data[0]
        test = append_fields(w, 'C', data=[10, 20, 30])
        control = ma.array([(1, (2, 3.0), 10),
                            (4, (5, 6.0), 20),
                            (-1, (-1, -1.), 30)],
                           mask=[(0, (0, 0), 0), (0, (0, 0), 0),
                                 (1, (1, 1), 0)],
                           dtype=[('a', int),
                                  ('b', [('ba', float), ('bb', int)]),
                                  ('C', int)],)
        assert_equal(test, control)
class TestStackArrays(TestCase):
    # Test stack_arrays
    def setUp(self):
        # Fixture mirrors the other classes: plain, flat-structured and
        # nested-structured inputs.
        x = np.array([1, 2, ])
        y = np.array([10, 20, 30])
        z = np.array(
            [('A', 1.), ('B', 2.)], dtype=[('A', '|S3'), ('B', float)])
        w = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
                     dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
        self.data = (w, x, y, z)

    def test_solo(self):
        # Test stack_arrays on single arrays
        (_, x, _, _) = self.data
        # A single input must be returned unchanged (same object).
        test = stack_arrays((x,))
        assert_equal(test, x)
        self.assertTrue(test is x)
        test = stack_arrays(x)
        assert_equal(test, x)
        self.assertTrue(test is x)

    def test_unnamed_fields(self):
        # Tests combinations of arrays w/o named fields
        (_, x, y, _) = self.data
        test = stack_arrays((x, x), usemask=False)
        control = np.array([1, 2, 1, 2])
        assert_equal(test, control)
        test = stack_arrays((x, y), usemask=False)
        control = np.array([1, 2, 10, 20, 30])
        assert_equal(test, control)
        test = stack_arrays((y, x), usemask=False)
        control = np.array([10, 20, 30, 1, 2])
        assert_equal(test, control)

    def test_unnamed_and_named_fields(self):
        # Test combination of arrays w/ & w/o named fields
        (_, x, _, z) = self.data
        # Fields missing from one input are masked (-1 fill) in the result.
        test = stack_arrays((x, z))
        control = ma.array([(1, -1, -1), (2, -1, -1),
                            (-1, 'A', 1), (-1, 'B', 2)],
                           mask=[(0, 1, 1), (0, 1, 1),
                                 (1, 0, 0), (1, 0, 0)],
                           dtype=[('f0', int), ('A', '|S3'), ('B', float)])
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)
        test = stack_arrays((z, x))
        control = ma.array([('A', 1, -1), ('B', 2, -1),
                            (-1, -1, 1), (-1, -1, 2), ],
                           mask=[(0, 0, 1), (0, 0, 1),
                                 (1, 1, 0), (1, 1, 0)],
                           dtype=[('A', '|S3'), ('B', float), ('f2', int)])
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)
        test = stack_arrays((z, z, x))
        control = ma.array([('A', 1, -1), ('B', 2, -1),
                            ('A', 1, -1), ('B', 2, -1),
                            (-1, -1, 1), (-1, -1, 2), ],
                           mask=[(0, 0, 1), (0, 0, 1),
                                 (0, 0, 1), (0, 0, 1),
                                 (1, 1, 0), (1, 1, 0)],
                           dtype=[('A', '|S3'), ('B', float), ('f2', int)])
        assert_equal(test, control)

    def test_matching_named_fields(self):
        # Test combination of arrays w/ matching field names
        (_, x, _, z) = self.data
        zz = np.array([('a', 10., 100.), ('b', 20., 200.), ('c', 30., 300.)],
                      dtype=[('A', '|S3'), ('B', float), ('C', float)])
        # Shared field names ('A', 'B') are merged into single columns.
        test = stack_arrays((z, zz))
        control = ma.array([('A', 1, -1), ('B', 2, -1),
                            ('a', 10., 100.), ('b', 20., 200.),
                            ('c', 30., 300.)],
                           dtype=[('A', '|S3'), ('B', float), ('C', float)],
                           mask=[(0, 0, 1), (0, 0, 1),
                                 (0, 0, 0), (0, 0, 0), (0, 0, 0)])
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)
        test = stack_arrays((z, zz, x))
        ndtype = [('A', '|S3'), ('B', float), ('C', float), ('f3', int)]
        control = ma.array([('A', 1, -1, -1), ('B', 2, -1, -1),
                            ('a', 10., 100., -1), ('b', 20., 200., -1),
                            ('c', 30., 300., -1),
                            (-1, -1, -1, 1), (-1, -1, -1, 2)],
                           dtype=ndtype,
                           mask=[(0, 0, 1, 1), (0, 0, 1, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1), (0, 0, 0, 1),
                                 (1, 1, 1, 0), (1, 1, 1, 0)])
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)

    def test_defaults(self):
        # Test defaults: no exception raised if keys of defaults are not fields.
        (_, _, _, z) = self.data
        zz = np.array([('a', 10., 100.), ('b', 20., 200.), ('c', 30., 300.)],
                      dtype=[('A', '|S3'), ('B', float), ('C', float)])
        # 'D' is not a field of either input and must be silently ignored.
        defaults = {'A': '???', 'B': -999., 'C': -9999., 'D': -99999.}
        test = stack_arrays((z, zz), defaults=defaults)
        control = ma.array([('A', 1, -9999.), ('B', 2, -9999.),
                            ('a', 10., 100.), ('b', 20., 200.),
                            ('c', 30., 300.)],
                           dtype=[('A', '|S3'), ('B', float), ('C', float)],
                           mask=[(0, 0, 1), (0, 0, 1),
                                 (0, 0, 0), (0, 0, 0), (0, 0, 0)])
        assert_equal(test, control)
        assert_equal(test.data, control.data)
        assert_equal(test.mask, control.mask)

    def test_autoconversion(self):
        # Tests autoconversion
        adtype = [('A', int), ('B', bool), ('C', float)]
        a = ma.array([(1, 2, 3)], mask=[(0, 1, 0)], dtype=adtype)
        bdtype = [('A', int), ('B', float), ('C', float)]
        b = ma.array([(4, 5, 6)], dtype=bdtype)
        control = ma.array([(1, 2, 3), (4, 5, 6)], mask=[(0, 1, 0), (0, 0, 0)],
                           dtype=bdtype)
        test = stack_arrays((a, b), autoconvert=True)
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)
        # Without autoconversion the bool/float field mismatch must raise.
        # NOTE(review): this manual try/except/else could be written as
        # self.assertRaises(TypeError, stack_arrays, (a, b),
        # autoconvert=False).
        try:
            test = stack_arrays((a, b), autoconvert=False)
        except TypeError:
            pass
        else:
            raise AssertionError

    def test_checktitles(self):
        # Test using titles in the field names
        adtype = [(('a', 'A'), int), (('b', 'B'), bool), (('c', 'C'), float)]
        a = ma.array([(1, 2, 3)], mask=[(0, 1, 0)], dtype=adtype)
        bdtype = [(('a', 'A'), int), (('b', 'B'), bool), (('c', 'C'), float)]
        b = ma.array([(4, 5, 6)], dtype=bdtype)
        test = stack_arrays((a, b))
        control = ma.array([(1, 2, 3), (4, 5, 6)], mask=[(0, 1, 0), (0, 0, 0)],
                           dtype=bdtype)
        assert_equal(test, control)
        assert_equal(test.mask, control.mask)
class TestJoinBy(TestCase):
    # Tests for join_by: self.a and self.b share key 'a' on the overlapping
    # range 5..9 and both carry a 'b' column that must be postfixed.
    def setUp(self):
        self.a = np.array(list(zip(np.arange(10), np.arange(50, 60),
                                   np.arange(100, 110))),
                          dtype=[('a', int), ('b', int), ('c', int)])
        self.b = np.array(list(zip(np.arange(5, 15), np.arange(65, 75),
                                   np.arange(100, 110))),
                          dtype=[('a', int), ('b', int), ('d', int)])

    def test_inner_join(self):
        # Basic test of join_by
        a, b = self.a, self.b
        test = join_by('a', a, b, jointype='inner')
        # The duplicated 'b' column is split into 'b1' (from a) / 'b2'
        # (from b).
        control = np.array([(5, 55, 65, 105, 100), (6, 56, 66, 106, 101),
                            (7, 57, 67, 107, 102), (8, 58, 68, 108, 103),
                            (9, 59, 69, 109, 104)],
                           dtype=[('a', int), ('b1', int), ('b2', int),
                                  ('c', int), ('d', int)])
        assert_equal(test, control)

    def test_join(self):
        a, b = self.a, self.b
        # Fixme, this test is broken
        #test = join_by(('a', 'b'), a, b)
        #control = np.array([(5, 55, 105, 100), (6, 56, 106, 101),
        #                    (7, 57, 107, 102), (8, 58, 108, 103),
        #                    (9, 59, 109, 104)],
        #                   dtype=[('a', int), ('b', int),
        #                          ('c', int), ('d', int)])
        #assert_equal(test, control)
        # Hack to avoid pyflakes unused variable warnings
        join_by(('a', 'b'), a, b)
        np.array([(5, 55, 105, 100), (6, 56, 106, 101),
                  (7, 57, 107, 102), (8, 58, 108, 103),
                  (9, 59, 109, 104)],
                 dtype=[('a', int), ('b', int),
                        ('c', int), ('d', int)])

    def test_outer_join(self):
        a, b = self.a, self.b
        # Outer join on ('a', 'b'): rows present on only one side appear with
        # the other side's columns masked.
        test = join_by(('a', 'b'), a, b, 'outer')
        control = ma.array([(0, 50, 100, -1), (1, 51, 101, -1),
                            (2, 52, 102, -1), (3, 53, 103, -1),
                            (4, 54, 104, -1), (5, 55, 105, -1),
                            (5, 65, -1, 100), (6, 56, 106, -1),
                            (6, 66, -1, 101), (7, 57, 107, -1),
                            (7, 67, -1, 102), (8, 58, 108, -1),
                            (8, 68, -1, 103), (9, 59, 109, -1),
                            (9, 69, -1, 104), (10, 70, -1, 105),
                            (11, 71, -1, 106), (12, 72, -1, 107),
                            (13, 73, -1, 108), (14, 74, -1, 109)],
                           mask=[(0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 1, 0), (0, 0, 0, 1),
                                 (0, 0, 1, 0), (0, 0, 0, 1),
                                 (0, 0, 1, 0), (0, 0, 0, 1),
                                 (0, 0, 1, 0), (0, 0, 0, 1),
                                 (0, 0, 1, 0), (0, 0, 1, 0),
                                 (0, 0, 1, 0), (0, 0, 1, 0),
                                 (0, 0, 1, 0), (0, 0, 1, 0)],
                           dtype=[('a', int), ('b', int),
                                  ('c', int), ('d', int)])
        assert_equal(test, control)

    def test_leftouter_join(self):
        a, b = self.a, self.b
        # Left-outer join keeps every row of a; 'd' is masked where b has no
        # matching key.
        test = join_by(('a', 'b'), a, b, 'leftouter')
        control = ma.array([(0, 50, 100, -1), (1, 51, 101, -1),
                            (2, 52, 102, -1), (3, 53, 103, -1),
                            (4, 54, 104, -1), (5, 55, 105, -1),
                            (6, 56, 106, -1), (7, 57, 107, -1),
                            (8, 58, 108, -1), (9, 59, 109, -1)],
                           mask=[(0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1),
                                 (0, 0, 0, 1), (0, 0, 0, 1)],
                           dtype=[('a', int), ('b', int), ('c', int), ('d', int)])
        assert_equal(test, control)
class TestJoinBy2(TestCase):
    """Additional join_by tests: postfix handling and multi-key joins."""

    def setUp(self):
        # Plain per-instance setUp.  The original decorated this with
        # @classmethod, mutating class-level state from a per-test fixture
        # hook; unittest calls setUp on the instance, so instance attributes
        # are the idiomatic (and isolation-safe) place for fixtures.
        self.a = np.array(list(zip(np.arange(10), np.arange(50, 60),
                                   np.arange(100, 110))),
                          dtype=[('a', int), ('b', int), ('c', int)])
        self.b = np.array(list(zip(np.arange(10), np.arange(65, 75),
                                   np.arange(100, 110))),
                          dtype=[('a', int), ('b', int), ('d', int)])

    def test_no_r1postfix(self):
        # Basic test of join_by no_r1postfix: only the right-hand duplicate
        # column gets a postfix ('b' stays, 'b2' is added).
        a, b = self.a, self.b
        test = join_by(
            'a', a, b, r1postfix='', r2postfix='2', jointype='inner')
        control = np.array([(0, 50, 65, 100, 100), (1, 51, 66, 101, 101),
                            (2, 52, 67, 102, 102), (3, 53, 68, 103, 103),
                            (4, 54, 69, 104, 104), (5, 55, 70, 105, 105),
                            (6, 56, 71, 106, 106), (7, 57, 72, 107, 107),
                            (8, 58, 73, 108, 108), (9, 59, 74, 109, 109)],
                           dtype=[('a', int), ('b', int), ('b2', int),
                                  ('c', int), ('d', int)])
        assert_equal(test, control)

    def test_no_postfix(self):
        # Two empty postfixes would collide on the duplicated 'b' column and
        # must raise.
        self.assertRaises(ValueError, join_by, 'a', self.a, self.b,
                          r1postfix='', r2postfix='')

    def test_no_r2postfix(self):
        # Basic test of join_by no_r2postfix (mirror of test_no_r1postfix).
        a, b = self.a, self.b
        test = join_by(
            'a', a, b, r1postfix='1', r2postfix='', jointype='inner')
        control = np.array([(0, 50, 65, 100, 100), (1, 51, 66, 101, 101),
                            (2, 52, 67, 102, 102), (3, 53, 68, 103, 103),
                            (4, 54, 69, 104, 104), (5, 55, 70, 105, 105),
                            (6, 56, 71, 106, 106), (7, 57, 72, 107, 107),
                            (8, 58, 73, 108, 108), (9, 59, 74, 109, 109)],
                           dtype=[('a', int), ('b1', int), ('b', int),
                                  ('c', int), ('d', int)])
        assert_equal(test, control)

    def test_two_keys_two_vars(self):
        # Join on two keys ('a', 'k') with two duplicated variables.
        a = np.array(list(zip(np.tile([10, 11], 5), np.repeat(np.arange(5), 2),
                              np.arange(50, 60), np.arange(10, 20))),
                     dtype=[('k', int), ('a', int), ('b', int), ('c', int)])
        b = np.array(list(zip(np.tile([10, 11], 5), np.repeat(np.arange(5), 2),
                              np.arange(65, 75), np.arange(0, 10))),
                     dtype=[('k', int), ('a', int), ('b', int), ('c', int)])
        control = np.array([(10, 0, 50, 65, 10, 0), (11, 0, 51, 66, 11, 1),
                            (10, 1, 52, 67, 12, 2), (11, 1, 53, 68, 13, 3),
                            (10, 2, 54, 69, 14, 4), (11, 2, 55, 70, 15, 5),
                            (10, 3, 56, 71, 16, 6), (11, 3, 57, 72, 17, 7),
                            (10, 4, 58, 73, 18, 8), (11, 4, 59, 74, 19, 9)],
                           dtype=[('k', int), ('a', int), ('b1', int),
                                  ('b2', int), ('c1', int), ('c2', int)])
        test = join_by(
            ['a', 'k'], a, b, r1postfix='1', r2postfix='2', jointype='inner')
        assert_equal(test.dtype, control.dtype)
        assert_equal(test, control)
class TestAppendFieldsObj(TestCase):
    """Regression tests for append_fields on arrays containing objects.

    See https://github.com/numpy/numpy/issues/2346.
    """

    def setUp(self):
        from datetime import date
        self.data = {'obj': date(2000, 1, 1)}

    def test_append_to_objects(self):
        """append_fields must cope with an object column in the base array."""
        marker = self.data['obj']
        base = np.array([(marker, 1.), (marker, 2.)],
                        dtype=[('A', object), ('B', float)])
        extra = np.array([10, 20], dtype=int)
        result = append_fields(base, 'C', data=extra, usemask=False)
        expected = np.array([(marker, 1.0, 10), (marker, 2.0, 20)],
                            dtype=[('A', object), ('B', float), ('C', int)])
        assert_equal(result, expected)
if __name__ == '__main__':
    # Allow running this test module directly via the numpy.testing runner.
    run_module_suite()
| bsd-3-clause |
Bjwebb/pytest | _pytest/assertion/rewrite.py | 28 | 33808 | """Rewrite assertion AST to produce nice error messages"""
import ast
import errno
import itertools
import imp
import marshal
import os
import re
import struct
import sys
import types
import py
from _pytest.assertion import util
# pytest caches rewritten pycs in __pycache__.
if hasattr(imp, "get_tag"):
    # Python 3.2+/PEP 3147: derive the cache tag from the interpreter itself.
    PYTEST_TAG = imp.get_tag() + "-PYTEST"
else:
    # Older interpreters: build an implementation/version tag by hand.
    if hasattr(sys, "pypy_version_info"):
        impl = "pypy"
    elif sys.platform == "java":
        impl = "jython"
    else:
        impl = "cpython"
    ver = sys.version_info
    PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
    del ver, impl
# ".pyc" normally, ".pyo" when running with -O (__debug__ is False).
PYC_EXT = ".py" + (__debug__ and "c" or "o")
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
# Whether the parser requires *nix newlines (see _rewrite_test).
REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
# Python 2 defaults source encoding to ASCII; Python 3 to UTF-8.
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
# ast.Call lost its starargs/kwargs slots in Python 3.5.
if sys.version_info >= (3,5):
    ast_Call = ast.Call
else:
    ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
class AssertionRewritingHook(object):
    """PEP302 Import hook which rewrites asserts."""

    def __init__(self):
        self.session = None
        # name -> (code object, pyc path), filled by find_module and
        # consumed by load_module.
        self.modules = {}
        self._register_with_pkg_resources()

    def set_session(self, session):
        # Cache the test-file name patterns; the hook is inert while
        # self.session is None (see find_module).
        self.fnpats = session.config.getini("python_files")
        self.session = session

    def find_module(self, name, path=None):
        """PEP302 finder: return self (as loader) for test files, else None."""
        if self.session is None:
            return None
        sess = self.session
        state = sess.config._assertstate
        state.trace("find_module called for: %s" % name)
        names = name.rsplit(".", 1)
        lastname = names[-1]
        pth = None
        if path is not None:
            # Starting with Python 3.3, path is a _NamespacePath(), which
            # causes problems if not converted to list.
            path = list(path)
            if len(path) == 1:
                pth = path[0]
        if pth is None:
            # Locate the module's source file through the imp machinery.
            try:
                fd, fn, desc = imp.find_module(lastname, path)
            except ImportError:
                return None
            if fd is not None:
                fd.close()
            tp = desc[2]
            if tp == imp.PY_COMPILED:
                # Map a found .pyc back to its source file.
                if hasattr(imp, "source_from_cache"):
                    fn = imp.source_from_cache(fn)
                else:
                    fn = fn[:-1]
            elif tp != imp.PY_SOURCE:
                # Don't know what this is.
                return None
        else:
            fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
        fn_pypath = py.path.local(fn)
        # Is this a test file?
        if not sess.isinitpath(fn):
            # We have to be very careful here because imports in this code can
            # trigger a cycle.
            self.session = None
            try:
                for pat in self.fnpats:
                    if fn_pypath.fnmatch(pat):
                        state.trace("matched test file %r" % (fn,))
                        break
                else:
                    return None
            finally:
                self.session = sess
        else:
            state.trace("matched test file (was specified on cmdline): %r" %
                        (fn,))
        # The requested module looks like a test file, so rewrite it. This is
        # the most magical part of the process: load the source, rewrite the
        # asserts, and load the rewritten source. We also cache the rewritten
        # module code in a special pyc. We must be aware of the possibility of
        # concurrent pytest processes rewriting and loading pycs. To avoid
        # tricky race conditions, we maintain the following invariant: The
        # cached pyc is always a complete, valid pyc. Operations on it must be
        # atomic. POSIX's atomic rename comes in handy.
        write = not sys.dont_write_bytecode
        cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
        if write:
            try:
                os.mkdir(cache_dir)
            except OSError:
                e = sys.exc_info()[1].errno
                if e == errno.EEXIST:
                    # Either the __pycache__ directory already exists (the
                    # common case) or it's blocked by a non-dir node. In the
                    # latter case, we'll ignore it in _write_pyc.
                    pass
                elif e in [errno.ENOENT, errno.ENOTDIR]:
                    # One of the path components was not a directory, likely
                    # because we're in a zip file.
                    write = False
                elif e in [errno.EACCES, errno.EROFS]:
                    state.trace("read only directory: %r" % fn_pypath.dirname)
                    write = False
                else:
                    raise
        cache_name = fn_pypath.basename[:-3] + PYC_TAIL
        pyc = os.path.join(cache_dir, cache_name)
        # Notice that even if we're in a read-only directory, I'm going
        # to check for a cached pyc. This may not be optimal...
        co = _read_pyc(fn_pypath, pyc, state.trace)
        if co is None:
            state.trace("rewriting %r" % (fn,))
            source_stat, co = _rewrite_test(state, fn_pypath)
            if co is None:
                # Probably a SyntaxError in the test.
                return None
            if write:
                _make_rewritten_pyc(state, source_stat, pyc, co)
        else:
            state.trace("found cached rewritten pyc for %r" % (fn,))
        self.modules[name] = co, pyc
        return self

    def load_module(self, name):
        """PEP302 loader: execute the rewritten code object as module *name*."""
        # If there is an existing module object named 'fullname' in
        # sys.modules, the loader must use that existing module. (Otherwise,
        # the reload() builtin will not work correctly.)
        if name in sys.modules:
            return sys.modules[name]
        co, pyc = self.modules.pop(name)
        # I wish I could just call imp.load_compiled here, but __file__ has to
        # be set properly. In Python 3.2+, this all would be handled correctly
        # by load_compiled.
        mod = sys.modules[name] = imp.new_module(name)
        try:
            mod.__file__ = co.co_filename
            # Normally, this attribute is 3.2+.
            mod.__cached__ = pyc
            mod.__loader__ = self
            py.builtin.exec_(co, mod.__dict__)
        except:
            # Never leave a half-initialised module behind.
            del sys.modules[name]
            raise
        return sys.modules[name]

    def is_package(self, name):
        """Optional PEP302 API: True when *name* resolves to a package."""
        try:
            fd, fn, desc = imp.find_module(name)
        except ImportError:
            return False
        if fd is not None:
            fd.close()
        tp = desc[2]
        return tp == imp.PKG_DIRECTORY

    @classmethod
    def _register_with_pkg_resources(cls):
        """
        Ensure package resources can be loaded from this loader. May be called
        multiple times, as the operation is idempotent.
        """
        try:
            import pkg_resources
            # access an attribute in case a deferred importer is present
            pkg_resources.__name__
        except ImportError:
            return
        # Since pytest tests are always located in the file system, the
        # DefaultProvider is appropriate.
        pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)

    def get_data(self, pathname):
        """Optional PEP302 get_data API.
        """
        with open(pathname, 'rb') as f:
            return f.read()
def _write_pyc(state, co, source_stat, pyc):
    """Serialize code object *co* into the cache file *pyc*.

    Mirrors the CPython pyc layout (magic, mtime, size, marshalled code)
    so that imp.load_compiled could in principle read the result.
    Returns True on success, False when the file cannot be opened.
    """
    # Technically, we don't have to have the same pyc format as
    # (C)Python, since these "pycs" should never be seen by builtin
    # import. However, there's little reason deviate, and I hope
    # sometime to be able to use imp.load_compiled to load them. (See
    # the comment in load_module above.)
    try:
        fp = open(pyc, "wb")
    except IOError:
        err = sys.exc_info()[1].errno
        state.trace("error writing pyc file at %s: errno=%s" % (pyc, err))
        # we ignore any failure to write the cache file
        # there are many reasons, permission-denied, __pycache__ being a
        # file etc.
        return False
    with fp:
        fp.write(imp.get_magic())
        fp.write(struct.pack("<ll", int(source_stat.mtime),
                             source_stat.size & 0xFFFFFFFF))
        marshal.dump(co, fp)
    return True
# Newline variants as bytes, for normalising source before parsing.
RN = "\r\n".encode("utf-8")
N = "\n".encode("utf-8")
# PEP 263 encoding-declaration matcher (checked on the first two lines).
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
# UTF-8 byte-order mark; only consulted on Python 2 (see _rewrite_test),
# where this literal is a byte string.
BOM_UTF8 = '\xef\xbb\xbf'
def _rewrite_test(state, fn):
    """Try to read and rewrite *fn* and return the code object.

    Returns a (stat, code) pair, or (None, None) when the source cannot be
    read, decoded, parsed or compiled — the real import then surfaces the
    error to the user.
    """
    try:
        stat = fn.stat()
        source = fn.read("rb")
    except EnvironmentError:
        return None, None
    if ASCII_IS_DEFAULT_ENCODING:
        # ASCII is the default encoding in Python 2. Without a coding
        # declaration, Python 2 will complain about any bytes in the file
        # outside the ASCII range. Sadly, this behavior does not extend to
        # compile() or ast.parse(), which prefer to interpret the bytes as
        # latin-1. (At least they properly handle explicit coding cookies.) To
        # preserve this error behavior, we could force ast.parse() to use ASCII
        # as the encoding by inserting a coding cookie. Unfortunately, that
        # messes up line numbers. Thus, we have to check ourselves if anything
        # is outside the ASCII range in the case no encoding is explicitly
        # declared. For more context, see issue #269. Yay for Python 3 which
        # gets this right.
        end1 = source.find("\n")
        end2 = source.find("\n", end1 + 1)
        if (not source.startswith(BOM_UTF8) and
                cookie_re.match(source[0:end1]) is None and
                cookie_re.match(source[end1 + 1:end2]) is None):
            if hasattr(state, "_indecode"):
                # encodings imported us again, so don't rewrite.
                return None, None
            # Guard against re-entrancy through the encodings machinery.
            state._indecode = True
            try:
                try:
                    source.decode("ascii")
                except UnicodeDecodeError:
                    # Let it fail in real import.
                    return None, None
            finally:
                del state._indecode
    # On Python versions which are not 2.7 and less than or equal to 3.1, the
    # parser expects *nix newlines.
    if REWRITE_NEWLINES:
        source = source.replace(RN, N) + N
    try:
        tree = ast.parse(source)
    except SyntaxError:
        # Let this pop up again in the real import.
        state.trace("failed to parse: %r" % (fn,))
        return None, None
    rewrite_asserts(tree)
    try:
        co = compile(tree, fn.strpath, "exec")
    except SyntaxError:
        # It's possible that this error is from some bug in the
        # assertion rewriting, but I don't know of a fast way to tell.
        state.trace("failed to compile: %r" % (fn,))
        return None, None
    return stat, co
def _make_rewritten_pyc(state, source_stat, pyc, co):
    """Try to dump rewritten code to *pyc*."""
    if sys.platform.startswith("win"):
        # Windows grants exclusive access to open files and lacks an atomic
        # rename, so write straight into the final location.
        _write_pyc(state, co, source_stat, pyc)
        return
    # Elsewhere, rename is atomic: dump into a process-unique temp file and
    # atomically move it into place, so concurrent readers never observe a
    # partially-written pyc.
    proc_pyc = "%s.%s" % (pyc, os.getpid())
    if _write_pyc(state, co, source_stat, proc_pyc):
        os.rename(proc_pyc, pyc)
def _read_pyc(source, pyc, trace=lambda x: None):
"""Possibly read a pytest pyc containing rewritten code.
Return rewritten code if successful or None if not.
"""
try:
fp = open(pyc, "rb")
except IOError:
return None
with fp:
try:
mtime = int(source.mtime())
size = source.size()
data = fp.read(12)
except EnvironmentError as e:
trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
return None
# Check for invalid or out of date pyc file.
if (len(data) != 12 or data[:4] != imp.get_magic() or
struct.unpack("<ll", data[4:]) != (mtime, size)):
trace('_read_pyc(%s): invalid or out of date pyc' % source)
return None
try:
co = marshal.load(fp)
except Exception as e:
trace('_read_pyc(%s): marshal.load error %s' % (source, e))
return None
if not isinstance(co, types.CodeType):
trace('_read_pyc(%s): not a code object' % source)
return None
return co
def rewrite_asserts(mod):
    """Rewrite the assert statements in mod."""
    rewriter = AssertionRewriter()
    rewriter.run(mod)
def _saferepr(obj):
    """Get a safe repr of an object for assertion error messages.

    The assertion formatting (util.format_explanation()) requires
    newlines to be escaped since they are a special character for it.
    Normally assertion.util.format_explanation() does this but for a
    custom repr it is possible to contain one of the special escape
    sequences, especially '\n{' and '\n}' are likely to be present in
    JSON reprs.
    """
    # Use a local name that does not shadow the builtin repr().
    obj_repr = py.io.saferepr(obj)
    if py.builtin._istext(obj_repr):
        t = py.builtin.text
    else:
        t = py.builtin.bytes
    return obj_repr.replace(t("\n"), t("\\n"))
from _pytest.assertion.util import format_explanation as _format_explanation # noqa
def _format_assertmsg(obj):
    """Format the custom assertion message given.

    For strings this simply replaces newlines with '\n~' so that
    util.format_explanation() will preserve them instead of escaping
    newlines. For other objects py.io.saferepr() is used first.
    """
    # reprlib appears to have a bug which means that if a string
    # contains a newline it gets escaped, however if an object has a
    # .__repr__() which contains newlines it does not get escaped.
    # However in either case we want to preserve the newline.
    is_repr = not (py.builtin._istext(obj) or py.builtin._isbytes(obj))
    s = py.io.saferepr(obj) if is_repr else obj
    t = py.builtin.text if py.builtin._istext(s) else py.builtin.bytes
    s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
    if is_repr:
        # Undo the escaping saferepr applied so the newline survives.
        s = s.replace(t("\\n"), t("\n~"))
    return s
def _should_repr_global_name(obj):
    """Return True when a global's repr (rather than its name) is shown.

    Objects carrying a __name__ (modules, functions, classes) and other
    callables are better identified by name in an explanation.
    """
    if hasattr(obj, "__name__"):
        return False
    return not py.builtin.callable(obj)
def _format_boolop(explanations, is_or):
    """Join sub-explanations with 'or'/'and', escaping % for later
    %-formatting of the assembled message."""
    joiner = " or " if is_or else " and "
    explanation = "(" + joiner.join(explanations) + ")"
    t = py.builtin.text if py.builtin._istext(explanation) else py.builtin.bytes
    return explanation.replace(t('%'), t('%%'))
def _call_reprcompare(ops, results, expls, each_obj):
    """Pick the explanation for the first failing comparison in a chain.

    Walks the chained-comparison results until one is falsey (or raises when
    coerced to bool), then asks util._reprcompare for a custom explanation of
    that comparison, falling back to the default explanation string.
    """
    for i, res, expl in zip(range(len(ops)), results, expls):
        try:
            keep_going = bool(res)
        except Exception:
            keep_going = False
        if not keep_going:
            break
    if util._reprcompare is not None:
        custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
        if custom is not None:
            return custom
    return expl
# Maps unary-operator AST node types to %-templates used when building
# assertion explanations.
unary_map = {
    ast.Not: "not %s",
    ast.Invert: "~%s",
    ast.USub: "-%s",
    ast.UAdd: "+%s"
}
# Maps binary-operator and comparison AST node types to their source-level
# spelling, used when rendering assertion explanations.
binop_map = {
    ast.BitOr: "|",
    ast.BitXor: "^",
    ast.BitAnd: "&",
    ast.LShift: "<<",
    ast.RShift: ">>",
    ast.Add: "+",
    ast.Sub: "-",
    ast.Mult: "*",
    ast.Div: "/",
    ast.FloorDiv: "//",
    ast.Mod: "%%",  # escaped for string formatting
    ast.Eq: "==",
    ast.NotEq: "!=",
    ast.Lt: "<",
    ast.LtE: "<=",
    ast.Gt: ">",
    ast.GtE: ">=",
    ast.Pow: "**",
    ast.Is: "is",
    ast.IsNot: "is not",
    ast.In: "in",
    ast.NotIn: "not in"
}
# Python 3.4+ compatibility
if hasattr(ast, "NameConstant"):
    _NameConstant = ast.NameConstant
else:
    # Older Pythons: spell True/False/None as a plain Name node instead.
    def _NameConstant(c):
        return ast.Name(str(c), ast.Load())
def set_location(node, lineno, col_offset):
    """Set node location information recursively.

    Assigns *lineno* and *col_offset* to *node* and every descendant that
    carries those attributes, then returns *node*.
    """
    pending = [node]
    while pending:
        current = pending.pop()
        if "lineno" in current._attributes:
            current.lineno = lineno
        if "col_offset" in current._attributes:
            current.col_offset = col_offset
        pending.extend(ast.iter_child_nodes(current))
    return node
class AssertionRewriter(ast.NodeVisitor):
"""Assertion rewriting implementation.
The main entrypoint is to call .run() with an ast.Module instance,
this will then find all the assert statements and re-write them to
provide intermediate values and a detailed assertion error. See
http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
for an overview of how this works.
The entry point here is .run() which will iterate over all the
statements in an ast.Module and for each ast.Assert statement it
finds call .visit() with it. Then .visit_Assert() takes over and
is responsible for creating new ast statements to replace the
original assert statement: it re-writes the test of an assertion
to provide intermediate values and replace it with an if statement
which raises an assertion error with a detailed explanation in
case the expression is false.
For this .visit_Assert() uses the visitor pattern to visit all the
AST nodes of the ast.Assert.test field, each visit call returning
an AST node and the corresponding explanation string. During this
state is kept in several instance attributes:
:statements: All the AST statements which will replace the assert
statement.
:variables: This is populated by .variable() with each variable
used by the statements so that they can all be set to None at
the end of the statements.
:variable_counter: Counter to create new unique variables needed
by statements. Variables are created using .variable() and
have the form of "@py_assert0".
:on_failure: The AST statements which will be executed if the
assertion test fails. This is the code which will construct
the failure message and raises the AssertionError.
:explanation_specifiers: A dict filled by .explanation_param()
with %-formatting placeholders and their corresponding
expressions to use in the building of an assertion message.
This is used by .pop_format_context() to build a message.
:stack: A stack of the explanation_specifiers dicts maintained by
.push_format_context() and .pop_format_context() which allows
to build another %-formatted string while already building one.
This state is reset on every new assert statement visited and used
by the other visitors.
"""
def run(self, mod):
"""Find all assert statements in *mod* and rewrite them."""
if not mod.body:
# Nothing to do.
return
# Insert some special imports at the top of the module but after any
# docstrings and __future__ imports.
aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
expect_docstring = True
pos = 0
lineno = 0
for item in mod.body:
if (expect_docstring and isinstance(item, ast.Expr) and
isinstance(item.value, ast.Str)):
doc = item.value.s
if "PYTEST_DONT_REWRITE" in doc:
# The module has disabled assertion rewriting.
return
lineno += len(doc) - 1
expect_docstring = False
elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
item.module != "__future__"):
lineno = item.lineno
break
pos += 1
imports = [ast.Import([alias], lineno=lineno, col_offset=0)
for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
nodes = [mod]
while nodes:
node = nodes.pop()
for name, field in ast.iter_fields(node):
if isinstance(field, list):
new = []
for i, child in enumerate(field):
if isinstance(child, ast.Assert):
# Transform assert.
new.extend(self.visit(child))
else:
new.append(child)
if isinstance(child, ast.AST):
nodes.append(child)
setattr(node, name, new)
elif (isinstance(field, ast.AST) and
# Don't recurse into expressions as they can't contain
# asserts.
not isinstance(field, ast.expr)):
nodes.append(field)
def variable(self):
    """Allocate, record and return a fresh temporary-variable name.

    The "@" prefix is not legal in Python identifiers, so generated
    names can never clash with user code.
    """
    fresh = "@py_assert%d" % next(self.variable_counter)
    self.variables.append(fresh)
    return fresh
def assign(self, expr):
    """Bind *expr* to a fresh temporary and return a Load reference to it.

    The generated assignment is appended to the current statement list so
    it executes before any statement that uses the returned name.
    """
    target = self.variable()
    assignment = ast.Assign([ast.Name(target, ast.Store())], expr)
    self.statements.append(assignment)
    return ast.Name(target, ast.Load())
def display(self, expr):
    """Call py.io.saferepr on the expression."""
    # Thin wrapper: builds a run-time call to the _saferepr helper so the
    # value's repr is produced (and safely truncated) when the assert fails.
    return self.helper("saferepr", expr)
def helper(self, name, *args):
    """Call a helper in this module."""
    # Helpers are reached through the "@pytest_ar" import injected by
    # run(); their real names carry a leading underscore.
    py_name = ast.Name("@pytest_ar", ast.Load())
    attr = ast.Attribute(py_name, "_" + name, ast.Load())
    return ast_Call(attr, list(args), [])
def builtin(self, name):
    """Return an AST expression that references builtin *name*.

    The lookup goes through the "@py_builtins" module injected by run(),
    so shadowed builtins in the tested module cannot interfere.
    """
    module_ref = ast.Name("@py_builtins", ast.Load())
    return ast.Attribute(module_ref, name, ast.Load())
def explanation_param(self, expr):
    """Return a new named %-formatting placeholder for *expr*.

    Registers *expr* under a fresh "pyN" key in the current formatting
    context so pop_format_context() can later substitute it, and returns
    the matching "%(pyN)s" placeholder string.
    """
    key = "py%d" % next(self.variable_counter)
    self.explanation_specifiers[key] = expr
    return "%%(%s)s" % key
def push_format_context(self):
    """Open a fresh formatting context for explanation placeholders.

    explanation_param() fills the new dict; pop_format_context() later
    pops it and emits the code that renders the %-formatted explanation.
    """
    fresh = {}
    self.explanation_specifiers = fresh
    self.stack.append(fresh)
def pop_format_context(self, expl_expr):
    """Format the %-formatted string with current format context.

    The expl_expr should be an ast.Str instance constructed from
    the %-placeholders created by .explanation_param().  This will
    add the required code to format said string to .on_failure and
    return the ast.Name instance of the formatted string.
    """
    current = self.stack.pop()
    if self.stack:
        # Restore the enclosing context so outer expressions keep
        # accumulating their own placeholders.
        self.explanation_specifiers = self.stack[-1]
    keys = [ast.Str(key) for key in current.keys()]
    format_dict = ast.Dict(keys, list(current.values()))
    # Build "<template> % {placeholders}" to be evaluated only on failure.
    form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
    name = "@py_format" + str(next(self.variable_counter))
    self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
    return ast.Name(name, ast.Load())
def generic_visit(self, node):
    """Fallback for expressions without a dedicated visit method.

    Names the expression's value and explains it via its safe repr.
    """
    assert isinstance(node, ast.expr)
    named = self.assign(node)
    return named, self.explanation_param(self.display(named))
def visit_Assert(self, assert_):
    """Return the AST statements to replace the ast.Assert instance.

    This re-writes the test of an assertion to provide
    intermediate values and replace it with an if statement which
    raises an assertion error with a detailed explanation in case
    the expression is false.
    """
    # Per-assert state: generated statements, temp names, format-context
    # stack and failure-only code are all reset for every assert.
    self.statements = []
    self.variables = []
    self.variable_counter = itertools.count()
    self.stack = []
    self.on_failure = []
    self.push_format_context()
    # Rewrite assert into a bunch of statements.
    top_condition, explanation = self.visit(assert_.test)
    # Create failure message.
    body = self.on_failure
    negation = ast.UnaryOp(ast.Not(), top_condition)
    self.statements.append(ast.If(negation, body, []))
    if assert_.msg:
        # "assert x, msg" - prepend the user's message to the explanation.
        assertmsg = self.helper('format_assertmsg', assert_.msg)
        explanation = "\n>assert " + explanation
    else:
        assertmsg = ast.Str("")
        explanation = "assert " + explanation
    template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
    msg = self.pop_format_context(template)
    fmt = self.helper("format_explanation", msg)
    err_name = ast.Name("AssertionError", ast.Load())
    exc = ast_Call(err_name, [fmt], [])
    if sys.version_info[0] >= 3:
        raise_ = ast.Raise(exc, None)
    else:
        # Python 2 ast.Raise takes (type, inst, tback).
        raise_ = ast.Raise(exc, None, None)
    body.append(raise_)
    # Clear temporary variables by setting them to None.
    if self.variables:
        variables = [ast.Name(name, ast.Store())
                     for name in self.variables]
        clear = ast.Assign(variables, _NameConstant(None))
        self.statements.append(clear)
    # Fix line numbers.
    for stmt in self.statements:
        set_location(stmt, assert_.lineno, assert_.col_offset)
    return self.statements
def visit_Name(self, name):
    """Rewrite a bare name reference.

    The explanation shows the repr of the value when it is informative,
    otherwise just the identifier itself.
    """
    # Display the repr of the name if it's a local variable or
    # _should_repr_global_name() thinks it's acceptable.
    locs = ast_Call(self.builtin("locals"), [], [])
    inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
    dorepr = self.helper("should_repr_global_name", name)
    test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
    # Decision is taken at run time: IfExp picks repr vs. plain name.
    expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
    return name, self.explanation_param(expr)
def visit_BoolOp(self, boolop):
    """Rewrite ``and``/``or`` chains, preserving short-circuiting.

    Each operand is evaluated inside a nested ``if`` so later operands
    only run when the chain has not short-circuited; per-operand
    explanations are appended to a run-time list.
    """
    res_var = self.variable()
    expl_list = self.assign(ast.List([], ast.Load()))
    app = ast.Attribute(expl_list, "append", ast.Load())
    is_or = int(isinstance(boolop.op, ast.Or))
    body = save = self.statements
    fail_save = self.on_failure
    levels = len(boolop.values) - 1
    self.push_format_context()
    # Process each operand, short-circuting if needed.
    for i, v in enumerate(boolop.values):
        if i:
            fail_inner = []
            # cond is set in a prior loop iteration below
            self.on_failure.append(ast.If(cond, fail_inner, []))  # noqa
            self.on_failure = fail_inner
        self.push_format_context()
        res, expl = self.visit(v)
        body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
        expl_format = self.pop_format_context(ast.Str(expl))
        call = ast_Call(app, [expl_format], [])
        self.on_failure.append(ast.Expr(call))
        if i < levels:
            # For "or" the chain continues while the value is falsey,
            # hence the extra negation.
            cond = res
            if is_or:
                cond = ast.UnaryOp(ast.Not(), cond)
            inner = []
            self.statements.append(ast.If(cond, inner, []))
            self.statements = body = inner
    # Restore the statement/failure lists of the enclosing expression.
    self.statements = save
    self.on_failure = fail_save
    expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
    expl = self.pop_format_context(expl_template)
    return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
def visit_UnaryOp(self, unary):
    """Rewrite a unary operation (e.g. ``not x``, ``-x``)."""
    inner_res, inner_expl = self.visit(unary.operand)
    rewritten = self.assign(ast.UnaryOp(unary.op, inner_res))
    # unary_map supplies the "%s"-style template for this operator.
    return rewritten, unary_map[unary.op.__class__] % (inner_expl,)
def visit_BinOp(self, binop):
    """Rewrite a binary operation, naming its result."""
    operator_symbol = binop_map[binop.op.__class__]
    lhs_res, lhs_expl = self.visit(binop.left)
    rhs_res, rhs_expl = self.visit(binop.right)
    rewritten = self.assign(ast.BinOp(lhs_res, binop.op, rhs_res))
    return rewritten, "(%s %s %s)" % (lhs_expl, operator_symbol, rhs_expl)
def visit_Call_35(self, call):
    """
    visit `ast.Call` nodes on Python3.5 and after

    Rewrites the callee and every argument, then names the call result;
    the explanation shows "func(args)" plus the result's repr.
    """
    new_func, func_expl = self.visit(call.func)
    arg_expls = []
    new_args = []
    new_kwargs = []
    for arg in call.args:
        res, expl = self.visit(arg)
        arg_expls.append(expl)
        new_args.append(res)
    for keyword in call.keywords:
        res, expl = self.visit(keyword.value)
        new_kwargs.append(ast.keyword(keyword.arg, res))
        if keyword.arg:
            arg_expls.append(keyword.arg + "=" + expl)
        else:  # **args have `arg` keywords with an .arg of None
            arg_expls.append("**" + expl)
    expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
    # From 3.5 ast.Call no longer takes starargs/kwargs positions.
    new_call = ast.Call(new_func, new_args, new_kwargs)
    res = self.assign(new_call)
    res_expl = self.explanation_param(self.display(res))
    outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
    return res, outer_expl
def visit_Starred(self, starred):
    """Rewrite a *expr argument inside a call (Python 3.5+)."""
    # From Python 3.5, a Starred node can appear in a function call.
    # NOTE(review): the rewritten value node is discarded and the original
    # Starred node is returned unchanged - presumably intentional; confirm.
    inner_res, inner_expl = self.visit(starred.value)
    return starred, '*' + inner_expl
def visit_Call_legacy(self, call):
    """
    visit `ast.Call nodes on 3.4 and below`

    Same rewrite as visit_Call_35 but using the old ast.Call signature
    with explicit starargs / kwargs slots.
    """
    new_func, func_expl = self.visit(call.func)
    arg_expls = []
    new_args = []
    new_kwargs = []
    new_star = new_kwarg = None
    for arg in call.args:
        res, expl = self.visit(arg)
        new_args.append(res)
        arg_expls.append(expl)
    for keyword in call.keywords:
        res, expl = self.visit(keyword.value)
        new_kwargs.append(ast.keyword(keyword.arg, res))
        arg_expls.append(keyword.arg + "=" + expl)
    if call.starargs:
        new_star, expl = self.visit(call.starargs)
        arg_expls.append("*" + expl)
    if call.kwargs:
        new_kwarg, expl = self.visit(call.kwargs)
        arg_expls.append("**" + expl)
    expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
    new_call = ast.Call(new_func, new_args, new_kwargs,
                        new_star, new_kwarg)
    res = self.assign(new_call)
    res_expl = self.explanation_param(self.display(res))
    outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
    return res, outer_expl
# ast.Call signature changed on 3.5,
# conditionally change which methods is named
# visit_Call depending on Python version
# (class-level alias evaluated once at class-definition time)
if sys.version_info >= (3, 5):
    visit_Call = visit_Call_35
else:
    visit_Call = visit_Call_legacy
def visit_Attribute(self, attr):
    """Rewrite an attribute access, naming the loaded value.

    Only Load contexts are rewritten; Store/Del targets fall back to
    generic_visit().
    """
    if not isinstance(attr.ctx, ast.Load):
        return self.generic_visit(attr)
    base_res, base_expl = self.visit(attr.value)
    rewritten = self.assign(ast.Attribute(base_res, attr.attr, ast.Load()))
    repr_expl = self.explanation_param(self.display(rewritten))
    explanation = "%s\n{%s = %s.%s\n}" % (
        repr_expl, repr_expl, base_expl, attr.attr)
    return rewritten, explanation
def visit_Compare(self, comp):
    """Rewrite a (possibly chained) comparison.

    Each pairwise comparison is stored in its own temporary; the
    run-time helper _call_reprcompare lets plugins customise the
    explanation (e.g. diffs for ==).
    """
    self.push_format_context()
    left_res, left_expl = self.visit(comp.left)
    res_variables = [self.variable() for i in range(len(comp.ops))]
    load_names = [ast.Name(v, ast.Load()) for v in res_variables]
    store_names = [ast.Name(v, ast.Store()) for v in res_variables]
    it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
    expls = []
    syms = []
    results = [left_res]
    for i, op, next_operand in it:
        next_res, next_expl = self.visit(next_operand)
        results.append(next_res)
        sym = binop_map[op.__class__]
        syms.append(ast.Str(sym))
        expl = "%s %s %s" % (left_expl, sym, next_expl)
        expls.append(ast.Str(expl))
        res_expr = ast.Compare(left_res, [op], [next_res])
        self.statements.append(ast.Assign([store_names[i]], res_expr))
        # Chain: the right operand becomes the left of the next pair.
        left_res, left_expl = next_res, next_expl
    # Use pytest.assertion.util._reprcompare if that's available.
    expl_call = self.helper("call_reprcompare",
                            ast.Tuple(syms, ast.Load()),
                            ast.Tuple(load_names, ast.Load()),
                            ast.Tuple(expls, ast.Load()),
                            ast.Tuple(results, ast.Load()))
    if len(comp.ops) > 1:
        # a < b < c is true iff every pairwise comparison is true.
        res = ast.BoolOp(ast.And(), load_names)
    else:
        res = load_names[0]
    return res, self.explanation_param(self.pop_format_context(expl_call))
| mit |
wainersm/buildbot | www/base/setup.py | 20 | 1690 | #!/usr/bin/env python
#
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Fail fast with an actionable message when the build-time dependencies are
# missing; 'mock' and 'buildbot' are imported only to verify they exist.
try:
    from buildbot_pkg import setup_www_plugin
    import mock  # noqa
    import buildbot  # noqa
except ImportError:
    import sys
    print("Please install buildbot, buildbot_pkg, and mock modules in order to install that package, or use the pre-build .whl modules available on pypi", file=sys.stderr)
    sys.exit(1)
setup_www_plugin(
name='buildbot-www',
description='Buildbot UI',
author=u'Pierre Tardy',
author_email=u'tardyp@gmail.com',
setup_requires=['buildbot', 'buildbot_pkg', 'mock'],
url='http://buildbot.net/',
license='GNU GPL',
packages=['buildbot_www'],
package_data={
'': [
'VERSION',
'static/*',
'static/img/*',
'static/fonts/*',
]
},
entry_points="""
[buildbot.www]
base = buildbot_www:ep
"""
)
| gpl-2.0 |
eloylp/scirocco-pyclient | test/unit/messages_test.py | 1 | 5157 | import unittest
import datetime
from sciroccoclient.exceptions import SciroccoInvalidMessageScheduleTimeError, SciroccoInvalidMessageStatusError, \
SciroccoInvalidMessageError, SciroccoInvalidMessageDestinationError, SciroccoInvalidMessageDataError
from sciroccoclient.messages import SciroccoMessage, SciroccoMessageValidator
class SciroccoMessageTest(unittest.TestCase):
    """Tests for the SciroccoMessage public attributes and setter guards."""
    def setUp(self):
        # Fresh message per test.
        self.message = SciroccoMessage()
    def test_node_destination_method_exists(self):
        self.assertTrue("node_destination" in dir(self.message))
    def test_status_method_exists(self):
        self.assertTrue("status" in dir(self.message))
    def test_payload_method_exists(self):
        self.assertTrue("payload" in dir(self.message))
    def test_data_type_method_exists(self):
        self.assertTrue("payload_type" in dir(self.message))
    def test_scheduled_time_method_exists(self):
        self.assertTrue("scheduled_time" in dir(self.message))
    def test_that_all_properties_are_at_initial_state(self):
        message = SciroccoMessage()
        self.assertIsNone(message.node_destination)
        self.assertIsNone(message.payload)
        self.assertIsNone(message.payload_type)
        self.assertIsNone(message.scheduled_time)
    def test_that_setting_scheduled_time_also_sets_scheduled_status(self):
        # Assigning a scheduled_time must flip status to 'scheduled'.
        message = SciroccoMessage()
        message.payload = 'data'
        message.node_destination = 'af123'
        message.scheduled_time = datetime.datetime.utcnow()
        self.assertEqual(message.status, 'scheduled')
    def test_that_pushing_scirocco_message_with_invalid_status_raises_exception(self):
        message = SciroccoMessage()
        message.payload = {"name": "message"}
        message.node_destination = 'af123'
        message.payload_type = '.extension'
        self.assertRaises(SciroccoInvalidMessageStatusError, setattr, message, 'status', 'This is not a valid status.')
    def test_that_pushing_scirocco_message_with_invalid_scheduled_time_raises_exception(self):
        # scheduled_time must be a datetime instance.
        message = SciroccoMessage()
        message.payload = {"name": "message"}
        message.node_destination = 'af123'
        message.payload_type = '.extension'
        self.assertRaises(SciroccoInvalidMessageScheduleTimeError, setattr, message, 'scheduled_time',
                          'This is not an instance so must raise SciroccoInvalidMessageScheduleTimeError.')
class SciroccoMessageValidatorTest(unittest.TestCase):
    """Tests for SciroccoMessageValidator: check() raising behaviour and the
    individual check_* predicates."""
    def setUp(self):
        self.validator = SciroccoMessageValidator()
    def test_that_check_exists(self):
        self.assertTrue("check" in dir(self.validator))
    def test_that_check_destination_exists(self):
        self.assertTrue("check_node_destination" in dir(self.validator))
    def test_that_check_status_exists(self):
        self.assertTrue("check_status" in dir(self.validator))
    def test_that_check_payload_exists(self):
        self.assertTrue("check_payload" in dir(self.validator))
    def test_check_raises_invalid_message(self):
        # check() only accepts SciroccoMessage instances.
        message = "tHIS IS AN INCORRECT MESSAGE TYPE"
        self.assertRaises(SciroccoInvalidMessageError, self.validator.check, message)
    def test_check_raises_invalid_node_destination(self):
        message = SciroccoMessage()
        self.assertRaises(SciroccoInvalidMessageDestinationError, self.validator.check, message)
    def test_check_raises_invalid_message_status(self):
        message = SciroccoMessage()
        message.node_destination = 'af123'
        # Bypass the property setter to force an invalid internal status.
        message._status = 'novalid status'
        self.assertRaises(SciroccoInvalidMessageStatusError, self.validator.check, message)
    def test_check_raises_invalid_payload(self):
        message = SciroccoMessage()
        message.node_destination = 'af123'
        self.assertRaises(SciroccoInvalidMessageDataError, self.validator.check, message)
    def test_message_is_instance_of_scirocco_message(self):
        message = "This message is a string"
        self.assertFalse(self.validator.check_message(message))
    def test_destination_cannot_be_none(self):
        message = SciroccoMessage()
        message.payload = 'asdas'
        self.assertFalse(self.validator.check_node_destination(message))
    def test_status_cannot_be_none(self):
        message = SciroccoMessage()
        message.payload = 'asdas'
        message._status = None
        self.assertFalse(self.validator.check_status(message))
    def test_status_cannot_be_random(self):
        message = SciroccoMessage()
        message._status = 'asdadasd'
        self.assertFalse(self.validator.check_status(message))
    def test_status_can_be_scheduled(self):
        message = SciroccoMessage()
        message.status = 'scheduled'
        self.assertTrue(self.validator.check_status(message))
    def test_status_can_be_pending(self):
        message = SciroccoMessage()
        message.status = 'pending'
        self.assertTrue(self.validator.check_status(message))
    def test_payload_cannot_be_none(self):
        message = SciroccoMessage()
        message.node_destination = 'af123'
        self.assertFalse(self.validator.check_payload(message))
| agpl-3.0 |
3dfxsoftware/cbss-addons | TODO-7.0/sneldev_magento/wizard/sneldev_magento_stock_init.py | 4 | 1636 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
import os
from export_tools import *
from osv import osv, fields
class wiz_sneldev_stock_init(osv.osv_memory):
    """Transient wizard that triggers a stock initialization from Magento."""
    _name = 'sneldev.stock.init'
    _description = 'Initialize stock from Magento'
    # No wizard fields: this is a plain confirmation dialog.
    _columns = {
    }
    _defaults = {
    }
    def do_stock_init(self, cr, uid, ids, context=None):
        """Run the Magento stock import; raise a user-visible error when the
        connector reports failure (negative return code)."""
        if (self.pool.get('sneldev.magento').init_stock(cr, uid) < 0):
            raise osv.except_osv(('Warning'), ('Init failed, please refer to log file for failure details.'))
        return {'type': 'ir.actions.act_window_close'}
# Register the osv_memory model (OpenERP 6.x convention).
wiz_sneldev_stock_init()
| gpl-2.0 |
hzy/raven-python | raven/utils/wsgi.py | 25 | 3343 | """
This module implements WSGI related helpers adapted from ``werkzeug.wsgi``
:copyright: (c) 2010 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from raven.utils import six
from raven.utils.compat import urllib_quote
# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
def get_headers(environ):
    """
    Yield (header-name, value) pairs for the proper HTTP headers in
    *environ*.  WSGI spells most headers as HTTP_<NAME>; CONTENT_TYPE and
    CONTENT_LENGTH appear without the prefix and are included as well.
    """
    unprefixed = ('CONTENT_TYPE', 'CONTENT_LENGTH')
    skipped = ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH')
    for raw_key, value in six.iteritems(environ):
        raw_key = str(raw_key)
        if raw_key in unprefixed:
            yield raw_key.replace('_', '-').title(), value
        elif raw_key.startswith('HTTP_') and raw_key not in skipped:
            yield raw_key[5:].replace('_', '-').title(), value
def get_environ(environ):
    """
    Yield (key, value) pairs for the whitelisted environment variables only.
    """
    for key in ('REMOTE_ADDR', 'SERVER_NAME', 'SERVER_PORT'):
        try:
            yield key, environ[key]
        except KeyError:
            continue
# `get_host` comes from `werkzeug.wsgi`
def get_host(environ):
"""Return the real host for the given WSGI environment. This takes care
of the `X-Forwarded-Host` header.
:param environ: the WSGI environment to get the host of.
"""
scheme = environ.get('wsgi.url_scheme')
if 'HTTP_X_FORWARDED_HOST' in environ:
result = environ['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in environ:
result = environ['HTTP_HOST']
else:
result = environ['SERVER_NAME']
if (scheme, str(environ['SERVER_PORT'])) not \
in (('https', '443'), ('http', '80')):
result += ':' + environ['SERVER_PORT']
if result.endswith(':80') and scheme == 'http':
result = result[:-3]
elif result.endswith(':443') and scheme == 'https':
result = result[:-4]
return result
# `get_current_url` comes from `werkzeug.wsgi`
def get_current_url(environ, root_only=False, strip_querystring=False,
host_only=False):
"""A handy helper function that recreates the full URL for the current
request or parts of it. Here an example:
>>> from werkzeug import create_environ
>>> env = create_environ("/?param=foo", "http://localhost/script")
>>> get_current_url(env)
'http://localhost/script/?param=foo'
>>> get_current_url(env, root_only=True)
'http://localhost/script/'
>>> get_current_url(env, host_only=True)
'http://localhost/'
>>> get_current_url(env, strip_querystring=True)
'http://localhost/script/'
:param environ: the WSGI environment to get the current URL from.
:param root_only: set `True` if you only want the root URL.
:param strip_querystring: set to `True` if you don't want the querystring.
:param host_only: set to `True` if the host URL should be returned.
"""
tmp = [environ['wsgi.url_scheme'], '://', get_host(environ)]
cat = tmp.append
if host_only:
return ''.join(tmp) + '/'
cat(urllib_quote(environ.get('SCRIPT_NAME', '').rstrip('/')))
if root_only:
cat('/')
else:
cat(urllib_quote('/' + environ.get('PATH_INFO', '').lstrip('/')))
if not strip_querystring:
qs = environ.get('QUERY_STRING')
if qs:
cat('?' + qs)
return ''.join(tmp)
| bsd-3-clause |
stoneworksolutions/redongo | redongo/redongo_server.py | 1 | 20796 | import datetime
import logging
import logging.handlers
import os
import pymongo
import redis
from redis.sentinel import Sentinel
import server_exceptions
import general_exceptions
import signal
import stoneredis
import sys
import time
import traceback
import utils
import cipher_utils
import serializer_utils
import queue_utils
from optparse import OptionParser
from pymongo.errors import DuplicateKeyError, PyMongoError
from twisted.internet import reactor
from twisted.internet.error import ReactorNotRunning
from twisted.internet.task import LoopingCall
try:
from bson.objectid import ObjectId
from bson.errors import InvalidId
from bson.errors import InvalidDocument
except ImportError:
from pymongo.objectid import ObjectId, InvalidId, InvalidDocument
try:
import cPickle as pickle
except:
import pickle
# LOGGER CONFIG
FACILITY = "local0"
logging.basicConfig()
logger = logging.getLogger()
formatter = logging.Formatter('PID:%(process)s %(filename)s %(funcName)s %(levelname)s %(message)s')
# Module-level state shared between the main loop, the twisted looping
# calls and the signal/shutdown handlers below.
rs = None
options = None
args = None
run_stopped = False
class RedongoServer(object):
def __init__(self, mode, *args, **kwargs):
    """Set up redis, the AES cipher and both disk-backed overflow queues.

    *mode* selects plain-Redis vs Sentinel connection handling; queue
    names and sizes come from the module-level ``options``.
    """
    def __get_sk__():
        # Fetch (or lazily create and persist) the shared secret key used
        # to decrypt mongo passwords stored in application settings.
        result = self.redis.get('redongo_sk')
        if not result:
            result = os.urandom(16)
            self.redis.set('redongo_sk', result)
        return result
    logger.info('Starting Redongo Server..')
    self.mode = mode
    self.create_redis_connection()
    self.keep_going = True
    self.redisQueue = options.redisQueue
    self.popSize = int(options.popSize)
    self.redisQueueSize = int(options.redisQueueSize)
    # bulks: application_name -> {'serializer', 'data', settings...}
    self.bulks = {}
    self.completed_bulks = set()
    self.objs = []
    self.cipher = cipher_utils.AESCipher(__get_sk__())
    disk_queue_load_time = time.time()
    logger.info('Loading disk queues...')
    self.disk_queue = queue_utils.Queue(queue_name=options.diskQueue)
    logger.info('Loading disk queue took {0}'.format(time.time() - disk_queue_load_time))
    ret_disk_queue_load_time = time.time()
    # Holds objects returned to disk by back_to_disk() on shutdown.
    self.returned_disk_queue = queue_utils.Queue(queue_name='{0}_returned'.format(options.diskQueue))
    logger.info('Loading returned disk queue took {0}'.format(time.time() - ret_disk_queue_load_time))
    self.lock_key = '{0}_LOCK'.format(self.redisQueue)
def create_redis_connection(self):
    """Create self.redis according to self.mode ('Redis' or Sentinel)."""
    if self.mode == 'Redis':
        self.redis = stoneredis.StoneRedis(options.redisIP, db=options.redisDB, port=options.redisPort, socket_connect_timeout=5, socket_timeout=5)
        self.redis.connect()
    else:
        # Sentinel mode: resolve the current master for the named service.
        SENTINEL_POOL = Sentinel(
            options.sentinelServers,
            socket_timeout=0.1,
            max_connections=1000,
        )
        self.redis = SENTINEL_POOL.master_for(
            options.sentinelName,
            redis_class=stoneredis.StoneRedis,
            socket_timeout=5,
            socket_connect_timeout=5,
        )
def check_object(self, obj):
    """Validate a deserialized queue entry: must be a two-item list."""
    is_valid = type(obj) == list and len(obj) == 2
    if not is_valid:
        raise server_exceptions.ObjectValidationError('Type not valid')
def get_application_settings(self, application_name):
    # Delegates to the shared helper, reusing this server's redis
    # connection; callers catch ApplicationSettingsError on failure.
    return utils.get_application_settings(application_name, self.redis)
def save_to_failed_queue(self, application_name, bulk):
    """Re-queue a bulk's raw (still pickled) objects onto <queue>_FAILED
    so they can be inspected or replayed later."""
    i = 0
    for obj, command, original_object in bulk['data']:
        self.redis.rpush('{0}_FAILED'.format(self.redisQueue), original_object)
        i += 1
    logger.warning('Moved {0} objects from application {1} to queue {2}_FAILED'.format(i, application_name, self.redisQueue))
def run(self):
    """Main consumer loop (runs until keep_going is cleared).

    Each iteration, under the shared redis lock: drain the returned disk
    queue (first pass only), then the overflow disk queue, else pop from
    the redis queue; deserialize the popped objects into per-application
    bulks, and flush any bulks flagged complete.
    """
    global run_stopped
    first_run = True
    try:
        logger.info('Running!')
        while self.keep_going:
            object_found = False
            lock = self.redis.wait_for_lock(self.lock_key, 60, auto_renewal=True)
            if first_run:
                # Recover objects persisted by back_to_disk() on the
                # previous shutdown.
                while self.returned_disk_queue._length > 0:
                    self.objs.append(self.returned_disk_queue.pop())
                    object_found = True
                first_run = False
                if object_found:
                    logger.info('Got {0} objects from returned disk queue {1}'.format(len(self.objs), self.returned_disk_queue._disk_queue_name))
            if self.disk_queue._length > 0:
                # Prefer the overflow disk queue, up to popSize objects.
                for i in range(0, self.popSize):
                    if self.disk_queue._length:
                        self.objs.append(self.disk_queue.pop())
                        object_found = True
                        logger.debug('Got {0} objects from disk queue {1}'.format(len(self.objs), self.disk_queue._disk_queue_name))
                    else:
                        break
            else:
                try:
                    # Blocking pop; TimeoutError just means an idle cycle.
                    self.objs.append(self.redis.blpop(self.redisQueue)[1])
                    logger.debug('Got {0} objects from redis queue {1}'.format(len(self.objs), self.redisQueue))
                    object_found = True
                except redis.TimeoutError:
                    pass
                if object_found:
                    self.objs.extend(self.redis.multi_lpop(self.redisQueue, self.popSize-1))
            if lock:
                self.redis.release_lock(lock)
            if object_found:
                while self.objs:
                    try:
                        orig_obj = self.objs.pop(0)
                        obj = pickle.loads(orig_obj)
                        try:
                            self.check_object(obj)
                            application_settings = self.get_application_settings(obj[0][0])
                        except (server_exceptions.ObjectValidationError, general_exceptions.ApplicationSettingsError), e:
                            logger.error('Discarding {0} object because of {1}'.format(obj[0], e))
                            continue
                        # obj[0] = (application_name, serializer, command);
                        # obj[1] = serialized payload.
                        application_bulk = self.bulks.setdefault(obj[0][0], {'serializer': obj[0][1], 'data': []})
                        application_bulk.setdefault('inserted_date', datetime.datetime.utcnow())
                        application_bulk.update(application_settings)
                        ser = serializer_utils.serializer(obj[0][1])
                        obj_data = ser.loads(obj[1])
                        # Keep the raw pickle so failures can be re-queued.
                        application_bulk['data'].append((self.normalize_object(obj_data), obj[0][2], orig_obj))
                    except (ValueError, TypeError, IndexError, ImportError, pickle.PickleError), e:
                        logger.error('Discarding {0} object because of {1}'.format(orig_obj, e))
                        continue
            while self.completed_bulks:
                self.consume_application(self.completed_bulks.pop())
            # Guarantee that the looping call can access the lock
            time.sleep(.05)
        logger.info('Setting run_stopped to True')
        run_stopped = True
    except:
        logger.error('Stopping redongo because unexpected exception: {0}'.format(traceback.format_exc()))
        logger.info('Setting run_stopped to True')
        run_stopped = True
        stopApp()
def back_to_disk(self):
    """Persist every in-memory bulk object to the returned disk queue.

    Called on shutdown; run() drains this queue again on the next start.
    """
    logger.info('Returning memory data to Disk Queue')
    total_returned = 0
    for application_name, bulk in self.bulks.iteritems():
        for _obj, _command, original_object in bulk['data']:
            self.returned_disk_queue.push(original_object)
            total_returned += 1
    logger.info('{0} objects returned to Disk Queue'.format(total_returned))
def get_mongo_collection(self, bulk):
    """Open the target mongo collection described by *bulk*'s settings.

    The stored mongo password is AES-encrypted; it is decrypted here with
    the server's shared key before building the connection URI.
    """
    mongo_client = pymongo.MongoClient('mongodb://{0}:{1}@{2}:{3}/{4}'.format(bulk['mongo_user'], self.cipher.decrypt(bulk['mongo_password']), bulk['mongo_host'], bulk['mongo_port'], bulk['mongo_database']))
    mongo_db = mongo_client[bulk['mongo_database']]
    collection = mongo_db[bulk['mongo_collection']]
    return collection
def normalize_object(self, obj):
    """Convert the fields named in obj['objectid_fields'] to ObjectId.

    The helper key 'objectid_fields' is removed from the document; values
    that are falsy or not valid ObjectIds are left untouched.
    """
    for field in obj.pop('objectid_fields', []):
        raw_value = obj.get(field, None)
        if not raw_value:
            continue
        try:
            obj[field] = ObjectId(raw_value)
        except (InvalidId, TypeError):
            pass
    return obj
def deal_with_mongo(self, application_name):
    """Flush one application's bulk to mongo, honouring per-object commands.

    Consecutive objects sharing the same command ('save' or 'add') are
    executed together; any objects the executors report as unsaved are
    moved to the failed queue.
    """
    bulk = self.bulks[application_name]
    set_of_objects = []
    to_failed = []
    result = None
    try:
        collection = self.get_mongo_collection(bulk)
    except (PyMongoError, InvalidDocument), e:
        # Cannot even connect: fail the whole bulk.
        logger.error('Not saving bulk {0} (moving to failed queue) from application {1} due to connection bad data: {2}'.format(bulk, application_name, e))
        self.save_to_failed_queue(application_name, bulk)
        return
    # Separates objects with different commands. When appears any object with other command, executes current command for all readed objects
    current_command = bulk['data'][0][1]
    while bulk['data']:
        obj, command, original_object = bulk['data'].pop(0)
        if command == current_command:
            set_of_objects.append((obj, command, original_object))
        else:
            # Execute command for all readed objects
            if current_command == 'save':
                result = self.save_to_mongo(collection, set_of_objects)
            elif current_command == 'add':
                result = self.add_in_mongo(collection, set_of_objects)
            # Notify on failure
            if result:
                logger.error('Not saving {0} objects (moving to failed queue) from application {1} due to connection bad data'.format(len(result), application_name))
                to_failed += result
            current_command = command
            set_of_objects = [(obj, command, original_object)]
    # Last set
    if current_command == 'save':
        result = self.save_to_mongo(collection, set_of_objects)
    elif current_command == 'add':
        result = self.add_in_mongo(collection, set_of_objects)
    # Notify on failure
    if result:
        logger.error('Not saving {0} objects (moving to failed queue) from application {1} due to connection bad data'.format(len(result), application_name))
        to_failed += result
    # If an error occurred, it notifies and inserts the required objects
    if to_failed:
        bulk['data'] = to_failed
        self.save_to_failed_queue(application_name, bulk)
def save_to_mongo(self, collection, objs):
    """Persist *objs* via bulk insert, falling back to per-object update.

    Objects repeating an _id within the batch go straight to the update
    path; a DuplicateKeyError on the bulk insert demotes the whole insert
    set to updates.  Returns the (obj, command, original) tuples that
    could not be written.
    """
    to_insert = []
    to_update = []
    to_failed = []
    differents = set()
    while objs:
        full_object = objs.pop(0)
        if '_id' not in full_object[0]:
            to_insert.append(full_object)
        elif full_object[0]['_id'] not in differents:
            to_insert.append(full_object)
            differents.add(full_object[0]['_id'])
        else:
            # Repeated _id inside this batch: must be applied as update.
            to_update.append(full_object)
    # Bulk insert
    try:
        collection.insert(map(lambda x: x[0], to_insert))
    except DuplicateKeyError:
        to_update = to_insert + to_update
    except (PyMongoError, InvalidDocument):
        to_failed.extend(to_insert)
    # One-to-one update
    while to_update:
        full_obj = to_update.pop(0)
        try:
            collection.update({'_id': full_obj[0]['_id']}, full_obj[0])
        except (PyMongoError, InvalidDocument):
            to_failed.extend(to_update)
    # Return unsaved objects
    return to_failed
def create_add_query(self, obj, previous_field='', query=None):
    """Recursively build a MongoDB update document that "adds" *obj*.

    Numeric and boolean leaves become ``$inc`` entries, strings ``$set``,
    lists ``$push`` with ``$each``; nested dicts recurse with a dotted
    field prefix; any other type falls back to ``$set``.  The ``_id``
    field is skipped.

    Bug fixes vs. the previous version:
    - *query* defaulted to a shared mutable ``{}``, so update fragments
      leaked between successive calls; it now defaults to None and a
      fresh dict is created per call (interface unchanged for callers
      that passed a dict explicitly).
    - the fallback branch used ``setdefault('$set', {field: value})``,
      which silently dropped the value whenever ``$set`` already existed.
    """
    if query is None:
        query = {}
    # complex is deliberately excluded: not supported by mongo $inc.
    try:
        numeric_types = (int, long, float, bool)  # noqa: F821 (py2 long)
    except NameError:
        numeric_types = (int, float, bool)
    for field, value in obj.items():
        if field == '_id':
            continue
        type_field = type(value)
        if type_field in numeric_types:
            # Numeric and logical fields perform an addition.
            query.setdefault('$inc', {})[previous_field + field] = value
        elif type_field is str:
            # String fields perform a set.
            query.setdefault('$set', {})[previous_field + field] = value
        elif type_field is list:
            # List fields perform a concatenation.
            query.setdefault('$push', {})[previous_field + field] = {'$each': value}
        elif type_field is dict:
            # Dict fields are flattened with a dotted prefix.
            query = self.create_add_query(value, '{0}{1}.'.format(previous_field, field), query)
        else:
            query.setdefault('$set', {})[previous_field + field] = value
    return query
def add_in_mongo(self, collection, objs):
    """Apply each object as an incremental upsert built by
    create_add_query(); return the tuples that failed."""
    to_failed = []
    # One-to-one update
    while objs:
        full_object = objs.pop(0)
        obj = full_object[0]
        try:
            collection.update({'_id': obj['_id']}, self.create_add_query(obj), upsert=True)
        except (PyMongoError, InvalidDocument):
            to_failed.append(full_object)
    # Return unadded objects and info
    return to_failed
def consume_application(self, application_name):
    """Flush a completed application's bulk to mongo and drop it.

    check_completed_bulks() may flag an application the main loop already
    flushed on the previous iteration, hence the existence check.
    """
    if application_name not in self.bulks:
        return
    self.deal_with_mongo(application_name)
    self.bulks.pop(application_name)
def check_completed_bulks(self):
    """Looping-call task: flag applications whose bulk is ready to flush.

    A bulk is complete when it has reached its configured size or has
    been waiting longer than its expiration window; run() consumes the
    flagged applications.

    Fix: the previous bare ``except: stopApp()`` silently discarded the
    failure (and would even trap KeyboardInterrupt); we now catch only
    Exception and log the traceback before shutting down.
    """
    try:
        for application_name, bulk in self.bulks.items():
            expired = bulk['inserted_date'] + datetime.timedelta(seconds=bulk['bulk_expiration']) <= datetime.datetime.utcnow()
            if len(bulk['data']) >= bulk['bulk_size'] or expired:
                self.completed_bulks.add(application_name)
    except Exception:
        logger.error('Stopping redongo, check_completed_bulks failed: {0}'.format(traceback.format_exc()))
        stopApp()
def check_redis_queue(self):
    """Spill excess objects from the Redis queue into the local disk queue.

    Runs when the Redis queue exceeds redisQueueSize (or the disk queue
    already holds data). A distributed lock ensures only one worker drains
    at a time; it is now released in a `finally` block — with
    auto_renewal=True an un-released lock would otherwise never expire.
    """
    try:
        if self.redis.llen(self.redisQueue) > self.redisQueueSize or self.disk_queue._length > 0:
            to_disk_queue = []
            object_found = False
            lock = self.redis.wait_for_lock(self.lock_key, 60, auto_renewal=True)
            try:
                while self.redis.llen(self.redisQueue) > self.redisQueueSize:
                    try:
                        to_disk_queue.append(self.redis.blpop(self.redisQueue)[1])
                        object_found = True
                    except redis.TimeoutError:
                        pass
                if object_found:
                    # Drain one final batch in a single round trip.
                    to_disk_queue.extend(self.redis.multi_lpop(self.redisQueue, self.popSize - 1))
                self.save_to_disk_queue(to_disk_queue)
            finally:
                # Always release: a failure above would otherwise leave the
                # auto-renewed lock held forever.
                self.redis.release_lock(lock)
    except redis.TimeoutError:
        pass
def save_to_disk_queue(self, objs):
    """Move every object from `objs` (consumed in place) onto the disk queue."""
    while objs:
        self.disk_queue.push(objs.pop(0))
def close_disk_queues(self):
    """Close both disk queues, logging (but never propagating) failures."""
    for queue in (self.disk_queue, self.returned_disk_queue):
        try:
            queue.close()
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # still propagate; also deduplicates the two identical blocks.
            logger.error('Could not close disk queue {0}: {1}'.format(queue._disk_queue_name, traceback.format_exc()))
def sigtermHandler():
    """Shutdown hook: stop the server loop, wait for it, then flush state.

    Registered in main() as a Twisted 'before shutdown' system event trigger.
    """
    global rs
    global run_stopped
    # Ask the server loop (rs.run, running in a reactor thread) to wind down.
    rs.keep_going = False
    logger.info('Waiting for run_stopped')
    # Busy-wait until the worker signals completion.
    # NOTE(review): stopApp() only sets run_stopped when the reactor is
    # already stopped — verify that rs.run's exit path also sets it,
    # otherwise this loop could spin forever.
    while not run_stopped:
        time.sleep(0.1)
    # Persist any in-memory objects back to disk before exiting.
    rs.back_to_disk()
    rs.close_disk_queues()
    logger.info('Exiting program!')
def stopApp():
    """Stop the Twisted reactor; if it is not running, mark the run finished."""
    global run_stopped
    logger.info('Stopping app')
    try:
        reactor.stop()
    except ReactorNotRunning:
        # Reactor already down (or never started): no shutdown trigger will
        # fire, so flag completion directly for sigtermHandler's wait loop.
        run_stopped = True
def closeApp(signum, frame):
    """Signal handler (SIGHUP/SIGTERM/SIGINT/SIGALRM): log and shut down."""
    # Lazy %-style logging args: the message is only formatted if emitted;
    # the rendered text is identical to the old .format() call.
    logger.info('Received signal %s', signum)
    stopApp()
def validate(parser, options, required_options, silent=True):
    """Return True when every option listed in `required_options` has a value.

    `required_options` contains metavar names; each parser option whose
    metavar is listed must carry a truthy value on `options`. When `silent`
    is False, the first missing option is logged before returning False.
    """
    for required_option in parser.option_list:
        # Match by metavar; options without one (e.g. --help) have metavar
        # None and are skipped. Attribute access replaces the old
        # `x.__dict__['metavar']` poking.
        if required_option.metavar not in required_options:
            continue
        if not getattr(options, required_option.dest):
            if not silent:
                logger.error('Option {0} not found'.format(required_option.metavar))
            return False
    return True
def validateRedisClient(parser, options):
    """Check that the plain-Redis connection options are present (logs misses)."""
    return validate(parser, options, ['REDIS', 'REDIS_DB'], silent=False)
def validateSentinelClient(parser, options):
    """Check that the Sentinel connection options are present (logs misses)."""
    return validate(parser, options, ['SENTINEL_SERVERS', 'SENTINEL_NAME'], silent=False)
def validateArgs(parser, options):
    """Validate CLI options; return the connection mode ('Redis' or 'Sentinel').

    Exits the process with -1 when neither mode is fully specified or the
    mandatory queue option is missing.
    """
    # Prefer plain Redis; fall back to Sentinel.
    if validateRedisClient(parser, options):
        mode = 'Redis'
    elif validateSentinelClient(parser, options):
        mode = 'Sentinel'
    else:
        logger.error('Parameters for Redis connection not valid!\n\tUse -r HOST -d DB for Standard Redis mode\n\tUse -n GROUP NAME -S host1 port1 -S host2 port2 .. -S hostN:portN for Sentinel mode')
        sys.exit(-1)
    # The queue name is required in either mode.
    required_options = ['REDIS_QUEUE']
    if not validate(parser, options, required_options, silent=False):
        sys.exit(-1)
    return mode
def main():
    """Parse CLI options, configure logging and signals, start the server."""
    global rs
    global options
    global args
    global logger
    parser = OptionParser(description='Startup options')
    parser.add_option('--redis', '-r', dest='redisIP', help='Redis server IP Address', metavar='REDIS')
    parser.add_option('--redisdb', '-d', dest='redisDB', help='Redis server DB', metavar='REDIS_DB')
    parser.add_option('--redisqueue', '-q', dest='redisQueue', help='Redis Queue', metavar='REDIS_QUEUE')
    parser.add_option('--popsize', '-p', dest='popSize', help='Redis Pop Size', metavar='REDIS_POP_SIZE', default=1000)
    parser.add_option('--port', '-P', dest='redisPort', help='Redis Port', metavar='REDIS_PORT', default=6379)
    parser.add_option('--sentinelservers', '-S', dest='sentinelServers', help='Sentinel Servers (-S host1 port1 -S host2 port2 .. -S hostN portN)', metavar='SENTINEL_SERVERS', action='append', nargs=2)
    parser.add_option('--sentinelname', '-n', dest='sentinelName', help='Sentinel Group Name', metavar='SENTINEL_NAME')
    parser.add_option('--queuesize', '-s', dest='redisQueueSize', help='Max Redis Queue Size', metavar='REDIS_QUEUE_SIZE', default=10000)
    parser.add_option('--diskqueue', '-Q', dest='diskQueue', help='Disk Queue', metavar='DISK_QUEUE', default='redongo_disk_queue')
    parser.add_option('--logger', '-L', dest='logger', help='Logger Usage', metavar='LOGGER_USAGE', default='1')
    parser.add_option('--log', '-l', dest='logLevel', help='Logger Level', metavar='LOG_LEVEL', default='debug')
    (options, args) = parser.parse_args()
    # logging.setLevel also accepts level *names*, so the 'DEBUG' string
    # fallback (used when the option names no logging attribute) is valid.
    logger.setLevel(getattr(logging, options.logLevel.upper(), 'DEBUG'))
    mode = validateArgs(parser, options)
    # With this line the logs are sent to syslog.
    if options.logger != '0':
        handler = logging.handlers.SysLogHandler("/dev/log", FACILITY)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    # Route termination-style signals through the same shutdown path.
    signal.signal(signal.SIGHUP, closeApp)
    signal.signal(signal.SIGTERM, closeApp)
    signal.signal(signal.SIGINT, closeApp)
    signal.signal(signal.SIGALRM, closeApp)
    # Handler for SIGTERM (runs as a Twisted before-shutdown trigger)
    reactor.addSystemEventTrigger('before', 'shutdown', sigtermHandler)
    rs = RedongoServer(mode)
    # Periodic maintenance tasks on the reactor loop, once per second.
    lc = LoopingCall(rs.check_completed_bulks)
    lc.start(1, now=False)
    lc_redis_queue = LoopingCall(rs.check_redis_queue)
    lc_redis_queue.start(1, now=False)
    # The blocking server loop runs in the reactor's thread pool.
    reactor.callInThread(rs.run)
    # Start the reactor; our own signal handlers stay installed.
    reactor.run(installSignalHandlers=False)
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # NOTE(review): exit code 25 looks like a project convention for a
        # Ctrl-C abort — confirm against the deployment scripts.
        sys.exit(25)
| mit |
gangadharkadam/letzerp | erpnext/setup/doctype/global_defaults/global_defaults.py | 3 | 2265 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
"""Global Defaults"""
import frappe
import frappe.defaults
from frappe.utils import cint
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
# Maps frappe default keys -> the Global Defaults fieldname that feeds them.
keydict = {
    # "key in defaults": "key in Global Defaults"
    "print_style": "print_style",
    "fiscal_year": "current_fiscal_year",
    'company': 'default_company',
    'currency': 'default_currency',
    "country": "country",
    'hide_currency_symbol': 'hide_currency_symbol',
    'account_url': 'account_url',
    'disable_rounded_total': 'disable_rounded_total',
}
from frappe.model.document import Document
class GlobalDefaults(Document):
    """Singleton doctype whose fields mirror system-wide frappe defaults."""

    def on_update(self):
        """update defaults"""
        # Push every mapped field value into the frappe defaults table.
        for key in keydict:
            frappe.db.set_default(key, self.get(keydict[key], ''))

        # update year start date and year end date from fiscal_year
        year_start_end_date = frappe.db.sql("""select year_start_date, year_end_date
            from `tabFiscal Year` where name=%s""", self.current_fiscal_year)
        if year_start_end_date:
            ysd = year_start_end_date[0][0] or ''
            yed = year_start_end_date[0][1] or ''
            if ysd and yed:
                frappe.db.set_default('year_start_date', ysd.strftime('%Y-%m-%d'))
                frappe.db.set_default('year_end_date', yed.strftime('%Y-%m-%d'))

        # enable default currency
        if self.default_currency:
            frappe.db.set_value("Currency", self.default_currency, "enabled", 1)

        self.toggle_rounded_total()

        # clear cache so the new defaults are visible immediately
        frappe.clear_cache()

    def get_defaults(self):
        """Return the full frappe defaults dict."""
        return frappe.defaults.get_defaults()

    def toggle_rounded_total(self):
        """Show/hide rounded-total fields on sales doctypes via property setters."""
        self.disable_rounded_total = cint(self.disable_rounded_total)

        # Make property setters to hide rounded total fields
        for doctype in ("Quotation", "Sales Order", "Sales Invoice", "Delivery Note"):
            make_property_setter(doctype, "rounded_total", "hidden", self.disable_rounded_total, "Check")
            make_property_setter(doctype, "rounded_total", "print_hide", 1, "Check")
            make_property_setter(doctype, "rounded_total_export", "hidden", self.disable_rounded_total, "Check")
            make_property_setter(doctype, "rounded_total_export", "print_hide", self.disable_rounded_total, "Check")
| agpl-3.0 |
benoitsteiner/tensorflow-opencl | tensorflow/contrib/distributions/python/kernel_tests/bijectors/chain_test.py | 70 | 3480 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Chain Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops.bijectors.chain import Chain
from tensorflow.contrib.distributions.python.ops.bijectors.exp import Exp
from tensorflow.contrib.distributions.python.ops.bijectors.softmax_centered import SoftmaxCentered
from tensorflow.contrib.distributions.python.ops.bijectors.softplus import Softplus
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency
from tensorflow.python.platform import test
class ChainBijectorTest(test.TestCase):
  """Tests the correctness of the Y = Chain(bij1, bij2, bij3) transformation."""

  def testBijector(self):
    # Chain((Exp, Softplus)).forward(x) == exp(softplus(x)) == 1 + e^x,
    # as pinned by the assertions below.
    with self.test_session():
      chain = Chain((Exp(event_ndims=1), Softplus(event_ndims=1)))
      self.assertEqual("chain_of_exp_of_softplus", chain.name)
      x = np.asarray([[[1., 2.],
                       [2., 3.]]])
      self.assertAllClose(1. + np.exp(x), chain.forward(x).eval())
      self.assertAllClose(np.log(x - 1.), chain.inverse(x).eval())
      self.assertAllClose(
          -np.sum(np.log(x - 1.), axis=2),
          chain.inverse_log_det_jacobian(x).eval())
      self.assertAllClose(
          np.sum(x, axis=2), chain.forward_log_det_jacobian(x).eval())

  def testBijectorIdentity(self):
    # An empty Chain must behave as the identity bijector.
    with self.test_session():
      chain = Chain()
      self.assertEqual("identity", chain.name)
      x = np.asarray([[[1., 2.],
                       [2., 3.]]])
      self.assertAllClose(x, chain.forward(x).eval())
      self.assertAllClose(x, chain.inverse(x).eval())
      self.assertAllClose(0., chain.inverse_log_det_jacobian(x).eval())
      self.assertAllClose(0., chain.forward_log_det_jacobian(x).eval())

  def testScalarCongruency(self):
    with self.test_session():
      bijector = Chain((Exp(), Softplus()))
      assert_scalar_congruency(
          bijector, lower_x=1e-3, upper_x=1.5, rtol=0.05)

  def testShapeGetters(self):
    # SoftmaxCentered grows the event shape by one; chaining a rank-0 and a
    # rank-1 instance maps scalar events to length-3 vectors.
    with self.test_session():
      bijector = Chain([
          SoftmaxCentered(
              event_ndims=1, validate_args=True),
          SoftmaxCentered(
              event_ndims=0, validate_args=True)
      ])
      x = tensor_shape.TensorShape([])
      y = tensor_shape.TensorShape([2 + 1])
      self.assertAllEqual(y, bijector.forward_event_shape(x))
      self.assertAllEqual(
          y.as_list(),
          bijector.forward_event_shape_tensor(x.as_list()).eval())
      self.assertAllEqual(x, bijector.inverse_event_shape(y))
      self.assertAllEqual(
          x.as_list(),
          bijector.inverse_event_shape_tensor(y.as_list()).eval())
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
Mazecreator/tensorflow | tensorflow/python/keras/_impl/keras/engine/__init__.py | 34 | 1403 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Keras Engine: graph topology and training loop functionality.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras.engine.topology import get_source_inputs
from tensorflow.python.keras._impl.keras.engine.topology import Input
from tensorflow.python.keras._impl.keras.engine.topology import InputLayer
from tensorflow.python.keras._impl.keras.engine.topology import InputSpec
from tensorflow.python.keras._impl.keras.engine.topology import Layer
from tensorflow.python.keras._impl.keras.engine.training import Model
# Note: topology.Node is an internal class,
# it isn't meant to be used by Keras users.
| apache-2.0 |
gingerwizard/examples | Miscellaneous/custom_tile_maps/generate_random_data.py | 3 | 2209 | import argparse
import random
from elasticsearch import Elasticsearch
from elasticsearch.helpers import streaming_bulk
# Bounding box for random centroid placement.
# NOTE(review): latitudes 9-18 paired with longitudes 12-41 — confirm the
# intended region; the lat/lon ranges look like they may be swapped.
lat_lower = 9.0
lat_upper = 18.0
lon_upper = 41.0
lon_lower = 12.0

# Command-line options: ES connection, credentials, and data-volume knobs.
parser = argparse.ArgumentParser()
parser.add_argument("--es_host", default="localhost:9200", help="ES Connection String")
parser.add_argument("--es_user", default="elastic", help="ES User")
parser.add_argument("--es_password", default="changeme", help="ES Password")
parser.add_argument("--num_centroids", default=10, type=int, help="Number of Centroids")
parser.add_argument("--min_per_centroid", default=10, type=int, help="Min doc per centroid")
parser.add_argument("--max_per_centroid", default=5000, type=int, help="Max doc per centroid")
options = parser.parse_args()

# Index definition: single shard, no replicas, geo_point mapping for 'location'.
mapping = {
    "mappings": {
        "point": {
            "properties": {
                "location": {
                    "type": "geo_point"
                }
            }
        }
    },
    "settings": {
        "index": {
            "number_of_shards": "1",
            "number_of_replicas": "0",
            "refresh_interval":"1s"
        }
    }
}
def generate_documents(num_centroids, min_docs, max_docs, lat_bounds=None, lon_bounds=None):
    """Yield geo_point documents clustered around random centroids.

    For each of `num_centroids` centroids a random (lat, lon) inside the
    bounding box is chosen, and a random number of identical documents in
    [min_docs, max_docs] is yielded for it.

    `lat_bounds`/`lon_bounds` are optional (lower, upper) tuples; when
    omitted they fall back to the module-level bounding-box constants,
    preserving the original behavior.
    """
    if lat_bounds is None:
        lat_bounds = (lat_lower, lat_upper)
    if lon_bounds is None:
        lon_bounds = (lon_lower, lon_upper)
    for centroid_idx in range(num_centroids):
        print("Producing docs for centroid %s" % centroid_idx)
        lat = random.uniform(*lat_bounds)
        lon = random.uniform(*lon_bounds)
        num_docs = random.randint(min_docs, max_docs)
        # Bug fix: the arguments were swapped, printing
        # "<centroid> docs in centroid <count>".
        print("%s docs in centroid %s" % (num_docs, centroid_idx))
        # Renamed the inner loop variable: it used to shadow the outer `i`.
        for _ in range(num_docs):
            yield {
                "location": {
                    "lat": lat,
                    "lon": lon
                }
            }
# Connect with basic auth and rebuild the target index from scratch.
es = Elasticsearch(hosts=[options.es_host], http_auth = (options.es_user, options.es_password))
if es.indices.exists("elastic_on_simple"):
    es.indices.delete("elastic_on_simple")
es.indices.create("elastic_on_simple",body=mapping)
cnt = 0
print("Indexing docs for %s centroids"%options.num_centroids)
# chunk_size=1 keeps memory flat at the cost of one bulk request per doc.
for _ in streaming_bulk(
        es,
        generate_documents(options.num_centroids,options.min_per_centroid,options.max_per_centroid),
        chunk_size=1,
        doc_type='point',
        index='elastic_on_simple'
        ):
    cnt += 1
    # Progress heartbeat every 1000 documents.
    if cnt % 1000 == 0:
        print("Indexed %s"%cnt)
print("Indexed %s"%cnt)
| apache-2.0 |
kaapstorm/trc_me | src/trc_me/accounts/models.py | 1 | 1727 | # This file is part of trc.me.
#
# trc.me is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# trc.me is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# trc.me. If not, see <http://www.gnu.org/licenses/>.
#
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from stdimage import StdImageField
class UserProfile(models.Model):
    """Per-user profile data attached 1:1 to Django's auth User."""

    user = models.ForeignKey(User, unique=True)
    # NOTE(review): presumably flipped once the address is confirmed — the
    # confirmation flow is not visible in this file; verify.
    has_valid_email = models.BooleanField(default=False)
    # Accumulated score; starts at zero.
    points = models.PositiveIntegerField(default=0)
    # Profile picture with an auto-generated thumbnail.
    img = StdImageField(
        #storage=restricted_fs,
        upload_to='profile/%Y/%m/%d/',
        size=(128, 181), # 128 * sqrt(2) = 181
        thumbnail_size=(48, 48, True),
        blank=True)
    notify_by_email = models.BooleanField(default=True)

    def __unicode__(self):
        return unicode(self.user)

    def get_absolute_url(self):
        # Local import — likely to avoid a circular import with the views
        # module (verify).
        from trc_me.web.views import view_user
        return reverse(view_user, kwargs={'username': self.user.username})
def create_profile(sender, **kw):
    """post_save hook: attach a fresh UserProfile to every newly created User."""
    user = kw["instance"]
    # Only on first save — updates to an existing User are ignored.
    if kw["created"]:
        UserProfile(user=user).save()

post_save.connect(create_profile, sender=User)
| agpl-3.0 |
biodrone/plex-desk | desk/flask/lib/python3.4/site-packages/sqlparse/formatter.py | 118 | 5113 | # Copyright (C) 2008 Andi Albrecht, albrecht.andi@gmail.com
#
# This module is part of python-sqlparse and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php.
"""SQL formatter"""
from sqlparse import filters
from sqlparse.exceptions import SQLParseError
def validate_options(options):
    """Validates options.

    Mutates and returns `options`, normalizing derived keys (indent_char,
    truncate_char, integer coercions). Raises SQLParseError for any invalid
    value.
    """
    kwcase = options.get('keyword_case')
    if kwcase not in [None, 'upper', 'lower', 'capitalize']:
        raise SQLParseError('Invalid value for keyword_case: %r' % kwcase)

    idcase = options.get('identifier_case')
    if idcase not in [None, 'upper', 'lower', 'capitalize']:
        raise SQLParseError('Invalid value for identifier_case: %r' % idcase)

    ofrmt = options.get('output_format')
    if ofrmt not in [None, 'sql', 'python', 'php']:
        raise SQLParseError('Unknown output format: %r' % ofrmt)

    strip_comments = options.get('strip_comments', False)
    if strip_comments not in [True, False]:
        raise SQLParseError('Invalid value for strip_comments: %r'
                            % strip_comments)

    strip_ws = options.get('strip_whitespace', False)
    if strip_ws not in [True, False]:
        raise SQLParseError('Invalid value for strip_whitespace: %r'
                            % strip_ws)

    truncate_strings = options.get('truncate_strings')
    if truncate_strings is not None:
        try:
            truncate_strings = int(truncate_strings)
        except (ValueError, TypeError):
            raise SQLParseError('Invalid value for truncate_strings: %r'
                                % truncate_strings)
        # Truncating to a single character (or fewer) makes no sense.
        if truncate_strings <= 1:
            raise SQLParseError('Invalid value for truncate_strings: %r'
                                % truncate_strings)
        options['truncate_strings'] = truncate_strings
        options['truncate_char'] = options.get('truncate_char', '[...]')

    reindent = options.get('reindent', False)
    if reindent not in [True, False]:
        raise SQLParseError('Invalid value for reindent: %r'
                            % reindent)
    elif reindent:
        # Reindenting only makes sense on whitespace-normalized input.
        options['strip_whitespace'] = True

    indent_tabs = options.get('indent_tabs', False)
    if indent_tabs not in [True, False]:
        raise SQLParseError('Invalid value for indent_tabs: %r' % indent_tabs)
    options['indent_char'] = '\t' if indent_tabs else ' '

    indent_width = options.get('indent_width', 2)
    try:
        indent_width = int(indent_width)
    except (TypeError, ValueError):
        raise SQLParseError('indent_width requires an integer')
    if indent_width < 1:
        # Typo fix: message previously read "an positive integer".
        raise SQLParseError('indent_width requires a positive integer')
    options['indent_width'] = indent_width

    right_margin = options.get('right_margin')
    if right_margin is not None:
        try:
            right_margin = int(right_margin)
        except (TypeError, ValueError):
            raise SQLParseError('right_margin requires an integer')
        if right_margin < 10:
            raise SQLParseError('right_margin requires an integer > 10')
        options['right_margin'] = right_margin

    return options
def build_filter_stack(stack, options):
    """Setup and return a filter stack.

    Args:
      stack: :class:`~sqlparse.filters.FilterStack` instance
      options: Dictionary with options validated by validate_options.
    """
    # Token filters run on the raw token stream, before grouping.
    if options.get('keyword_case', None):
        stack.preprocess.append(
            filters.KeywordCaseFilter(options['keyword_case']))

    if options.get('identifier_case', None):
        stack.preprocess.append(
            filters.IdentifierCaseFilter(options['identifier_case']))

    if options.get('truncate_strings', None) is not None:
        stack.preprocess.append(filters.TruncateStringFilter(
            width=options['truncate_strings'], char=options['truncate_char']))

    # Statement-level filters need grouped tokens; each one enables grouping.
    if options.get('strip_comments', False):
        stack.enable_grouping()
        stack.stmtprocess.append(filters.StripCommentsFilter())

    if (options.get('strip_whitespace', False)
            or options.get('reindent', False)):
        stack.enable_grouping()
        stack.stmtprocess.append(filters.StripWhitespaceFilter())

    if options.get('reindent', False):
        stack.enable_grouping()
        stack.stmtprocess.append(
            filters.ReindentFilter(char=options['indent_char'],
                                   width=options['indent_width']))

    if options.get('right_margin', False):
        stack.enable_grouping()
        stack.stmtprocess.append(
            filters.RightMarginFilter(width=options['right_margin']))

    # Serializer: converts the statement back to text in the chosen dialect.
    if options.get('output_format'):
        frmt = options['output_format']
        if frmt.lower() == 'php':
            fltr = filters.OutputPHPFilter()
        elif frmt.lower() == 'python':
            fltr = filters.OutputPythonFilter()
        else:
            fltr = None
        if fltr is not None:
            stack.postprocess.append(fltr)

    return stack
| mit |
h3biomed/ansible | lib/ansible/modules/cloud/google/gcp_compute_address_facts.py | 9 | 6882 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_address_facts
description:
- Gather facts for GCP Address
short_description: Gather facts for GCP Address
version_added: 2.7
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
filters:
description:
- A list of filter value pairs. Available filters are listed here U(https://cloud.google.com/sdk/gcloud/reference/topic/filters.)
- Each additional filter in the list will be added as an AND condition (filter1
and filter2).
region:
description:
- URL of the region where the regional address resides.
- This field is not applicable to global addresses.
required: true
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: " a address facts"
gcp_compute_address_facts:
region: us-west1
filters:
- name = test_object
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: facts
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
address:
description:
- The static external IP address represented by this resource. Only IPv4 is
supported. An address may only be specified for INTERNAL address types. The
IP address must be inside the specified subnetwork, if any.
returned: success
type: str
addressType:
description:
- The type of address to reserve, either INTERNAL or EXTERNAL.
- If unspecified, defaults to EXTERNAL.
returned: success
type: str
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource.
returned: success
type: int
name:
description:
- Name of the resource. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match the
regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character
must be a lowercase letter, and all following characters must be a dash, lowercase
letter, or digit, except the last character, which cannot be a dash.
returned: success
type: str
networkTier:
description:
- 'The networking tier used for configuring this address. This field can take
the following values: PREMIUM or STANDARD. If this field is not specified,
it is assumed to be PREMIUM.'
returned: success
type: str
subnetwork:
description:
- The URL of the subnetwork in which to reserve the address. If an IP address
is specified, it must be within the subnetwork's IP range.
- This field can only be used with INTERNAL type with GCE_ENDPOINT/DNS_RESOLVER
purposes.
returned: success
type: dict
users:
description:
- The URLs of the resources that are using this address.
returned: success
type: list
region:
description:
- URL of the region where the regional address resides.
- This field is not applicable to global addresses.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
    """Module entry point: list GCP Address resources matching the filters."""
    module = GcpModule(argument_spec=dict(filters=dict(type='list', elements='str'), region=dict(required=True, type='str')))

    # Default OAuth scope when the caller did not provide one.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/compute']

    items = fetch_list(module, collection(module), query_options(module.params['filters']))
    # The API nests results under 'items'; normalize missing/empty to [].
    if items.get('items'):
        items = items.get('items')
    else:
        items = []
    return_value = {'items': items}
    module.exit_json(**return_value)
def collection(module):
    """Return the regional Addresses collection URL for the module's project/region."""
    url_template = "https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/addresses"
    return url_template.format(**module.params)
def fetch_list(module, link, query):
    """GET `link` with the given filter expression; return the parsed JSON body."""
    auth = GcpSession(module, 'compute')
    response = auth.get(link, params={'filter': query})
    return return_if_object(module, response)
def query_options(filters):
    """Join filter strings into a single GCP filter expression.

    Space-separated filters are AND-ed by the API; each filter is
    parenthesized unless it already starts with '(' and ends with ')'.
    Returns '' for an empty or None filter list.
    """
    if not filters:
        return ''

    if len(filters) == 1:
        return filters[0]

    queries = []
    for f in filters:
        # For multiple queries, all queries should have ()
        # NOTE(review): this wraps only when *neither* end already has a
        # paren, so mixed inputs like 'a)' pass through unwrapped — kept
        # as-is to preserve existing behavior.
        if f[0] != '(' and f[-1] != ')':
            # Dropped a no-op ''.join(f): f is already a string.
            queries.append("(%s)" % f)
        else:
            queries.append(f)
    return ' '.join(queries)
def return_if_object(module, response):
    """Decode a GCP API response.

    Returns None for 404/204; fails the Ansible module on HTTP errors,
    invalid JSON, or an embedded API error payload; otherwise returns the
    parsed JSON dict.
    """
    # If not found, return nothing.
    if response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        # json.decoder.JSONDecodeError only exists on Python 3; fall back
        # to ValueError (its base / the Python 2 equivalent).
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result
# Ansible invokes the module file directly.
if __name__ == "__main__":
    main()
| gpl-3.0 |
admetricks/phantomjs | src/breakpad/src/tools/gyp/gyptest.py | 137 | 7245 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__doc__ = """
gyptest.py -- test runner for GYP tests.
"""
import optparse
import os
import shlex
import subprocess
import sys
class CommandRunner:
  """
  Executor class for commands, including "commands" implemented by
  Python functions.

  A command may be:
    * a string -- substituted, shlex-split, and run via subprocess
                  (a leading 'cd' becomes an os.chdir call),
    * a list   -- argv passed straight to subprocess,
    * a tuple  -- (python_function, args...) executed in-process.
  """
  verbose = True
  active = True

  def __init__(self, dictionary=None):
    # Avoid the mutable-default-argument trap (`dictionary={}` was shared
    # across instances); semantics are unchanged because subst() treats an
    # empty dict as "no substitution".
    self.subst_dictionary(dictionary if dictionary is not None else {})

  def subst_dictionary(self, dictionary):
    self._subst_dictionary = dictionary

  def subst(self, string, dictionary=None):
    """
    Substitutes (via the format operator) the values in the specified
    dictionary into the specified command.

    The command can be an (action, string) tuple.  In all cases, we
    perform substitution on strings and don't worry if something isn't
    a string.  (It's probably a Python function to be executed.)
    """
    if dictionary is None:
      dictionary = self._subst_dictionary
    if dictionary:
      try:
        string = string % dictionary
      except TypeError:
        # Not a %-formattable string (or args don't fit): leave unchanged.
        pass
    return string

  def display(self, command, stdout=None, stderr=None):
    """Echo the command about to run (honors the class-level verbose flag)."""
    if not self.verbose:
      return
    if type(command) == type(()):
      func = command[0]
      args = command[1:]
      s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
    elif type(command) == type([]):
      # BUG FIX: these used to be independent `if`s with the final `else`
      # attached to the list check, so the tuple rendering above was always
      # clobbered and display() then crashed on s.endswith over a tuple.
      # TODO: quote arguments containing spaces
      # TODO: handle meta characters?
      s = ' '.join(command)
    else:
      s = self.subst(command)
    if not s.endswith('\n'):
      s += '\n'
    sys.stdout.write(s)
    sys.stdout.flush()

  def execute(self, command, stdout=None, stderr=None):
    """
    Executes a single command.
    """
    if not self.active:
      return 0
    if type(command) == type(''):
      command = self.subst(command)
      cmdargs = shlex.split(command)
      if cmdargs[0] == 'cd':
        # 'cd' cannot usefully run in a subprocess; do it in-process.
        command = (os.chdir,) + tuple(cmdargs[1:])
    if type(command) == type(()):
      func = command[0]
      args = command[1:]
      return func(*args)
    else:
      if stdout is sys.stdout:
        # Same as passing sys.stdout, except python2.4 doesn't fail on it.
        subout = None
      else:
        # Open pipe for anything else so Popen works on python2.4.
        subout = subprocess.PIPE
      if stderr is sys.stderr:
        # Same as passing sys.stderr, except python2.4 doesn't fail on it.
        suberr = None
      elif stderr is None:
        # Merge with stdout if stderr isn't specified.
        suberr = subprocess.STDOUT
      else:
        # Open pipe for anything else so Popen works on python2.4.
        suberr = subprocess.PIPE
      p = subprocess.Popen(command,
                           shell=(sys.platform == 'win32'),
                           stdout=subout,
                           stderr=suberr)
      p.wait()
      if stdout is None:
        self.stdout = p.stdout.read()
      elif stdout is not sys.stdout:
        stdout.write(p.stdout.read())
      if stderr not in (None, sys.stderr):
        stderr.write(p.stderr.read())
      return p.returncode

  def run(self, command, display=None, stdout=None, stderr=None):
    """
    Runs a single command, displaying it first.
    """
    if display is None:
      display = command
    self.display(display)
    return self.execute(command, stdout, stderr)
class Unbuffered:
  """File-object proxy that flushes after every write, so output from this
  process interleaves correctly with subprocess output."""

  def __init__(self, stream):
    self.fp = stream

  def write(self, data):
    self.fp.write(data)
    self.fp.flush()

  def __getattr__(self, name):
    # Delegate everything else to the wrapped stream.
    return getattr(self.fp, name)
# Replace the standard streams with self-flushing wrappers so test runner
# output and subprocess output interleave in order.
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
def find_all_gyptest_files(directory):
  """Return a sorted list of every gyptest*.py under `directory`,
  skipping .svn metadata directories."""
  found = []
  for root, dirs, files in os.walk(directory):
    if '.svn' in dirs:
      dirs.remove('.svn')  # prune the walk in place
    found.extend(os.path.join(root, name)
                 for name in files
                 if name.startswith('gyptest') and name.endswith('.py'))
  found.sort()
  return found
def main(argv=None):
  """CLI driver: discover gyptest files, run each once per format, report.

  (Python 2 source — note the `print test` statement below.)
  """
  if argv is None:
    argv = sys.argv

  usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option("-a", "--all", action="store_true",
            help="run all tests")
  parser.add_option("-C", "--chdir", action="store", default=None,
            help="chdir to the specified directory")
  parser.add_option("-f", "--format", action="store", default='',
            help="run tests with the specified formats")
  parser.add_option("-l", "--list", action="store_true",
            help="list available tests and exit")
  parser.add_option("-n", "--no-exec", action="store_true",
            help="no execute, just print the command line")
  parser.add_option("--passed", action="store_true",
            help="report passed tests")
  parser.add_option("--path", action="append", default=[],
            help="additional $PATH directory")
  parser.add_option("-q", "--quiet", action="store_true",
            help="quiet, don't print test command lines")
  opts, args = parser.parse_args(argv[1:])

  if opts.chdir:
    os.chdir(opts.chdir)

  if opts.path:
    os.environ['PATH'] += ':' + ':'.join(opts.path)

  if not args:
    if not opts.all:
      sys.stderr.write('Specify -a to get all tests.\n')
      return 1
    args = ['test']

  # Expand directory arguments into their gyptest files.
  tests = []
  for arg in args:
    if os.path.isdir(arg):
      tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
    else:
      tests.append(arg)

  if opts.list:
    for test in tests:
      print test
    sys.exit(0)

  CommandRunner.verbose = not opts.quiet
  CommandRunner.active = not opts.no_exec
  cr = CommandRunner()

  os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
  if not opts.quiet:
    sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])

  passed = []
  failed = []
  no_result = []

  if opts.format:
    format_list = opts.format.split(',')
  else:
    # TODO: not duplicate this mapping from pylib/gyp/__init__.py
    format_list = [ {
      'freebsd7': 'make',
      'freebsd8': 'make',
      'cygwin':   'msvs',
      'win32':    'msvs',
      'linux2':   'scons',
      'darwin':   'xcode',
    }[sys.platform] ]

  # `format` shadows the builtin here; kept as-is from the original.
  for format in format_list:
    os.environ['TESTGYP_FORMAT'] = format
    if not opts.quiet:
      sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)

    for test in tests:
      status = cr.run([sys.executable, test],
              stdout=sys.stdout,
              stderr=sys.stderr)
      # Exit status 2 means "no result" by convention; any other non-zero
      # status is a failure.
      if status == 2:
        no_result.append(test)
      elif status:
        failed.append(test)
      else:
        passed.append(test)

  if not opts.quiet:
    def report(description, tests):
      if tests:
        if len(tests) == 1:
          sys.stdout.write("\n%s the following test:\n" % description)
        else:
          fmt = "\n%s the following %d tests:\n"
          sys.stdout.write(fmt % (description, len(tests)))
        sys.stdout.write("\t" + "\n\t".join(tests) + "\n")

    if opts.passed:
      report("Passed", passed)
    report("Failed", failed)
    report("No result from", no_result)

  if failed:
    return 1
  else:
    return 0
# Run the gyp test driver and propagate its exit status to the shell.
if __name__ == "__main__":
  sys.exit(main())
| bsd-3-clause |
zycdragonball/tensorflow | tensorflow/python/tools/print_selective_registration_header_test.py | 60 | 6671 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for print_selective_registration_header."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.tools import selective_registration_header_lib
# Note that this graph def is not valid to be loaded - its inputs are not
# assigned correctly in all cases.
GRAPH_DEF_TXT = """
node: {
name: "node_1"
op: "Reshape"
input: [ "none", "none" ]
device: "/cpu:0"
attr: { key: "T" value: { type: DT_FLOAT } }
}
node: {
name: "node_2"
op: "MatMul"
input: [ "none", "none" ]
device: "/cpu:0"
attr: { key: "T" value: { type: DT_FLOAT } }
attr: { key: "transpose_a" value: { b: false } }
attr: { key: "transpose_b" value: { b: false } }
}
node: {
name: "node_3"
op: "MatMul"
input: [ "none", "none" ]
device: "/cpu:0"
attr: { key: "T" value: { type: DT_DOUBLE } }
attr: { key: "transpose_a" value: { b: false } }
attr: { key: "transpose_b" value: { b: false } }
}
"""
GRAPH_DEF_TXT_2 = """
node: {
name: "node_4"
op: "BiasAdd"
input: [ "none", "none" ]
device: "/cpu:0"
attr: { key: "T" value: { type: DT_FLOAT } }
}
"""
class PrintOpFilegroupTest(test.TestCase):
  """Tests for selective_registration_header_lib's op/kernel extraction."""
  def setUp(self):
    """Remember the running script's basename; it appears in the
    '// This file was autogenerated by ...' line of expected headers."""
    _, self.script_name = os.path.split(sys.argv[0])
  def WriteGraphFiles(self, graphs):
    """Serialize each GraphDef to a .pb file in the temp dir; return paths."""
    fnames = []
    for i, graph in enumerate(graphs):
      fname = os.path.join(self.get_temp_dir(), 'graph%s.pb' % i)
      with gfile.GFile(fname, 'wb') as f:
        f.write(graph.SerializeToString())
      fnames.append(fname)
    return fnames
  def testGetOps(self):
    """get_ops_and_kernels returns the sorted (op, kernel-class) pairs for
    the graphs, and the result is unchanged after clearing device fields."""
    default_ops = 'NoOp:NoOp,_Recv:RecvOp,_Send:SendOp'
    graphs = [
        text_format.Parse(d, graph_pb2.GraphDef())
        for d in [GRAPH_DEF_TXT, GRAPH_DEF_TXT_2]
    ]
    ops_and_kernels = selective_registration_header_lib.get_ops_and_kernels(
        'rawproto', self.WriteGraphFiles(graphs), default_ops)
    self.assertListEqual(
        [
            ('BiasAdd', 'BiasOp<CPUDevice, float>'),  #
            ('MatMul', 'MatMulOp<CPUDevice, double, false >'),  #
            ('MatMul', 'MatMulOp<CPUDevice, float, false >'),  #
            ('NoOp', 'NoOp'),  #
            ('Reshape', 'ReshapeOp'),  #
            ('_Recv', 'RecvOp'),  #
            ('_Send', 'SendOp'),  #
        ],
        ops_and_kernels)
    # Clearing the device placement must not change the selected kernels.
    graphs[0].node[0].ClearField('device')
    graphs[0].node[2].ClearField('device')
    ops_and_kernels = selective_registration_header_lib.get_ops_and_kernels(
        'rawproto', self.WriteGraphFiles(graphs), default_ops)
    self.assertListEqual(
        [
            ('BiasAdd', 'BiasOp<CPUDevice, float>'),  #
            ('MatMul', 'MatMulOp<CPUDevice, double, false >'),  #
            ('MatMul', 'MatMulOp<CPUDevice, float, false >'),  #
            ('NoOp', 'NoOp'),  #
            ('Reshape', 'ReshapeOp'),  #
            ('_Recv', 'RecvOp'),  #
            ('_Send', 'SendOp'),  #
        ],
        ops_and_kernels)
  def testAll(self):
    """With default_ops='all' the header registers every op and kernel."""
    default_ops = 'all'
    graphs = [
        text_format.Parse(d, graph_pb2.GraphDef())
        for d in [GRAPH_DEF_TXT, GRAPH_DEF_TXT_2]
    ]
    ops_and_kernels = selective_registration_header_lib.get_ops_and_kernels(
        'rawproto', self.WriteGraphFiles(graphs), default_ops)
    header = selective_registration_header_lib.get_header_from_ops_and_kernels(
        ops_and_kernels, include_all_ops_and_kernels=True)
    self.assertListEqual(
        [
            '// This file was autogenerated by %s' % self.script_name,
            '#ifndef OPS_TO_REGISTER',  #
            '#define OPS_TO_REGISTER',  #
            '#define SHOULD_REGISTER_OP(op) true',  #
            '#define SHOULD_REGISTER_OP_KERNEL(clz) true',  #
            '#define SHOULD_REGISTER_OP_GRADIENT true',  #
            '#endif'
        ],
        header.split('\n'))
    # get_header must agree with the two-step get_ops_and_kernels +
    # get_header_from_ops_and_kernels path.
    self.assertListEqual(
        header.split('\n'),
        selective_registration_header_lib.get_header(
            self.WriteGraphFiles(graphs), 'rawproto', default_ops).split('\n'))
  def testGetSelectiveHeader(self):
    """The header generated for a single-op graph matches exactly,
    including the constexpr string-compare helpers."""
    default_ops = ''
    graphs = [text_format.Parse(GRAPH_DEF_TXT_2, graph_pb2.GraphDef())]
    expected = '''// This file was autogenerated by %s
#ifndef OPS_TO_REGISTER
#define OPS_TO_REGISTER
namespace {
constexpr const char* skip(const char* x) {
  return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x;
}
constexpr bool isequal(const char* x, const char* y) {
  return (*skip(x) && *skip(y))
             ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1))
             : (!*skip(x) && !*skip(y));
}
template<int N>
struct find_in {
  static constexpr bool f(const char* x, const char* const y[N]) {
    return isequal(x, y[0]) || find_in<N - 1>::f(x, y + 1);
  }
};
template<>
struct find_in<0> {
  static constexpr bool f(const char* x, const char* const y[]) {
    return false;
  }
};
}  // end namespace
constexpr const char* kNecessaryOpKernelClasses[] = {
"BiasOp<CPUDevice, float>",
};
#define SHOULD_REGISTER_OP_KERNEL(clz) (find_in<sizeof(kNecessaryOpKernelClasses) / sizeof(*kNecessaryOpKernelClasses)>::f(clz, kNecessaryOpKernelClasses))
constexpr inline bool ShouldRegisterOp(const char op[]) {
  return false
     || isequal(op, "BiasAdd")
  ;
}
#define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op)
#define SHOULD_REGISTER_OP_GRADIENT false
#endif''' % self.script_name
    header = selective_registration_header_lib.get_header(
        self.WriteGraphFiles(graphs), 'rawproto', default_ops)
    print(header)
    self.assertListEqual(expected.split('\n'), header.split('\n'))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
danielmellado/tempest | tempest/common/fixed_network.py | 2 | 5474 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_log import log as logging
from tempest_lib.common.utils import misc as misc_utils
from tempest_lib import exceptions as lib_exc
from tempest import config
from tempest import exceptions
CONF = config.CONF
LOG = logging.getLogger(__name__)
def get_network_from_name(name, compute_networks_client):
    """Get a full network dict from just a network name

    :param str name: the name of the network to use
    :param NetworksClientJSON compute_networks_client: The network client
        object to use for making the network lists api request
    :return: The full dictionary for the network in question
    :rtype: dict
    :raises InvalidConfiguration: If the name provided is invalid, the
        networks list returns a 404, there are no found networks, or the
        found network is invalid
    """
    caller = misc_utils.find_test_caller()

    def _with_caller(msg):
        # Prefix messages with the calling test (when one can be found) so
        # log lines are attributable; previously this 3-line pattern was
        # duplicated at every log site.
        return '(%s) %s' % (caller, msg) if caller else msg

    if not name:
        raise exceptions.InvalidConfiguration()
    try:
        networks = compute_networks_client.list_networks(name=name)
    except lib_exc.NotFound:
        # In case of nova network, if the fixed_network_name is not
        # owned by the tenant, and the network client is not an admin
        # one, list_networks will not find it
        LOG.info(_with_caller(
            'Unable to find network %s. '
            'Starting instance without specifying a network.' % name))
        raise exceptions.InvalidConfiguration()
    # Check that exactly one network matched, else raise InvalidConfiguration
    if len(networks) == 1:
        # Single match: no need to sort a one-element list.
        network = networks[0]
    elif len(networks) > 1:
        LOG.warn(_with_caller(
            "Network with name: %s had multiple matching networks in the "
            "list response: %s\n Unable to specify a single network" % (
                name, networks)))
        raise exceptions.InvalidConfiguration()
    else:
        LOG.warn(_with_caller("Network with name: %s not found" % name))
        raise exceptions.InvalidConfiguration()
    # To be consistent between neutron and nova network always use name even
    # if label is used in the api response. If neither is present then the
    # returned network is invalid.
    name = network.get('name') or network.get('label')
    if not name:
        LOG.warn(_with_caller(
            "Network found from list doesn't contain a valid name or label"))
        raise exceptions.InvalidConfiguration()
    network['name'] = name
    return network
def get_tenant_network(creds_provider, compute_networks_client):
    """Get a network usable by the primary tenant

    :param creds_provider: instance of credential provider
    :param compute_networks_client: compute network client. We want to have
        the compute network client so we can have use a common approach for
        both neutron and nova-network cases. If this is not an admin network
        client, set_network_kwargs might fail in case fixed_network_name
        is the network to be used, and it's not visible to the tenant
    :return a dict with 'id' and 'name' of the network
    """
    caller = misc_utils.find_test_caller()
    fixed_network_name = CONF.compute.fixed_network_name
    primary_creds = creds_provider.get_primary_creds()
    network = getattr(primary_creds, 'network', None)
    usable = bool(network) and bool(network.get('name'))
    if not usable and fixed_network_name:
        # Fall back to the configured fixed network when the credential
        # provider did not hand us a usable one.
        debug_msg = ('No valid network provided or created, defaulting to '
                     'fixed_network_name')
        if caller:
            debug_msg = '(%s) %s' % (caller, debug_msg)
        LOG.debug(debug_msg)
        try:
            network = get_network_from_name(fixed_network_name,
                                            compute_networks_client)
        except exceptions.InvalidConfiguration:
            network = {}
    info_msg = 'Found network %s available for tenant' % network
    if caller:
        info_msg = '(%s) %s' % (caller, info_msg)
    LOG.info(info_msg)
    return network
def set_networks_kwarg(network, kwargs=None):
    """Set 'networks' kwargs for a server create if missing

    :param network: dict of network to be used with 'id' and 'name'
    :param kwargs: server create kwargs to be enhanced
    :return: new dict of kwargs updated to include networks
    """
    params = copy.copy(kwargs) or {}
    if 'networks' in params:
        # Caller already picked networks explicitly; leave them untouched.
        return params
    if network:
        if 'id' in network:
            params.update({"networks": [{'uuid': network['id']}]})
        else:
            # Fixed: the old two-part literal rendered "did not  contain"
            # with a double space.
            LOG.warn('The provided network dict: %s was invalid and did not '
                     'contain an id' % network)
    return params
| apache-2.0 |
gdi2290/django | tests/validation/test_picklable.py | 576 | 2010 | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):
    def test_validationerror_is_picklable(self):
        """ValidationError instances survive a pickle round trip intact."""
        def roundtrip(error):
            # Serialize and immediately deserialize the exception.
            return pickle.loads(pickle.dumps(error))

        original = ValidationError('a', code='something')
        unpickled = roundtrip(original)
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)

        original = ValidationError('a', code='something')
        unpickled = roundtrip(ValidationError(original))
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)

        original = ValidationError(['a', 'b'])
        unpickled = roundtrip(original)
        for pos in (0, 1):
            self.assertEqual(original.error_list[pos].message,
                             unpickled.error_list[pos].message)

        original = ValidationError(['a', 'b'])
        unpickled = roundtrip(ValidationError(original))
        for pos in (0, 1):
            self.assertEqual(original.error_list[pos].message,
                             unpickled.error_list[pos].message)

        original = ValidationError([ValidationError('a'), ValidationError('b')])
        unpickled = roundtrip(original)
        self.assertIs(unpickled.args[0][0], unpickled.error_list[0])
        for pos in (0, 1):
            self.assertEqual(original.error_list[pos].message,
                             unpickled.error_list[pos].message)

        message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
        original = ValidationError(message_dict)
        unpickled = roundtrip(original)
        self.assertEqual(unpickled.message_dict, message_dict)
| bsd-3-clause |
ofir123/CouchPotatoServer | libs/chardet/hebrewprober.py | 2929 | 13359 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe, eDetecting
from .compat import wrap_ord
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letters scores maintained and Both
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
FINAL_KAF = 0xea
NORMAL_KAF = 0xeb
FINAL_MEM = 0xed
NORMAL_MEM = 0xee
FINAL_NUN = 0xef
NORMAL_NUN = 0xf0
FINAL_PE = 0xf3
NORMAL_PE = 0xf4
FINAL_TSADI = 0xf5
NORMAL_TSADI = 0xf6
# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score
# distance.
MIN_FINAL_CHAR_DISTANCE = 5
# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score
# distance.
MIN_MODEL_DISTANCE = 0.01
VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
    """Decides between visual (ISO-8859-8) and logical (windows-1255) Hebrew.

    Accumulates final-letter evidence itself and, in get_charset_name,
    combines it with the confidences of the two model probers attached via
    set_model_probers. Never reports a confidence of its own.
    """
    def __init__(self):
        """Initialize with no model probers attached and reset all state."""
        CharSetProber.__init__(self)
        self._mLogicalProber = None
        self._mVisualProber = None
        self.reset()
    def reset(self):
        """Clear the final-letter scores and the two-character context."""
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # The two last characters seen in the previous buffer,
        # mPrev and mBeforePrev are initialized to space in order to simulate
        # a word delimiter at the beginning of the data
        self._mPrev = ' '
        self._mBeforePrev = ' '
    # These probers are owned by the group prober.
    def set_model_probers(self, logicalProber, visualProber):
        """Attach the logical and visual Hebrew model probers."""
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber
    def is_final(self, c):
        """Return True if byte c is one of the five Hebrew final letters."""
        return wrap_ord(c) in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE,
                               FINAL_TSADI]
    def is_non_final(self, c):
        """Return True if c is a normal (non-final) Kaf, Mem, Nun or Pe."""
        # The normal Tsadi is not a good Non-Final letter due to words like
        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
        # apostrophe is converted to a space in FilterWithoutEnglishLetters
        # causing the Non-Final tsadi to appear at an end of a word even
        # though this is not the case in the original text.
        # The letters Pe and Kaf rarely display a related behavior of not being
        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
        # for example legally end with a Non-Final Pe or Kaf. However, the
        # benefit of these letters as Non-Final letters outweighs the damage
        # since these words are quite rare.
        return wrap_ord(c) in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]
    def feed(self, aBuf):
        """Accumulate logical/visual final-letter evidence from aBuf."""
        # Final letter analysis for logical-visual decision.
        # Look for evidence that the received buffer is either logical Hebrew
        # or visual Hebrew.
        # The following cases are checked:
        # 1) A word longer than 1 letter, ending with a final letter. This is
        #    an indication that the text is laid out "naturally" since the
        #    final letter really appears at the end. +1 for logical score.
        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
        #    should not end with the Non-Final form of that letter. Exceptions
        #    to this rule are mentioned above in isNonFinal(). This is an
        #    indication that the text is laid out backwards. +1 for visual
        #    score
        # 3) A word longer than 1 letter, starting with a final letter. Final
        #    letters should not appear at the beginning of a word. This is an
        #    indication that the text is laid out backwards. +1 for visual
        #    score.
        #
        # The visual score and logical score are accumulated throughout the
        # text and are finally checked against each other in GetCharSetName().
        # No checking for final letters in the middle of words is done since
        # that case is not an indication for either Logical or Visual text.
        #
        # We automatically filter out all 7-bit characters (replace them with
        # spaces) so the word boundary detection works properly. [MAP]
        if self.get_state() == eNotMe:
            # Both model probers say it's not them. No reason to continue.
            return eNotMe
        aBuf = self.filter_high_bit_only(aBuf)
        for cur in aBuf:
            if cur == ' ':
                # We stand on a space - a word just ended
                if self._mBeforePrev != ' ':
                    # next-to-last char was not a space so self._mPrev is not a
                    # 1 letter word
                    if self.is_final(self._mPrev):
                        # case (1) [-2:not space][-1:final letter][cur:space]
                        self._mFinalCharLogicalScore += 1
                    elif self.is_non_final(self._mPrev):
                        # case (2) [-2:not space][-1:Non-Final letter][
                        # cur:space]
                        self._mFinalCharVisualScore += 1
            else:
                # Not standing on a space
                if ((self._mBeforePrev == ' ') and
                        (self.is_final(self._mPrev)) and (cur != ' ')):
                    # case (3) [-2:space][-1:final letter][cur:not space]
                    self._mFinalCharVisualScore += 1
            self._mBeforePrev = self._mPrev
            self._mPrev = cur
        # Forever detecting, till the end or until both model probers return
        # eNotMe (handled above)
        return eDetecting
    def get_charset_name(self):
        """Return the winning charset name: logical vs. visual Hebrew."""
        # Make the decision: is it Logical or Visual?
        # If the final letter score distance is dominant enough, rely on it.
        finalsub = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
        if finalsub >= MIN_FINAL_CHAR_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
            return VISUAL_HEBREW_NAME
        # It's not dominant enough, try to rely on the model scores instead.
        modelsub = (self._mLogicalProber.get_confidence()
                    - self._mVisualProber.get_confidence())
        if modelsub > MIN_MODEL_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if modelsub < -MIN_MODEL_DISTANCE:
            return VISUAL_HEBREW_NAME
        # Still no good, back to final letter distance, maybe it'll save the
        # day.
        if finalsub < 0.0:
            return VISUAL_HEBREW_NAME
        # (finalsub > 0 - Logical) or (don't know what to do) default to
        # Logical.
        return LOGICAL_HEBREW_NAME
    def get_state(self):
        """Return eDetecting while either model prober is still active."""
        # Remain active as long as any of the model probers are active.
        if (self._mLogicalProber.get_state() == eNotMe) and \
           (self._mVisualProber.get_state() == eNotMe):
            return eNotMe
        return eDetecting
| gpl-3.0 |
Kazade/NeHe-Website | google_appengine/lib/django-1.2/django/core/management/commands/dbshell.py | 313 | 1261 | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import connections, DEFAULT_DB_ALIAS
class Command(BaseCommand):
    """Management command that opens the database's own CLI client."""
    help = ("Runs the command-line client for specified database, or the "
            "default database if none is provided.")
    option_list = BaseCommand.option_list + (
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a database onto which to '
                'open a shell. Defaults to the "default" database.'),
    )
    requires_model_validation = False
    def handle(self, **options):
        # Resolve the connection alias chosen via --database.
        alias = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[alias]
        try:
            connection.client.runshell()
        except OSError:
            # OSError most likely means the client binary is not installed
            # or not on PATH. Other causes would produce this (slightly
            # inaccurate) message too, but this covers the common case.
            raise CommandError(
                'You appear not to have the %r program installed or on your path.'
                % connection.client.executable_name)
| bsd-3-clause |
robhudson/django | tests/or_lookups/tests.py | 109 | 7672 | from __future__ import unicode_literals
from datetime import datetime
from operator import attrgetter
from django.db.models import Q
from django.test import TestCase
from .models import Article
class OrLookupsTests(TestCase):
    """Tests for OR-ed (and related Q-object) lookups on QuerySets."""
    def setUp(self):
        """Create three articles and keep their primary keys."""
        self.a1 = Article.objects.create(
            headline='Hello', pub_date=datetime(2005, 11, 27)
        ).pk
        self.a2 = Article.objects.create(
            headline='Goodbye', pub_date=datetime(2005, 11, 28)
        ).pk
        self.a3 = Article.objects.create(
            headline='Hello and goodbye', pub_date=datetime(2005, 11, 29)
        ).pk
    def test_filter_or(self):
        """Combining querysets with | (or Q | Q) matches either condition."""
        self.assertQuerysetEqual(
            (
                Article.objects.filter(headline__startswith='Hello')
                | Article.objects.filter(headline__startswith='Goodbye')
            ), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
        self.assertQuerysetEqual(
            Article.objects.filter(headline__contains='Hello') | Article.objects.filter(headline__contains='bye'), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
        self.assertQuerysetEqual(
            Article.objects.filter(headline__iexact='Hello') | Article.objects.filter(headline__contains='ood'), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
        self.assertQuerysetEqual(
            Article.objects.filter(Q(headline__startswith='Hello') | Q(headline__startswith='Goodbye')), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
    def test_stages(self):
        """Querysets built in stages can be combined with & and |."""
        # You can shorten this syntax with code like the following, which is
        # especially useful if building the query in stages:
        articles = Article.objects.all()
        self.assertQuerysetEqual(
            articles.filter(headline__startswith='Hello') & articles.filter(headline__startswith='Goodbye'),
            []
        )
        self.assertQuerysetEqual(
            articles.filter(headline__startswith='Hello') & articles.filter(headline__contains='bye'), [
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
    def test_pk_q(self):
        """Q(pk=...) objects can be ORed together."""
        self.assertQuerysetEqual(
            Article.objects.filter(Q(pk=self.a1) | Q(pk=self.a2)), [
                'Hello',
                'Goodbye'
            ],
            attrgetter("headline")
        )
        self.assertQuerysetEqual(
            Article.objects.filter(Q(pk=self.a1) | Q(pk=self.a2) | Q(pk=self.a3)), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
    def test_pk_in(self):
        """pk__in accepts lists and tuples; unknown pks are ignored."""
        self.assertQuerysetEqual(
            Article.objects.filter(pk__in=[self.a1, self.a2, self.a3]), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
        self.assertQuerysetEqual(
            Article.objects.filter(pk__in=(self.a1, self.a2, self.a3)), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
        self.assertQuerysetEqual(
            Article.objects.filter(pk__in=[self.a1, self.a2, self.a3, 40000]), [
                'Hello',
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
    def test_q_negated(self):
        """Q objects can be negated with ~ and combined with & and |."""
        # Q objects can be negated
        self.assertQuerysetEqual(
            Article.objects.filter(Q(pk=self.a1) | ~Q(pk=self.a2)), [
                'Hello',
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
        self.assertQuerysetEqual(
            Article.objects.filter(~Q(pk=self.a1) & ~Q(pk=self.a2)), [
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
        # This allows for more complex queries than filter() and exclude()
        # alone would allow
        self.assertQuerysetEqual(
            Article.objects.filter(Q(pk=self.a1) & (~Q(pk=self.a2) | Q(pk=self.a3))), [
                'Hello'
            ],
            attrgetter("headline"),
        )
    def test_complex_filter(self):
        """complex_filter accepts both plain dicts and Q objects."""
        # The 'complex_filter' method supports framework features such as
        # 'limit_choices_to' which normally take a single dictionary of lookup
        # arguments but need to support arbitrary queries via Q objects too.
        self.assertQuerysetEqual(
            Article.objects.complex_filter({'pk': self.a1}), [
                'Hello'
            ],
            attrgetter("headline"),
        )
        self.assertQuerysetEqual(
            Article.objects.complex_filter(Q(pk=self.a1) | Q(pk=self.a2)), [
                'Hello',
                'Goodbye'
            ],
            attrgetter("headline"),
        )
    def test_empty_in(self):
        """pk__in=[] matches nothing, but can still be ORed usefully."""
        # Passing "in" an empty list returns no results ...
        self.assertQuerysetEqual(
            Article.objects.filter(pk__in=[]),
            []
        )
        # ... but can return results if we OR it with another query.
        self.assertQuerysetEqual(
            Article.objects.filter(Q(pk__in=[]) | Q(headline__icontains='goodbye')), [
                'Goodbye',
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
    def test_q_and(self):
        """Multiple Q arguments to filter() are ANDed, in any order."""
        # Q arg objects are ANDed
        self.assertQuerysetEqual(
            Article.objects.filter(Q(headline__startswith='Hello'), Q(headline__contains='bye')), [
                'Hello and goodbye'
            ],
            attrgetter("headline")
        )
        # Q arg AND order is irrelevant
        self.assertQuerysetEqual(
            Article.objects.filter(Q(headline__contains='bye'), headline__startswith='Hello'), [
                'Hello and goodbye'
            ],
            attrgetter("headline"),
        )
        self.assertQuerysetEqual(
            Article.objects.filter(Q(headline__startswith='Hello') & Q(headline__startswith='Goodbye')),
            []
        )
    def test_q_exclude(self):
        """exclude() accepts Q objects."""
        self.assertQuerysetEqual(
            Article.objects.exclude(Q(headline__startswith='Hello')), [
                'Goodbye'
            ],
            attrgetter("headline")
        )
    def test_other_arg_queries(self):
        """Q objects work with get(), count(), values() and in_bulk()."""
        # Try some arg queries with operations other than filter.
        self.assertEqual(
            Article.objects.get(Q(headline__startswith='Hello'), Q(headline__contains='bye')).headline,
            'Hello and goodbye'
        )
        self.assertEqual(
            Article.objects.filter(Q(headline__startswith='Hello') | Q(headline__contains='bye')).count(),
            3
        )
        self.assertQuerysetEqual(
            Article.objects.filter(Q(headline__startswith='Hello'), Q(headline__contains='bye')).values(), [
                {"headline": "Hello and goodbye", "id": self.a3, "pub_date": datetime(2005, 11, 29)},
            ],
            lambda o: o,
        )
        self.assertEqual(
            Article.objects.filter(Q(headline__startswith='Hello')).in_bulk([self.a1, self.a2]),
            {self.a1: Article.objects.get(pk=self.a1)}
        )
| bsd-3-clause |
enochd/RMG-Py | external/cclib/method/volume.py | 24 | 9507 | """
cclib (http://cclib.sf.net) is (c) 2006, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
"""
__revision__ = "$Revision: 742 $"
import copy
import numpy
try:
from PyQuante.CGBF import CGBF
module_pyq = True
except:
module_pyq = False
try:
from pyvtk import *
from pyvtk.DataSetAttr import *
module_pyvtk = True
except:
module_pyvtk = False
from cclib.bridge import makepyquante
from cclib.parser.utils import convertor
class Volume(object):
    """Represent a volume in space.

    Required parameters:
        origin -- the bottom left hand corner of the volume
        topcorner -- the top right hand corner
        spacing -- the distance between the points in the cube

    Attributes:
        data -- a numpy array of values for each point in the volume
                (set to zero at initialisation)
        numpts -- the numbers of points in the (x,y,z) directions
    """

    def __init__(self, origin, topcorner, spacing):
        self.origin = origin
        self.spacing = spacing
        self.topcorner = topcorner
        # Number of grid points along each axis, inclusive of both corners.
        self.numpts = []
        for i in range(3):
            self.numpts.append(int((self.topcorner[i]-self.origin[i])/self.spacing[i] + 1) )
        self.data = numpy.zeros( tuple(self.numpts), "d")

    def __str__(self):
        """Return a string representation."""
        return "Volume %s to %s (density: %s)" % (self.origin, self.topcorner,
                                                  self.spacing)

    def write(self, filename, format="Cube"):
        """Write the volume to file.

        Raises:
            ValueError -- if format is neither "VTK" nor "Cube"
                          (case-insensitive).
        """
        format = format.upper()
        # BUGFIX: the original raised a bare string here, which is a
        # TypeError at runtime on Python >= 2.6; raise a real exception.
        # (Also drop the redundant second .upper() call.)
        if format not in ["VTK", "CUBE"]:
            raise ValueError("Format must be either VTK or Cube")
        elif format == "VTK":
            self.writeasvtk(filename)
        else:
            self.writeascube(filename)

    def writeasvtk(self, filename):
        """Write the volume as a VTK rectilinear grid (requires pyvtk)."""
        if not module_pyvtk:
            # Py2/Py3-compatible raise (was Py2-only "raise Exception, msg").
            raise ImportError("You need to have pyvtk installed")
        ranges = (numpy.arange(self.data.shape[2]),
                  numpy.arange(self.data.shape[1]),
                  numpy.arange(self.data.shape[0]))
        v = VtkData(RectilinearGrid(*ranges), "Test",
                    PointData(Scalars(self.data.ravel(), "from cclib", "default")))
        v.tofile(filename)

    def integrate(self):
        """Integrate the data over the volume (box volume taken in bohr**3)."""
        boxvol = (self.spacing[0] * self.spacing[1] * self.spacing[2] *
                  convertor(1, "Angstrom", "bohr")**3)
        return sum(self.data.ravel()) * boxvol

    def integrate_square(self):
        """Integrate the square of the data over the volume."""
        boxvol = (self.spacing[0] * self.spacing[1] * self.spacing[2] *
                  convertor(1, "Angstrom", "bohr")**3)
        return sum(self.data.ravel()**2) * boxvol

    def writeascube(self, filename):
        """Write the volume in Gaussian cube format."""
        # Remember that the units are bohr, not Angstroms.
        convert = lambda x : convertor(x, "Angstrom", "bohr")
        ans = []
        ans.append("Cube file generated by cclib")
        ans.append("")
        format = "%4d%12.6f%12.6f%12.6f"
        origin = [convert(x) for x in self.origin]
        # Header: 0 atoms, then one axis line per dimension.
        ans.append(format % (0, origin[0], origin[1], origin[2]))
        ans.append(format % (self.data.shape[0], convert(self.spacing[0]), 0.0, 0.0))
        ans.append(format % (self.data.shape[1], 0.0, convert(self.spacing[1]), 0.0))
        ans.append(format % (self.data.shape[2], 0.0, 0.0, convert(self.spacing[2])))
        line = []
        for i in range(self.data.shape[0]):
            for j in range(self.data.shape[1]):
                for k in range(self.data.shape[2]):
                    line.append(scinotation(self.data[i][j][k]))
                    # Cube format allows at most six values per line.
                    if len(line)==6:
                        ans.append(" ".join(line))
                        line = []
                # Flush the partial line at the end of each z-run.
                if line:
                    ans.append(" ".join(line))
                    line = []
        outputfile = open(filename, "w")
        outputfile.write("\n".join(ans))
        outputfile.close()
def scinotation(num):
    """Format *num* in cube-file scientific notation, 12 characters wide.

    >>> scinotation(1./654)
    ' 1.52905E-03'
    >>> scinotation(-1./654)
    '-1.52905E-03'
    """
    mantissa, exp_text = ("%10.5E" % num).split("E")
    exponent = int(exp_text)
    if exponent < -99:
        # Magnitudes below 1e-99 are clamped to zero.
        return " 0.000E+00"
    sign = "-" if exponent < 0 else "+"
    return ("%sE%s%s" % (mantissa, sign, exp_text[-2:])).rjust(12)
def getbfs(coords, gbasis):
    """Convenience function for both wavefunction and density based on PyQuante Ints.py."""
    molecule = makepyquante(coords, [0 for x in coords])
    # Cartesian angular-momentum exponents for each shell label.
    shell_powers = {
        'S': [(0, 0, 0)],
        'P': [(1, 0, 0), (0, 1, 0), (0, 0, 1)],
        'D': [(2, 0, 0), (0, 2, 0), (0, 0, 2), (1, 1, 0), (0, 1, 1), (1, 0, 1)],
        'F': [(3, 0, 0), (2, 1, 0), (2, 0, 1), (1, 2, 0), (1, 1, 1), (1, 0, 2),
              (0, 3, 0), (0, 2, 1), (0, 1, 2), (0, 0, 3)],
    }
    basisfns = []
    for atomidx, atom in enumerate(molecule):
        for label, primitives in gbasis[atomidx]:
            for powers in shell_powers[label]:
                # One contracted Gaussian per Cartesian component.
                contracted = CGBF(atom.pos(), powers)
                for exponent, coefficient in primitives:
                    contracted.add_primitive(exponent, coefficient)
                contracted.normalize()
                basisfns.append(contracted)
    return basisfns
def wavefunction(coords, mocoeffs, gbasis, volume):
    """Calculate the magnitude of the wavefunction at every point in a volume.

    Attributes:
        coords -- the coordinates of the atoms
        mocoeffs -- mocoeffs for one eigenvalue
        gbasis -- gbasis from a parser object
        volume -- a template Volume object (will not be altered)
    """
    bfs = getbfs(coords, gbasis)
    answer = copy.copy(volume)
    answer.data = numpy.zeros(answer.data.shape, "d")
    # Grid coordinates are divided by this factor to convert to bohr
    # before being handed to the basis-function amp() calls.
    unit = convertor(1, "bohr", "Angstrom")
    axes = [numpy.arange(answer.origin[d],
                         answer.topcorner[d] + answer.spacing[d],
                         answer.spacing[d]) / unit
            for d in range(3)]
    xvals, yvals, zvals = axes
    for index, basisfn in enumerate(bfs):
        amplitudes = numpy.zeros(answer.data.shape, "d")
        for i, xval in enumerate(xvals):
            for j, yval in enumerate(yvals):
                for k, zval in enumerate(zvals):
                    amplitudes[i, j, k] = basisfn.amp(xval, yval, zval)
        # Accumulate this basis function's MO-weighted contribution in place.
        numpy.multiply(amplitudes, mocoeffs[index], amplitudes)
        numpy.add(answer.data, amplitudes, answer.data)
    return answer
def electrondensity(coords, mocoeffslist, gbasis, volume):
    """Calculate the magnitude of the electron density at every point in a volume.

    Attributes:
        coords -- the coordinates of the atoms
        mocoeffs -- mocoeffs for all of the occupied eigenvalues
        gbasis -- gbasis from a parser object
        volume -- a template Volume object (will not be altered)

    Note: mocoeffs is a list of numpy arrays. The list will be of length 1
    for restricted calculations, and length 2 for unrestricted.
    """
    bfs = getbfs(coords, gbasis)
    density = copy.copy(volume)
    density.data = numpy.zeros( density.data.shape, "d")
    # Grid coordinates are divided by this factor to convert to bohr
    # before being passed to the basis-function amp() calls.
    conversion = convertor(1,"bohr","Angstrom")
    x = numpy.arange(density.origin[0], density.topcorner[0]+density.spacing[0], density.spacing[0]) / conversion
    y = numpy.arange(density.origin[1], density.topcorner[1]+density.spacing[1], density.spacing[1]) / conversion
    z = numpy.arange(density.origin[2], density.topcorner[2]+density.spacing[2], density.spacing[2]) / conversion
    for mocoeffs in mocoeffslist:
        for mocoeff in mocoeffs:
            # Build one full MO on the grid, then add its square to the density.
            wavefn = numpy.zeros( density.data.shape, "d")
            for bs in range(len(bfs)):
                data = numpy.zeros( density.data.shape, "d")
                for i,xval in enumerate(x):
                    for j,yval in enumerate(y):
                        # Collect a whole z-row at once before assigning the slice.
                        tmp = []
                        for k,zval in enumerate(z):
                            tmp.append(bfs[bs].amp(xval, yval, zval))
                        data[i,j,:] = tmp
                # In-place scale by the MO coefficient and accumulate.
                numpy.multiply(data, mocoeff[bs], data)
                numpy.add(wavefn, data, wavefn)
            density.data += wavefn**2
    # A single coefficient set means a restricted calculation: each
    # spatial orbital holds two electrons.
    if len(mocoeffslist) == 1:
        density.data = density.data*2. # doubly-occupied
    return density
if __name__=="__main__":
    # Enable the psyco JIT if it happens to be installed (Python 2 only).
    try:
        import psyco
        psyco.full()
    except ImportError:
        pass
    from cclib.parser import ccopen
    import logging
    # This logfile supplies the basis-set (gbasis) information.
    a = ccopen("../../../data/Gaussian/basicGaussian03/dvb_sp_basis.log")
    a.logger.setLevel(logging.ERROR)
    c = a.parse()
    # This logfile supplies the MO coefficients and geometry.
    b = ccopen("../../../data/Gaussian/basicGaussian03/dvb_sp.out")
    b.logger.setLevel(logging.ERROR)
    d = b.parse()
    vol = Volume( (-3.0,-6,-2.0), (3.0, 6, 2.0), spacing=(0.25,0.25,0.25) )
    # Sanity checks on the HOMO: its square must integrate to ~1.
    wavefn = wavefunction(d.atomcoords[0], d.mocoeffs[0][d.homos[0]],
                          c.gbasis, vol)
    assert abs(wavefn.integrate())<1E-6 # not necessarily true for all wavefns
    assert abs(wavefn.integrate_square() - 1.00)<1E-3 # true for all wavefns
    print wavefn.integrate(), wavefn.integrate_square()
    vol = Volume( (-3.0,-6,-2.0), (3.0, 6, 2.0), spacing=(0.25,0.25,0.25) )
    # HOMO-3 through HOMO inclusive; doubly occupied, so ~8 electrons.
    frontierorbs = [d.mocoeffs[0][(d.homos[0]-3):(d.homos[0]+1)]]
    density = electrondensity(d.atomcoords[0], frontierorbs, c.gbasis, vol)
    assert abs(density.integrate()-8.00)<1E-2
    print "Combined Density of 4 Frontier orbitals=",density.integrate()
| mit |
gerashegalov/Impala | thirdparty/hive-1.1.0-cdh5.5.0-SNAPSHOT/src/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py | 16 | 856786 | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import fb303.FacebookService
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(fb303.FacebookService.Iface):
"""
This interface is live.
"""
def getMetaConf(self, key):
"""
Parameters:
- key
"""
pass
def setMetaConf(self, key, value):
"""
Parameters:
- key
- value
"""
pass
def create_database(self, database):
"""
Parameters:
- database
"""
pass
def get_database(self, name):
"""
Parameters:
- name
"""
pass
def drop_database(self, name, deleteData, cascade):
"""
Parameters:
- name
- deleteData
- cascade
"""
pass
def get_databases(self, pattern):
"""
Parameters:
- pattern
"""
pass
def get_all_databases(self, ):
pass
def alter_database(self, dbname, db):
"""
Parameters:
- dbname
- db
"""
pass
def get_type(self, name):
"""
Parameters:
- name
"""
pass
def create_type(self, type):
"""
Parameters:
- type
"""
pass
def drop_type(self, type):
"""
Parameters:
- type
"""
pass
def get_type_all(self, name):
"""
Parameters:
- name
"""
pass
def get_fields(self, db_name, table_name):
"""
Parameters:
- db_name
- table_name
"""
pass
def get_fields_with_environment_context(self, db_name, table_name, environment_context):
"""
Parameters:
- db_name
- table_name
- environment_context
"""
pass
def get_schema(self, db_name, table_name):
"""
Parameters:
- db_name
- table_name
"""
pass
def get_schema_with_environment_context(self, db_name, table_name, environment_context):
"""
Parameters:
- db_name
- table_name
- environment_context
"""
pass
def create_table(self, tbl):
"""
Parameters:
- tbl
"""
pass
def create_table_with_environment_context(self, tbl, environment_context):
"""
Parameters:
- tbl
- environment_context
"""
pass
def drop_table(self, dbname, name, deleteData):
"""
Parameters:
- dbname
- name
- deleteData
"""
pass
def drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
"""
Parameters:
- dbname
- name
- deleteData
- environment_context
"""
pass
def get_tables(self, db_name, pattern):
"""
Parameters:
- db_name
- pattern
"""
pass
def get_all_tables(self, db_name):
"""
Parameters:
- db_name
"""
pass
def get_table(self, dbname, tbl_name):
"""
Parameters:
- dbname
- tbl_name
"""
pass
def get_table_objects_by_name(self, dbname, tbl_names):
"""
Parameters:
- dbname
- tbl_names
"""
pass
def get_table_names_by_filter(self, dbname, filter, max_tables):
"""
Parameters:
- dbname
- filter
- max_tables
"""
pass
def alter_table(self, dbname, tbl_name, new_tbl):
"""
Parameters:
- dbname
- tbl_name
- new_tbl
"""
pass
def alter_table_with_environment_context(self, dbname, tbl_name, new_tbl, environment_context):
"""
Parameters:
- dbname
- tbl_name
- new_tbl
- environment_context
"""
pass
def alter_table_with_cascade(self, dbname, tbl_name, new_tbl, cascade):
"""
Parameters:
- dbname
- tbl_name
- new_tbl
- cascade
"""
pass
def add_partition(self, new_part):
"""
Parameters:
- new_part
"""
pass
def add_partition_with_environment_context(self, new_part, environment_context):
"""
Parameters:
- new_part
- environment_context
"""
pass
def add_partitions(self, new_parts):
"""
Parameters:
- new_parts
"""
pass
def add_partitions_pspec(self, new_parts):
"""
Parameters:
- new_parts
"""
pass
def append_partition(self, db_name, tbl_name, part_vals):
"""
Parameters:
- db_name
- tbl_name
- part_vals
"""
pass
def add_partitions_req(self, request):
"""
Parameters:
- request
"""
pass
def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- environment_context
"""
pass
def append_partition_by_name(self, db_name, tbl_name, part_name):
"""
Parameters:
- db_name
- tbl_name
- part_name
"""
pass
def append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
"""
Parameters:
- db_name
- tbl_name
- part_name
- environment_context
"""
pass
def drop_partition(self, db_name, tbl_name, part_vals, deleteData):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- deleteData
"""
pass
def drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- deleteData
- environment_context
"""
pass
def drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
"""
Parameters:
- db_name
- tbl_name
- part_name
- deleteData
"""
pass
def drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
"""
Parameters:
- db_name
- tbl_name
- part_name
- deleteData
- environment_context
"""
pass
def drop_partitions_req(self, req):
"""
Parameters:
- req
"""
pass
def get_partition(self, db_name, tbl_name, part_vals):
"""
Parameters:
- db_name
- tbl_name
- part_vals
"""
pass
def exchange_partition(self, partitionSpecs, source_db, source_table_name, dest_db, dest_table_name):
"""
Parameters:
- partitionSpecs
- source_db
- source_table_name
- dest_db
- dest_table_name
"""
pass
def get_partition_with_auth(self, db_name, tbl_name, part_vals, user_name, group_names):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- user_name
- group_names
"""
pass
def get_partition_by_name(self, db_name, tbl_name, part_name):
"""
Parameters:
- db_name
- tbl_name
- part_name
"""
pass
def get_partitions(self, db_name, tbl_name, max_parts):
"""
Parameters:
- db_name
- tbl_name
- max_parts
"""
pass
def get_partitions_with_auth(self, db_name, tbl_name, max_parts, user_name, group_names):
"""
Parameters:
- db_name
- tbl_name
- max_parts
- user_name
- group_names
"""
pass
def get_partitions_pspec(self, db_name, tbl_name, max_parts):
"""
Parameters:
- db_name
- tbl_name
- max_parts
"""
pass
def get_partition_names(self, db_name, tbl_name, max_parts):
"""
Parameters:
- db_name
- tbl_name
- max_parts
"""
pass
def get_partitions_ps(self, db_name, tbl_name, part_vals, max_parts):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- max_parts
"""
pass
def get_partitions_ps_with_auth(self, db_name, tbl_name, part_vals, max_parts, user_name, group_names):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- max_parts
- user_name
- group_names
"""
pass
def get_partition_names_ps(self, db_name, tbl_name, part_vals, max_parts):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- max_parts
"""
pass
def get_partitions_by_filter(self, db_name, tbl_name, filter, max_parts):
"""
Parameters:
- db_name
- tbl_name
- filter
- max_parts
"""
pass
def get_part_specs_by_filter(self, db_name, tbl_name, filter, max_parts):
"""
Parameters:
- db_name
- tbl_name
- filter
- max_parts
"""
pass
def get_partitions_by_expr(self, req):
"""
Parameters:
- req
"""
pass
def get_partitions_by_names(self, db_name, tbl_name, names):
"""
Parameters:
- db_name
- tbl_name
- names
"""
pass
def alter_partition(self, db_name, tbl_name, new_part):
"""
Parameters:
- db_name
- tbl_name
- new_part
"""
pass
def alter_partitions(self, db_name, tbl_name, new_parts):
"""
Parameters:
- db_name
- tbl_name
- new_parts
"""
pass
def alter_partition_with_environment_context(self, db_name, tbl_name, new_part, environment_context):
"""
Parameters:
- db_name
- tbl_name
- new_part
- environment_context
"""
pass
def rename_partition(self, db_name, tbl_name, part_vals, new_part):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- new_part
"""
pass
def partition_name_has_valid_characters(self, part_vals, throw_exception):
"""
Parameters:
- part_vals
- throw_exception
"""
pass
def get_config_value(self, name, defaultValue):
"""
Parameters:
- name
- defaultValue
"""
pass
def partition_name_to_vals(self, part_name):
"""
Parameters:
- part_name
"""
pass
def partition_name_to_spec(self, part_name):
"""
Parameters:
- part_name
"""
pass
def markPartitionForEvent(self, db_name, tbl_name, part_vals, eventType):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- eventType
"""
pass
def isPartitionMarkedForEvent(self, db_name, tbl_name, part_vals, eventType):
"""
Parameters:
- db_name
- tbl_name
- part_vals
- eventType
"""
pass
def add_index(self, new_index, index_table):
"""
Parameters:
- new_index
- index_table
"""
pass
def alter_index(self, dbname, base_tbl_name, idx_name, new_idx):
"""
Parameters:
- dbname
- base_tbl_name
- idx_name
- new_idx
"""
pass
def drop_index_by_name(self, db_name, tbl_name, index_name, deleteData):
"""
Parameters:
- db_name
- tbl_name
- index_name
- deleteData
"""
pass
def get_index_by_name(self, db_name, tbl_name, index_name):
"""
Parameters:
- db_name
- tbl_name
- index_name
"""
pass
def get_indexes(self, db_name, tbl_name, max_indexes):
"""
Parameters:
- db_name
- tbl_name
- max_indexes
"""
pass
def get_index_names(self, db_name, tbl_name, max_indexes):
"""
Parameters:
- db_name
- tbl_name
- max_indexes
"""
pass
def update_table_column_statistics(self, stats_obj):
"""
Parameters:
- stats_obj
"""
pass
def update_partition_column_statistics(self, stats_obj):
"""
Parameters:
- stats_obj
"""
pass
def get_table_column_statistics(self, db_name, tbl_name, col_name):
"""
Parameters:
- db_name
- tbl_name
- col_name
"""
pass
def get_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
"""
Parameters:
- db_name
- tbl_name
- part_name
- col_name
"""
pass
def get_table_statistics_req(self, request):
"""
Parameters:
- request
"""
pass
def get_partitions_statistics_req(self, request):
"""
Parameters:
- request
"""
pass
def get_aggr_stats_for(self, request):
"""
Parameters:
- request
"""
pass
def set_aggr_stats_for(self, request):
"""
Parameters:
- request
"""
pass
def delete_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
"""
Parameters:
- db_name
- tbl_name
- part_name
- col_name
"""
pass
def delete_table_column_statistics(self, db_name, tbl_name, col_name):
"""
Parameters:
- db_name
- tbl_name
- col_name
"""
pass
def create_function(self, func):
"""
Parameters:
- func
"""
pass
def drop_function(self, dbName, funcName):
"""
Parameters:
- dbName
- funcName
"""
pass
def alter_function(self, dbName, funcName, newFunc):
"""
Parameters:
- dbName
- funcName
- newFunc
"""
pass
def get_functions(self, dbName, pattern):
"""
Parameters:
- dbName
- pattern
"""
pass
def get_function(self, dbName, funcName):
"""
Parameters:
- dbName
- funcName
"""
pass
def create_role(self, role):
"""
Parameters:
- role
"""
pass
def drop_role(self, role_name):
"""
Parameters:
- role_name
"""
pass
def get_role_names(self, ):
pass
def grant_role(self, role_name, principal_name, principal_type, grantor, grantorType, grant_option):
"""
Parameters:
- role_name
- principal_name
- principal_type
- grantor
- grantorType
- grant_option
"""
pass
def revoke_role(self, role_name, principal_name, principal_type):
"""
Parameters:
- role_name
- principal_name
- principal_type
"""
pass
def list_roles(self, principal_name, principal_type):
"""
Parameters:
- principal_name
- principal_type
"""
pass
def grant_revoke_role(self, request):
"""
Parameters:
- request
"""
pass
def get_principals_in_role(self, request):
"""
Parameters:
- request
"""
pass
def get_role_grants_for_principal(self, request):
"""
Parameters:
- request
"""
pass
def get_privilege_set(self, hiveObject, user_name, group_names):
"""
Parameters:
- hiveObject
- user_name
- group_names
"""
pass
def list_privileges(self, principal_name, principal_type, hiveObject):
"""
Parameters:
- principal_name
- principal_type
- hiveObject
"""
pass
def grant_privileges(self, privileges):
"""
Parameters:
- privileges
"""
pass
def revoke_privileges(self, privileges):
"""
Parameters:
- privileges
"""
pass
def grant_revoke_privileges(self, request):
"""
Parameters:
- request
"""
pass
def set_ugi(self, user_name, group_names):
"""
Parameters:
- user_name
- group_names
"""
pass
def get_delegation_token(self, token_owner, renewer_kerberos_principal_name):
"""
Parameters:
- token_owner
- renewer_kerberos_principal_name
"""
pass
def renew_delegation_token(self, token_str_form):
"""
Parameters:
- token_str_form
"""
pass
def cancel_delegation_token(self, token_str_form):
"""
Parameters:
- token_str_form
"""
pass
def get_open_txns(self, ):
pass
def get_open_txns_info(self, ):
pass
def open_txns(self, rqst):
"""
Parameters:
- rqst
"""
pass
def abort_txn(self, rqst):
"""
Parameters:
- rqst
"""
pass
def commit_txn(self, rqst):
"""
Parameters:
- rqst
"""
pass
def lock(self, rqst):
"""
Parameters:
- rqst
"""
pass
def check_lock(self, rqst):
"""
Parameters:
- rqst
"""
pass
def unlock(self, rqst):
"""
Parameters:
- rqst
"""
pass
def show_locks(self, rqst):
"""
Parameters:
- rqst
"""
pass
def heartbeat(self, ids):
"""
Parameters:
- ids
"""
pass
def heartbeat_txn_range(self, txns):
"""
Parameters:
- txns
"""
pass
def compact(self, rqst):
"""
Parameters:
- rqst
"""
pass
def show_compact(self, rqst):
"""
Parameters:
- rqst
"""
pass
def get_next_notification(self, rqst):
"""
Parameters:
- rqst
"""
pass
def get_current_notificationEventId(self, ):
pass
class Client(fb303.FacebookService.Client, Iface):
"""
This interface is live.
"""
def __init__(self, iprot, oprot=None):
fb303.FacebookService.Client.__init__(self, iprot, oprot)
  def getMetaConf(self, key):
    """
    Synchronous getMetaConf RPC: serialize the request, then block
    until the matching reply (or exception) is read back.

    Parameters:
     - key
    """
    self.send_getMetaConf(key)
    return self.recv_getMetaConf()
  def send_getMetaConf(self, key):
    # Write one CALL frame for 'getMetaConf' onto the output protocol
    # and flush the transport so the server sees it immediately.
    self._oprot.writeMessageBegin('getMetaConf', TMessageType.CALL, self._seqid)
    args = getMetaConf_args()
    args.key = key
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getMetaConf(self, ):
    # Read one reply frame from the input protocol.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Transport/application-level failure: re-raise it locally.
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getMetaConf_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # A populated success field wins; otherwise the declared thrift
    # exception (o1) is raised, and a fully-empty result is a protocol error.
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getMetaConf failed: unknown result");
def setMetaConf(self, key, value):
"""
Parameters:
- key
- value
"""
self.send_setMetaConf(key, value)
self.recv_setMetaConf()
def send_setMetaConf(self, key, value):
self._oprot.writeMessageBegin('setMetaConf', TMessageType.CALL, self._seqid)
args = setMetaConf_args()
args.key = key
args.value = value
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_setMetaConf(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = setMetaConf_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.o1 is not None:
raise result.o1
return
def create_database(self, database):
"""
Parameters:
- database
"""
self.send_create_database(database)
self.recv_create_database()
def send_create_database(self, database):
self._oprot.writeMessageBegin('create_database', TMessageType.CALL, self._seqid)
args = create_database_args()
args.database = database
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_create_database(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = create_database_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
if result.o3 is not None:
raise result.o3
return
def get_database(self, name):
"""
Parameters:
- name
"""
self.send_get_database(name)
return self.recv_get_database()
def send_get_database(self, name):
self._oprot.writeMessageBegin('get_database', TMessageType.CALL, self._seqid)
args = get_database_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_database(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_database_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_database failed: unknown result");
def drop_database(self, name, deleteData, cascade):
"""
Parameters:
- name
- deleteData
- cascade
"""
self.send_drop_database(name, deleteData, cascade)
self.recv_drop_database()
def send_drop_database(self, name, deleteData, cascade):
self._oprot.writeMessageBegin('drop_database', TMessageType.CALL, self._seqid)
args = drop_database_args()
args.name = name
args.deleteData = deleteData
args.cascade = cascade
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_drop_database(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = drop_database_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
if result.o3 is not None:
raise result.o3
return
def get_databases(self, pattern):
"""
Parameters:
- pattern
"""
self.send_get_databases(pattern)
return self.recv_get_databases()
def send_get_databases(self, pattern):
self._oprot.writeMessageBegin('get_databases', TMessageType.CALL, self._seqid)
args = get_databases_args()
args.pattern = pattern
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_databases(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_databases_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_databases failed: unknown result");
def get_all_databases(self, ):
self.send_get_all_databases()
return self.recv_get_all_databases()
def send_get_all_databases(self, ):
self._oprot.writeMessageBegin('get_all_databases', TMessageType.CALL, self._seqid)
args = get_all_databases_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_all_databases(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_all_databases_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_databases failed: unknown result");
def alter_database(self, dbname, db):
"""
Parameters:
- dbname
- db
"""
self.send_alter_database(dbname, db)
self.recv_alter_database()
def send_alter_database(self, dbname, db):
self._oprot.writeMessageBegin('alter_database', TMessageType.CALL, self._seqid)
args = alter_database_args()
args.dbname = dbname
args.db = db
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_alter_database(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = alter_database_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
return
def get_type(self, name):
"""
Parameters:
- name
"""
self.send_get_type(name)
return self.recv_get_type()
def send_get_type(self, name):
self._oprot.writeMessageBegin('get_type', TMessageType.CALL, self._seqid)
args = get_type_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_type(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_type_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type failed: unknown result");
def create_type(self, type):
"""
Parameters:
- type
"""
self.send_create_type(type)
return self.recv_create_type()
def send_create_type(self, type):
self._oprot.writeMessageBegin('create_type', TMessageType.CALL, self._seqid)
args = create_type_args()
args.type = type
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_create_type(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = create_type_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
if result.o3 is not None:
raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "create_type failed: unknown result");
def drop_type(self, type):
"""
Parameters:
- type
"""
self.send_drop_type(type)
return self.recv_drop_type()
def send_drop_type(self, type):
self._oprot.writeMessageBegin('drop_type', TMessageType.CALL, self._seqid)
args = drop_type_args()
args.type = type
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_drop_type(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = drop_type_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_type failed: unknown result");
def get_type_all(self, name):
"""
Parameters:
- name
"""
self.send_get_type_all(name)
return self.recv_get_type_all()
def send_get_type_all(self, name):
self._oprot.writeMessageBegin('get_type_all', TMessageType.CALL, self._seqid)
args = get_type_all_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_type_all(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_type_all_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o2 is not None:
raise result.o2
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_type_all failed: unknown result");
def get_fields(self, db_name, table_name):
"""
Parameters:
- db_name
- table_name
"""
self.send_get_fields(db_name, table_name)
return self.recv_get_fields()
def send_get_fields(self, db_name, table_name):
self._oprot.writeMessageBegin('get_fields', TMessageType.CALL, self._seqid)
args = get_fields_args()
args.db_name = db_name
args.table_name = table_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_fields(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_fields_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
if result.o3 is not None:
raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields failed: unknown result");
  def get_fields_with_environment_context(self, db_name, table_name, environment_context):
    """Synchronous client stub for the get_fields_with_environment_context RPC.

    Parameters:
     - db_name
     - table_name
     - environment_context
    """
    self.send_get_fields_with_environment_context(db_name, table_name, environment_context)
    return self.recv_get_fields_with_environment_context()
  def send_get_fields_with_environment_context(self, db_name, table_name, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_fields_with_environment_context', TMessageType.CALL, self._seqid)
    args = get_fields_with_environment_context_args()
    args.db_name = db_name
    args.table_name = table_name
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_fields_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_fields_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_fields_with_environment_context failed: unknown result");
  def get_schema(self, db_name, table_name):
    """Synchronous client stub for the get_schema RPC.

    Parameters:
     - db_name
     - table_name
    """
    self.send_get_schema(db_name, table_name)
    return self.recv_get_schema()
  def send_get_schema(self, db_name, table_name):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_schema', TMessageType.CALL, self._seqid)
    args = get_schema_args()
    args.db_name = db_name
    args.table_name = table_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_schema(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_schema_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema failed: unknown result");
  def get_schema_with_environment_context(self, db_name, table_name, environment_context):
    """Synchronous client stub for the get_schema_with_environment_context RPC.

    Parameters:
     - db_name
     - table_name
     - environment_context
    """
    self.send_get_schema_with_environment_context(db_name, table_name, environment_context)
    return self.recv_get_schema_with_environment_context()
  def send_get_schema_with_environment_context(self, db_name, table_name, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_schema_with_environment_context', TMessageType.CALL, self._seqid)
    args = get_schema_with_environment_context_args()
    args.db_name = db_name
    args.table_name = table_name
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_schema_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_schema_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_schema_with_environment_context failed: unknown result");
  def create_table(self, tbl):
    """Synchronous client stub for the create_table RPC (returns None).

    Parameters:
     - tbl
    """
    self.send_create_table(tbl)
    self.recv_create_table()
  def send_create_table(self, tbl):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('create_table', TMessageType.CALL, self._seqid)
    args = create_table_args()
    args.tbl = tbl
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_create_table(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = create_table_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields to check, no success payload.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    return
  def create_table_with_environment_context(self, tbl, environment_context):
    """Synchronous client stub for the create_table_with_environment_context RPC (returns None).

    Parameters:
     - tbl
     - environment_context
    """
    self.send_create_table_with_environment_context(tbl, environment_context)
    self.recv_create_table_with_environment_context()
  def send_create_table_with_environment_context(self, tbl, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('create_table_with_environment_context', TMessageType.CALL, self._seqid)
    args = create_table_with_environment_context_args()
    args.tbl = tbl
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_create_table_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = create_table_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields to check, no success payload.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    return
  def drop_table(self, dbname, name, deleteData):
    """Synchronous client stub for the drop_table RPC (returns None).

    Parameters:
     - dbname
     - name
     - deleteData
    """
    self.send_drop_table(dbname, name, deleteData)
    self.recv_drop_table()
  def send_drop_table(self, dbname, name, deleteData):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_table', TMessageType.CALL, self._seqid)
    args = drop_table_args()
    args.dbname = dbname
    args.name = name
    args.deleteData = deleteData
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_table(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_table_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields (o1, o3) to check.
    if result.o1 is not None:
      raise result.o1
    if result.o3 is not None:
      raise result.o3
    return
  def drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
    """Synchronous client stub for the drop_table_with_environment_context RPC (returns None).

    Parameters:
     - dbname
     - name
     - deleteData
     - environment_context
    """
    self.send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
    self.recv_drop_table_with_environment_context()
  def send_drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_table_with_environment_context', TMessageType.CALL, self._seqid)
    args = drop_table_with_environment_context_args()
    args.dbname = dbname
    args.name = name
    args.deleteData = deleteData
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_table_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_table_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields (o1, o3) to check.
    if result.o1 is not None:
      raise result.o1
    if result.o3 is not None:
      raise result.o3
    return
def get_tables(self, db_name, pattern):
"""
Parameters:
- db_name
- pattern
"""
self.send_get_tables(db_name, pattern)
return self.recv_get_tables()
def send_get_tables(self, db_name, pattern):
self._oprot.writeMessageBegin('get_tables', TMessageType.CALL, self._seqid)
args = get_tables_args()
args.db_name = db_name
args.pattern = pattern
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_tables(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_tables_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_tables failed: unknown result");
def get_all_tables(self, db_name):
"""
Parameters:
- db_name
"""
self.send_get_all_tables(db_name)
return self.recv_get_all_tables()
def send_get_all_tables(self, db_name):
self._oprot.writeMessageBegin('get_all_tables', TMessageType.CALL, self._seqid)
args = get_all_tables_args()
args.db_name = db_name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get_all_tables(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = get_all_tables_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
raise TApplicationException(TApplicationException.MISSING_RESULT, "get_all_tables failed: unknown result");
  def get_table(self, dbname, tbl_name):
    """Synchronous client stub for the get_table RPC.

    Parameters:
     - dbname
     - tbl_name
    """
    self.send_get_table(dbname, tbl_name)
    return self.recv_get_table()
  def send_get_table(self, dbname, tbl_name):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_table', TMessageType.CALL, self._seqid)
    args = get_table_args()
    args.dbname = dbname
    args.tbl_name = tbl_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_table(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_table_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table failed: unknown result");
  def get_table_objects_by_name(self, dbname, tbl_names):
    """Synchronous client stub for the get_table_objects_by_name RPC.

    Parameters:
     - dbname
     - tbl_names
    """
    self.send_get_table_objects_by_name(dbname, tbl_names)
    return self.recv_get_table_objects_by_name()
  def send_get_table_objects_by_name(self, dbname, tbl_names):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_table_objects_by_name', TMessageType.CALL, self._seqid)
    args = get_table_objects_by_name_args()
    args.dbname = dbname
    args.tbl_names = tbl_names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_table_objects_by_name(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_table_objects_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_objects_by_name failed: unknown result");
  def get_table_names_by_filter(self, dbname, filter, max_tables):
    """Synchronous client stub for the get_table_names_by_filter RPC.

    Parameters:
     - dbname
     - filter
     - max_tables
    """
    self.send_get_table_names_by_filter(dbname, filter, max_tables)
    return self.recv_get_table_names_by_filter()
  def send_get_table_names_by_filter(self, dbname, filter, max_tables):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('get_table_names_by_filter', TMessageType.CALL, self._seqid)
    args = get_table_names_by_filter_args()
    args.dbname = dbname
    args.filter = filter
    args.max_tables = max_tables
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_get_table_names_by_filter(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_table_names_by_filter_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_names_by_filter failed: unknown result");
  def alter_table(self, dbname, tbl_name, new_tbl):
    """Synchronous client stub for the alter_table RPC (returns None).

    Parameters:
     - dbname
     - tbl_name
     - new_tbl
    """
    self.send_alter_table(dbname, tbl_name, new_tbl)
    self.recv_alter_table()
  def send_alter_table(self, dbname, tbl_name, new_tbl):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('alter_table', TMessageType.CALL, self._seqid)
    args = alter_table_args()
    args.dbname = dbname
    args.tbl_name = tbl_name
    args.new_tbl = new_tbl
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_alter_table(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = alter_table_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields to check, no success payload.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    return
  def alter_table_with_environment_context(self, dbname, tbl_name, new_tbl, environment_context):
    """Synchronous client stub for the alter_table_with_environment_context RPC (returns None).

    Parameters:
     - dbname
     - tbl_name
     - new_tbl
     - environment_context
    """
    self.send_alter_table_with_environment_context(dbname, tbl_name, new_tbl, environment_context)
    self.recv_alter_table_with_environment_context()
  def send_alter_table_with_environment_context(self, dbname, tbl_name, new_tbl, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('alter_table_with_environment_context', TMessageType.CALL, self._seqid)
    args = alter_table_with_environment_context_args()
    args.dbname = dbname
    args.tbl_name = tbl_name
    args.new_tbl = new_tbl
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_alter_table_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = alter_table_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields to check, no success payload.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    return
  def alter_table_with_cascade(self, dbname, tbl_name, new_tbl, cascade):
    """Synchronous client stub for the alter_table_with_cascade RPC (returns None).

    Parameters:
     - dbname
     - tbl_name
     - new_tbl
     - cascade
    """
    self.send_alter_table_with_cascade(dbname, tbl_name, new_tbl, cascade)
    self.recv_alter_table_with_cascade()
  def send_alter_table_with_cascade(self, dbname, tbl_name, new_tbl, cascade):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('alter_table_with_cascade', TMessageType.CALL, self._seqid)
    args = alter_table_with_cascade_args()
    args.dbname = dbname
    args.tbl_name = tbl_name
    args.new_tbl = new_tbl
    args.cascade = cascade
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_alter_table_with_cascade(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = alter_table_with_cascade_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Void call: only declared exception fields to check, no success payload.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    return
  def add_partition(self, new_part):
    """Synchronous client stub for the add_partition RPC.

    Parameters:
     - new_part
    """
    self.send_add_partition(new_part)
    return self.recv_add_partition()
  def send_add_partition(self, new_part):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('add_partition', TMessageType.CALL, self._seqid)
    args = add_partition_args()
    args.new_part = new_part
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_add_partition(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = add_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition failed: unknown result");
  def add_partition_with_environment_context(self, new_part, environment_context):
    """Synchronous client stub for the add_partition_with_environment_context RPC.

    Parameters:
     - new_part
     - environment_context
    """
    self.send_add_partition_with_environment_context(new_part, environment_context)
    return self.recv_add_partition_with_environment_context()
  def send_add_partition_with_environment_context(self, new_part, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('add_partition_with_environment_context', TMessageType.CALL, self._seqid)
    args = add_partition_with_environment_context_args()
    args.new_part = new_part
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_add_partition_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = add_partition_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partition_with_environment_context failed: unknown result");
  def add_partitions(self, new_parts):
    """Synchronous client stub for the add_partitions RPC.

    Parameters:
     - new_parts
    """
    self.send_add_partitions(new_parts)
    return self.recv_add_partitions()
  def send_add_partitions(self, new_parts):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('add_partitions', TMessageType.CALL, self._seqid)
    args = add_partitions_args()
    args.new_parts = new_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_add_partitions(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = add_partitions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions failed: unknown result");
  def add_partitions_pspec(self, new_parts):
    """Synchronous client stub for the add_partitions_pspec RPC.

    Parameters:
     - new_parts
    """
    self.send_add_partitions_pspec(new_parts)
    return self.recv_add_partitions_pspec()
  def send_add_partitions_pspec(self, new_parts):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('add_partitions_pspec', TMessageType.CALL, self._seqid)
    args = add_partitions_pspec_args()
    args.new_parts = new_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_add_partitions_pspec(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = add_partitions_pspec_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_pspec failed: unknown result");
  def append_partition(self, db_name, tbl_name, part_vals):
    """Synchronous client stub for the append_partition RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
    """
    self.send_append_partition(db_name, tbl_name, part_vals)
    return self.recv_append_partition()
  def send_append_partition(self, db_name, tbl_name, part_vals):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('append_partition', TMessageType.CALL, self._seqid)
    args = append_partition_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_append_partition(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = append_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition failed: unknown result");
def add_partitions_req(self, request):
"""
Parameters:
- request
"""
self.send_add_partitions_req(request)
return self.recv_add_partitions_req()
def send_add_partitions_req(self, request):
self._oprot.writeMessageBegin('add_partitions_req', TMessageType.CALL, self._seqid)
args = add_partitions_req_args()
args.request = request
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_add_partitions_req(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = add_partitions_req_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.o1 is not None:
raise result.o1
if result.o2 is not None:
raise result.o2
if result.o3 is not None:
raise result.o3
raise TApplicationException(TApplicationException.MISSING_RESULT, "add_partitions_req failed: unknown result");
  def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    """Synchronous client stub for the append_partition_with_environment_context RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - environment_context
    """
    self.send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    return self.recv_append_partition_with_environment_context()
  def send_append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('append_partition_with_environment_context', TMessageType.CALL, self._seqid)
    args = append_partition_with_environment_context_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_append_partition_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = append_partition_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result");
  def append_partition_by_name(self, db_name, tbl_name, part_name):
    """Synchronous client stub for the append_partition_by_name RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_name
    """
    self.send_append_partition_by_name(db_name, tbl_name, part_name)
    return self.recv_append_partition_by_name()
  def send_append_partition_by_name(self, db_name, tbl_name, part_name):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('append_partition_by_name', TMessageType.CALL, self._seqid)
    args = append_partition_by_name_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_append_partition_by_name(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = append_partition_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result");
  def append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
    """Synchronous client stub for the append_partition_by_name_with_environment_context RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_name
     - environment_context
    """
    self.send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
    return self.recv_append_partition_by_name_with_environment_context()
  def send_append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('append_partition_by_name_with_environment_context', TMessageType.CALL, self._seqid)
    args = append_partition_by_name_with_environment_context_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_append_partition_by_name_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = append_partition_by_name_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result");
  def drop_partition(self, db_name, tbl_name, part_vals, deleteData):
    """Synchronous client stub for the drop_partition RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - deleteData
    """
    self.send_drop_partition(db_name, tbl_name, part_vals, deleteData)
    return self.recv_drop_partition()
  def send_drop_partition(self, db_name, tbl_name, part_vals, deleteData):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_partition', TMessageType.CALL, self._seqid)
    args = drop_partition_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.deleteData = deleteData
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_partition(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result");
  def drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
    """Synchronous client stub for the drop_partition_with_environment_context RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - deleteData
     - environment_context
    """
    self.send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
    return self.recv_drop_partition_with_environment_context()
  def send_drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_partition_with_environment_context', TMessageType.CALL, self._seqid)
    args = drop_partition_with_environment_context_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.deleteData = deleteData
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_partition_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_partition_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result");
  def drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
    """Synchronous client stub for the drop_partition_by_name RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_name
     - deleteData
    """
    self.send_drop_partition_by_name(db_name, tbl_name, part_name, deleteData)
    return self.recv_drop_partition_by_name()
  def send_drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_partition_by_name', TMessageType.CALL, self._seqid)
    args = drop_partition_by_name_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.deleteData = deleteData
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_partition_by_name(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_partition_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
  def drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
    """Synchronous client stub for the drop_partition_by_name_with_environment_context RPC.

    Parameters:
     - db_name
     - tbl_name
     - part_name
     - deleteData
     - environment_context
    """
    self.send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
    return self.recv_drop_partition_by_name_with_environment_context()
  def send_drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
    # Write the CALL frame (header + args struct) and flush the transport.
    self._oprot.writeMessageBegin('drop_partition_by_name_with_environment_context', TMessageType.CALL, self._seqid)
    args = drop_partition_by_name_with_environment_context_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.deleteData = deleteData
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_drop_partition_by_name_with_environment_context(self, ):
    # Decode the reply frame; transport-level failures arrive as TApplicationException.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_partition_by_name_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Re-raise whichever declared server-side exception field is set.
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result");
def drop_partitions_req(self, req):
    """Issue the drop_partitions_req RPC and return its result.

    Parameters:
     - req
    """
    self.send_drop_partitions_req(req)
    return self.recv_drop_partitions_req()

def send_drop_partitions_req(self, req):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('drop_partitions_req', TMessageType.CALL, self._seqid)
    args = drop_partitions_req_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_drop_partitions_req(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = drop_partitions_req_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partitions_req failed: unknown result");
def get_partition(self, db_name, tbl_name, part_vals):
    """Issue the get_partition RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
    """
    self.send_get_partition(db_name, tbl_name, part_vals)
    return self.recv_get_partition()

def send_get_partition(self, db_name, tbl_name, part_vals):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partition', TMessageType.CALL, self._seqid)
    args = get_partition_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partition(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition failed: unknown result");
def exchange_partition(self, partitionSpecs, source_db, source_table_name, dest_db, dest_table_name):
    """Issue the exchange_partition RPC and return its result.

    Parameters:
     - partitionSpecs
     - source_db
     - source_table_name
     - dest_db
     - dest_table_name
    """
    self.send_exchange_partition(partitionSpecs, source_db, source_table_name, dest_db, dest_table_name)
    return self.recv_exchange_partition()

def send_exchange_partition(self, partitionSpecs, source_db, source_table_name, dest_db, dest_table_name):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('exchange_partition', TMessageType.CALL, self._seqid)
    args = exchange_partition_args()
    args.partitionSpecs = partitionSpecs
    args.source_db = source_db
    args.source_table_name = source_table_name
    args.dest_db = dest_db
    args.dest_table_name = dest_table_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_exchange_partition(self, ):
    # Read the reply: raise a transport-level exception, one of the four
    # declared service exceptions (o1-o4), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = exchange_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    if result.o3 is not None:
        raise result.o3
    if result.o4 is not None:
        raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "exchange_partition failed: unknown result");
def get_partition_with_auth(self, db_name, tbl_name, part_vals, user_name, group_names):
    """Issue the get_partition_with_auth RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - user_name
     - group_names
    """
    self.send_get_partition_with_auth(db_name, tbl_name, part_vals, user_name, group_names)
    return self.recv_get_partition_with_auth()

def send_get_partition_with_auth(self, db_name, tbl_name, part_vals, user_name, group_names):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partition_with_auth', TMessageType.CALL, self._seqid)
    args = get_partition_with_auth_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.user_name = user_name
    args.group_names = group_names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partition_with_auth(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partition_with_auth_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_with_auth failed: unknown result");
def get_partition_by_name(self, db_name, tbl_name, part_name):
    """Issue the get_partition_by_name RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_name
    """
    self.send_get_partition_by_name(db_name, tbl_name, part_name)
    return self.recv_get_partition_by_name()

def send_get_partition_by_name(self, db_name, tbl_name, part_name):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partition_by_name', TMessageType.CALL, self._seqid)
    args = get_partition_by_name_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partition_by_name(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partition_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_by_name failed: unknown result");
def get_partitions(self, db_name, tbl_name, max_parts):
    """Issue the get_partitions RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - max_parts
    """
    self.send_get_partitions(db_name, tbl_name, max_parts)
    return self.recv_get_partitions()

def send_get_partitions(self, db_name, tbl_name, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions', TMessageType.CALL, self._seqid)
    args = get_partitions_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions failed: unknown result");
def get_partitions_with_auth(self, db_name, tbl_name, max_parts, user_name, group_names):
    """Issue the get_partitions_with_auth RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - max_parts
     - user_name
     - group_names
    """
    self.send_get_partitions_with_auth(db_name, tbl_name, max_parts, user_name, group_names)
    return self.recv_get_partitions_with_auth()

def send_get_partitions_with_auth(self, db_name, tbl_name, max_parts, user_name, group_names):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_with_auth', TMessageType.CALL, self._seqid)
    args = get_partitions_with_auth_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_parts = max_parts
    args.user_name = user_name
    args.group_names = group_names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_with_auth(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_with_auth_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_with_auth failed: unknown result");
def get_partitions_pspec(self, db_name, tbl_name, max_parts):
    """Issue the get_partitions_pspec RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - max_parts
    """
    self.send_get_partitions_pspec(db_name, tbl_name, max_parts)
    return self.recv_get_partitions_pspec()

def send_get_partitions_pspec(self, db_name, tbl_name, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_pspec', TMessageType.CALL, self._seqid)
    args = get_partitions_pspec_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_pspec(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_pspec_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_pspec failed: unknown result");
def get_partition_names(self, db_name, tbl_name, max_parts):
    """Issue the get_partition_names RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - max_parts
    """
    self.send_get_partition_names(db_name, tbl_name, max_parts)
    return self.recv_get_partition_names()

def send_get_partition_names(self, db_name, tbl_name, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partition_names', TMessageType.CALL, self._seqid)
    args = get_partition_names_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partition_names(self, ):
    # Read the reply: raise a transport-level exception, the single declared
    # service exception (o2 — the IDL declares no o1 for this call), or
    # return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partition_names_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names failed: unknown result");
def get_partitions_ps(self, db_name, tbl_name, part_vals, max_parts):
    """Issue the get_partitions_ps RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - max_parts
    """
    self.send_get_partitions_ps(db_name, tbl_name, part_vals, max_parts)
    return self.recv_get_partitions_ps()

def send_get_partitions_ps(self, db_name, tbl_name, part_vals, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_ps', TMessageType.CALL, self._seqid)
    args = get_partitions_ps_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_ps(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_ps_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps failed: unknown result");
def get_partitions_ps_with_auth(self, db_name, tbl_name, part_vals, max_parts, user_name, group_names):
    """Issue the get_partitions_ps_with_auth RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - max_parts
     - user_name
     - group_names
    """
    self.send_get_partitions_ps_with_auth(db_name, tbl_name, part_vals, max_parts, user_name, group_names)
    return self.recv_get_partitions_ps_with_auth()

def send_get_partitions_ps_with_auth(self, db_name, tbl_name, part_vals, max_parts, user_name, group_names):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_ps_with_auth', TMessageType.CALL, self._seqid)
    args = get_partitions_ps_with_auth_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.max_parts = max_parts
    args.user_name = user_name
    args.group_names = group_names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_ps_with_auth(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_ps_with_auth_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_ps_with_auth failed: unknown result");
def get_partition_names_ps(self, db_name, tbl_name, part_vals, max_parts):
    """Issue the get_partition_names_ps RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - max_parts
    """
    self.send_get_partition_names_ps(db_name, tbl_name, part_vals, max_parts)
    return self.recv_get_partition_names_ps()

def send_get_partition_names_ps(self, db_name, tbl_name, part_vals, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partition_names_ps', TMessageType.CALL, self._seqid)
    args = get_partition_names_ps_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partition_names_ps(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partition_names_ps_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_names_ps failed: unknown result");
def get_partitions_by_filter(self, db_name, tbl_name, filter, max_parts):
    """Issue the get_partitions_by_filter RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - filter
     - max_parts
    """
    # NOTE: the parameter name `filter` shadows the builtin; it is kept
    # because it comes from the service IDL and callers may pass it by keyword.
    self.send_get_partitions_by_filter(db_name, tbl_name, filter, max_parts)
    return self.recv_get_partitions_by_filter()

def send_get_partitions_by_filter(self, db_name, tbl_name, filter, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_by_filter', TMessageType.CALL, self._seqid)
    args = get_partitions_by_filter_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.filter = filter
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_by_filter(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_by_filter_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_filter failed: unknown result");
def get_part_specs_by_filter(self, db_name, tbl_name, filter, max_parts):
    """Issue the get_part_specs_by_filter RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - filter
     - max_parts
    """
    # NOTE: `filter` shadows the builtin; name is dictated by the service IDL.
    self.send_get_part_specs_by_filter(db_name, tbl_name, filter, max_parts)
    return self.recv_get_part_specs_by_filter()

def send_get_part_specs_by_filter(self, db_name, tbl_name, filter, max_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_part_specs_by_filter', TMessageType.CALL, self._seqid)
    args = get_part_specs_by_filter_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.filter = filter
    args.max_parts = max_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_part_specs_by_filter(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_part_specs_by_filter_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_part_specs_by_filter failed: unknown result");
def get_partitions_by_expr(self, req):
    """Issue the get_partitions_by_expr RPC and return its result.

    Parameters:
     - req
    """
    self.send_get_partitions_by_expr(req)
    return self.recv_get_partitions_by_expr()

def send_get_partitions_by_expr(self, req):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_by_expr', TMessageType.CALL, self._seqid)
    args = get_partitions_by_expr_args()
    args.req = req
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_by_expr(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_by_expr_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_expr failed: unknown result");
def get_partitions_by_names(self, db_name, tbl_name, names):
    """Issue the get_partitions_by_names RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - names
    """
    self.send_get_partitions_by_names(db_name, tbl_name, names)
    return self.recv_get_partitions_by_names()

def send_get_partitions_by_names(self, db_name, tbl_name, names):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_partitions_by_names', TMessageType.CALL, self._seqid)
    args = get_partitions_by_names_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.names = names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_partitions_by_names(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_partitions_by_names_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_by_names failed: unknown result");
def alter_partition(self, db_name, tbl_name, new_part):
    """Issue the alter_partition RPC and wait for completion (void RPC).

    Parameters:
     - db_name
     - tbl_name
     - new_part
    """
    self.send_alter_partition(db_name, tbl_name, new_part)
    self.recv_alter_partition()

def send_alter_partition(self, db_name, tbl_name, new_part):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('alter_partition', TMessageType.CALL, self._seqid)
    args = alter_partition_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.new_part = new_part
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_alter_partition(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or a
    # declared service exception (o1/o2); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = alter_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    return
def alter_partitions(self, db_name, tbl_name, new_parts):
    """Issue the alter_partitions RPC and wait for completion (void RPC).

    Parameters:
     - db_name
     - tbl_name
     - new_parts
    """
    self.send_alter_partitions(db_name, tbl_name, new_parts)
    self.recv_alter_partitions()

def send_alter_partitions(self, db_name, tbl_name, new_parts):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('alter_partitions', TMessageType.CALL, self._seqid)
    args = alter_partitions_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.new_parts = new_parts
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_alter_partitions(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or a
    # declared service exception (o1/o2); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = alter_partitions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    return
def alter_partition_with_environment_context(self, db_name, tbl_name, new_part, environment_context):
    """Issue the alter_partition_with_environment_context RPC and wait for completion (void RPC).

    Parameters:
     - db_name
     - tbl_name
     - new_part
     - environment_context
    """
    self.send_alter_partition_with_environment_context(db_name, tbl_name, new_part, environment_context)
    self.recv_alter_partition_with_environment_context()

def send_alter_partition_with_environment_context(self, db_name, tbl_name, new_part, environment_context):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('alter_partition_with_environment_context', TMessageType.CALL, self._seqid)
    args = alter_partition_with_environment_context_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.new_part = new_part
    args.environment_context = environment_context
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_alter_partition_with_environment_context(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or a
    # declared service exception (o1/o2); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = alter_partition_with_environment_context_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    return
def rename_partition(self, db_name, tbl_name, part_vals, new_part):
    """Issue the rename_partition RPC and wait for completion (void RPC).

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - new_part
    """
    self.send_rename_partition(db_name, tbl_name, part_vals, new_part)
    self.recv_rename_partition()

def send_rename_partition(self, db_name, tbl_name, part_vals, new_part):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('rename_partition', TMessageType.CALL, self._seqid)
    args = rename_partition_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.new_part = new_part
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_rename_partition(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or a
    # declared service exception (o1/o2); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = rename_partition_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    return
def partition_name_has_valid_characters(self, part_vals, throw_exception):
    """Issue the partition_name_has_valid_characters RPC and return its result.

    Parameters:
     - part_vals
     - throw_exception
    """
    self.send_partition_name_has_valid_characters(part_vals, throw_exception)
    return self.recv_partition_name_has_valid_characters()

def send_partition_name_has_valid_characters(self, part_vals, throw_exception):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('partition_name_has_valid_characters', TMessageType.CALL, self._seqid)
    args = partition_name_has_valid_characters_args()
    args.part_vals = part_vals
    args.throw_exception = throw_exception
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_partition_name_has_valid_characters(self, ):
    # Read the reply: raise a transport-level exception, the declared service
    # exception (o1), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = partition_name_has_valid_characters_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_has_valid_characters failed: unknown result");
def get_config_value(self, name, defaultValue):
    """Issue the get_config_value RPC and return its result.

    Parameters:
     - name
     - defaultValue
    """
    self.send_get_config_value(name, defaultValue)
    return self.recv_get_config_value()

def send_get_config_value(self, name, defaultValue):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('get_config_value', TMessageType.CALL, self._seqid)
    args = get_config_value_args()
    args.name = name
    args.defaultValue = defaultValue
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_get_config_value(self, ):
    # Read the reply: raise a transport-level exception, the declared service
    # exception (o1), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = get_config_value_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_config_value failed: unknown result");
def partition_name_to_vals(self, part_name):
    """Issue the partition_name_to_vals RPC and return its result.

    Parameters:
     - part_name
    """
    self.send_partition_name_to_vals(part_name)
    return self.recv_partition_name_to_vals()

def send_partition_name_to_vals(self, part_name):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('partition_name_to_vals', TMessageType.CALL, self._seqid)
    args = partition_name_to_vals_args()
    args.part_name = part_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_partition_name_to_vals(self, ):
    # Read the reply: raise a transport-level exception, the declared service
    # exception (o1), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = partition_name_to_vals_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_vals failed: unknown result");
def partition_name_to_spec(self, part_name):
    """Issue the partition_name_to_spec RPC and return its result.

    Parameters:
     - part_name
    """
    self.send_partition_name_to_spec(part_name)
    return self.recv_partition_name_to_spec()

def send_partition_name_to_spec(self, part_name):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('partition_name_to_spec', TMessageType.CALL, self._seqid)
    args = partition_name_to_spec_args()
    args.part_name = part_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_partition_name_to_spec(self, ):
    # Read the reply: raise a transport-level exception, the declared service
    # exception (o1), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = partition_name_to_spec_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "partition_name_to_spec failed: unknown result");
def markPartitionForEvent(self, db_name, tbl_name, part_vals, eventType):
    """Issue the markPartitionForEvent RPC and wait for completion (void RPC).

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - eventType
    """
    self.send_markPartitionForEvent(db_name, tbl_name, part_vals, eventType)
    self.recv_markPartitionForEvent()

def send_markPartitionForEvent(self, db_name, tbl_name, part_vals, eventType):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('markPartitionForEvent', TMessageType.CALL, self._seqid)
    args = markPartitionForEvent_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.eventType = eventType
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_markPartitionForEvent(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or
    # one of the six declared service exceptions (o1-o6); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = markPartitionForEvent_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    if result.o3 is not None:
        raise result.o3
    if result.o4 is not None:
        raise result.o4
    if result.o5 is not None:
        raise result.o5
    if result.o6 is not None:
        raise result.o6
    return
def isPartitionMarkedForEvent(self, db_name, tbl_name, part_vals, eventType):
    """Issue the isPartitionMarkedForEvent RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - part_vals
     - eventType
    """
    self.send_isPartitionMarkedForEvent(db_name, tbl_name, part_vals, eventType)
    return self.recv_isPartitionMarkedForEvent()

def send_isPartitionMarkedForEvent(self, db_name, tbl_name, part_vals, eventType):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('isPartitionMarkedForEvent', TMessageType.CALL, self._seqid)
    args = isPartitionMarkedForEvent_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_vals = part_vals
    args.eventType = eventType
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_isPartitionMarkedForEvent(self, ):
    # Read the reply: raise a transport-level exception, one of the six
    # declared service exceptions (o1-o6), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = isPartitionMarkedForEvent_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    if result.o3 is not None:
        raise result.o3
    if result.o4 is not None:
        raise result.o4
    if result.o5 is not None:
        raise result.o5
    if result.o6 is not None:
        raise result.o6
    raise TApplicationException(TApplicationException.MISSING_RESULT, "isPartitionMarkedForEvent failed: unknown result");
def add_index(self, new_index, index_table):
    """Issue the add_index RPC and return its result.

    Parameters:
     - new_index
     - index_table
    """
    self.send_add_index(new_index, index_table)
    return self.recv_add_index()

def send_add_index(self, new_index, index_table):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('add_index', TMessageType.CALL, self._seqid)
    args = add_index_args()
    args.new_index = new_index
    args.index_table = index_table
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_add_index(self, ):
    # Read the reply: raise a transport-level exception, one of the three
    # declared service exceptions (o1-o3), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = add_index_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    if result.o3 is not None:
        raise result.o3
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_index failed: unknown result");
def alter_index(self, dbname, base_tbl_name, idx_name, new_idx):
    """Issue the alter_index RPC and wait for completion (void RPC).

    Parameters:
     - dbname
     - base_tbl_name
     - idx_name
     - new_idx
    """
    self.send_alter_index(dbname, base_tbl_name, idx_name, new_idx)
    self.recv_alter_index()

def send_alter_index(self, dbname, base_tbl_name, idx_name, new_idx):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('alter_index', TMessageType.CALL, self._seqid)
    args = alter_index_args()
    args.dbname = dbname
    args.base_tbl_name = base_tbl_name
    args.idx_name = idx_name
    args.new_idx = new_idx
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_alter_index(self, ):
    # Read the reply for the void RPC: raise a transport-level exception or a
    # declared service exception (o1/o2); otherwise return None.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = alter_index_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    return
def drop_index_by_name(self, db_name, tbl_name, index_name, deleteData):
    """Issue the drop_index_by_name RPC and return its result.

    Parameters:
     - db_name
     - tbl_name
     - index_name
     - deleteData
    """
    self.send_drop_index_by_name(db_name, tbl_name, index_name, deleteData)
    return self.recv_drop_index_by_name()

def send_drop_index_by_name(self, db_name, tbl_name, index_name, deleteData):
    # Serialize the call arguments and flush them on the output transport.
    self._oprot.writeMessageBegin('drop_index_by_name', TMessageType.CALL, self._seqid)
    args = drop_index_by_name_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.index_name = index_name
    args.deleteData = deleteData
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

def recv_drop_index_by_name(self, ):
    # Read the reply: raise a transport-level exception, a declared service
    # exception (o1/o2), or return the success value.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = drop_index_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    if result.o1 is not None:
        raise result.o1
    if result.o2 is not None:
        raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_index_by_name failed: unknown result");
  def get_index_by_name(self, db_name, tbl_name, index_name):
    """
    Synchronous Thrift RPC 'get_index_by_name'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - index_name
    """
    self.send_get_index_by_name(db_name, tbl_name, index_name)
    return self.recv_get_index_by_name()
  # Serialize the 'get_index_by_name' CALL message and flush it on the transport.
  def send_get_index_by_name(self, db_name, tbl_name, index_name):
    self._oprot.writeMessageBegin('get_index_by_name', TMessageType.CALL, self._seqid)
    args = get_index_by_name_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.index_name = index_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_index_by_name(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_index_by_name_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_by_name failed: unknown result");
  def get_indexes(self, db_name, tbl_name, max_indexes):
    """
    Synchronous Thrift RPC 'get_indexes'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - max_indexes
    """
    self.send_get_indexes(db_name, tbl_name, max_indexes)
    return self.recv_get_indexes()
  # Serialize the 'get_indexes' CALL message and flush it on the transport.
  def send_get_indexes(self, db_name, tbl_name, max_indexes):
    self._oprot.writeMessageBegin('get_indexes', TMessageType.CALL, self._seqid)
    args = get_indexes_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_indexes = max_indexes
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_indexes(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_indexes_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_indexes failed: unknown result");
  def get_index_names(self, db_name, tbl_name, max_indexes):
    """
    Synchronous Thrift RPC 'get_index_names'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - max_indexes
    """
    self.send_get_index_names(db_name, tbl_name, max_indexes)
    return self.recv_get_index_names()
  # Serialize the 'get_index_names' CALL message and flush it on the transport.
  def send_get_index_names(self, db_name, tbl_name, max_indexes):
    self._oprot.writeMessageBegin('get_index_names', TMessageType.CALL, self._seqid)
    args = get_index_names_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.max_indexes = max_indexes
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; this RPC declares only the 'o2' exception field.
  def recv_get_index_names(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_index_names_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_index_names failed: unknown result");
  def update_table_column_statistics(self, stats_obj):
    """
    Synchronous Thrift RPC 'update_table_column_statistics'; returns the server result.

    Parameters:
     - stats_obj
    """
    self.send_update_table_column_statistics(stats_obj)
    return self.recv_update_table_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_update_table_column_statistics(self, stats_obj):
    self._oprot.writeMessageBegin('update_table_column_statistics', TMessageType.CALL, self._seqid)
    args = update_table_column_statistics_args()
    args.stats_obj = stats_obj
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_update_table_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = update_table_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_table_column_statistics failed: unknown result");
  def update_partition_column_statistics(self, stats_obj):
    """
    Synchronous Thrift RPC 'update_partition_column_statistics'; returns the server result.

    Parameters:
     - stats_obj
    """
    self.send_update_partition_column_statistics(stats_obj)
    return self.recv_update_partition_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_update_partition_column_statistics(self, stats_obj):
    self._oprot.writeMessageBegin('update_partition_column_statistics', TMessageType.CALL, self._seqid)
    args = update_partition_column_statistics_args()
    args.stats_obj = stats_obj
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_update_partition_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = update_partition_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "update_partition_column_statistics failed: unknown result");
  def get_table_column_statistics(self, db_name, tbl_name, col_name):
    """
    Synchronous Thrift RPC 'get_table_column_statistics'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - col_name
    """
    self.send_get_table_column_statistics(db_name, tbl_name, col_name)
    return self.recv_get_table_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_get_table_column_statistics(self, db_name, tbl_name, col_name):
    self._oprot.writeMessageBegin('get_table_column_statistics', TMessageType.CALL, self._seqid)
    args = get_table_column_statistics_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.col_name = col_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_get_table_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_table_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_column_statistics failed: unknown result");
  def get_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
    """
    Synchronous Thrift RPC 'get_partition_column_statistics'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - part_name
     - col_name
    """
    self.send_get_partition_column_statistics(db_name, tbl_name, part_name, col_name)
    return self.recv_get_partition_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_get_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
    self._oprot.writeMessageBegin('get_partition_column_statistics', TMessageType.CALL, self._seqid)
    args = get_partition_column_statistics_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.col_name = col_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_get_partition_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_partition_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partition_column_statistics failed: unknown result");
  def get_table_statistics_req(self, request):
    """
    Synchronous Thrift RPC 'get_table_statistics_req'; returns the server result.

    Parameters:
     - request
    """
    self.send_get_table_statistics_req(request)
    return self.recv_get_table_statistics_req()
  # Serialize the CALL message and flush it on the transport.
  def send_get_table_statistics_req(self, request):
    self._oprot.writeMessageBegin('get_table_statistics_req', TMessageType.CALL, self._seqid)
    args = get_table_statistics_req_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_table_statistics_req(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_table_statistics_req_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_table_statistics_req failed: unknown result");
  def get_partitions_statistics_req(self, request):
    """
    Synchronous Thrift RPC 'get_partitions_statistics_req'; returns the server result.

    Parameters:
     - request
    """
    self.send_get_partitions_statistics_req(request)
    return self.recv_get_partitions_statistics_req()
  # Serialize the CALL message and flush it on the transport.
  def send_get_partitions_statistics_req(self, request):
    self._oprot.writeMessageBegin('get_partitions_statistics_req', TMessageType.CALL, self._seqid)
    args = get_partitions_statistics_req_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_partitions_statistics_req(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_partitions_statistics_req_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_partitions_statistics_req failed: unknown result");
  def get_aggr_stats_for(self, request):
    """
    Synchronous Thrift RPC 'get_aggr_stats_for'; returns the server result.

    Parameters:
     - request
    """
    self.send_get_aggr_stats_for(request)
    return self.recv_get_aggr_stats_for()
  # Serialize the CALL message and flush it on the transport.
  def send_get_aggr_stats_for(self, request):
    self._oprot.writeMessageBegin('get_aggr_stats_for', TMessageType.CALL, self._seqid)
    args = get_aggr_stats_for_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_aggr_stats_for(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_aggr_stats_for_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_aggr_stats_for failed: unknown result");
  def set_aggr_stats_for(self, request):
    """
    Synchronous Thrift RPC 'set_aggr_stats_for'; returns the server result.

    Parameters:
     - request
    """
    self.send_set_aggr_stats_for(request)
    return self.recv_set_aggr_stats_for()
  # Serialize the CALL message and flush it on the transport.
  def send_set_aggr_stats_for(self, request):
    self._oprot.writeMessageBegin('set_aggr_stats_for', TMessageType.CALL, self._seqid)
    args = set_aggr_stats_for_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_set_aggr_stats_for(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = set_aggr_stats_for_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "set_aggr_stats_for failed: unknown result");
  def delete_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
    """
    Synchronous Thrift RPC 'delete_partition_column_statistics'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - part_name
     - col_name
    """
    self.send_delete_partition_column_statistics(db_name, tbl_name, part_name, col_name)
    return self.recv_delete_partition_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_delete_partition_column_statistics(self, db_name, tbl_name, part_name, col_name):
    self._oprot.writeMessageBegin('delete_partition_column_statistics', TMessageType.CALL, self._seqid)
    args = delete_partition_column_statistics_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.part_name = part_name
    args.col_name = col_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_delete_partition_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = delete_partition_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_partition_column_statistics failed: unknown result");
  def delete_table_column_statistics(self, db_name, tbl_name, col_name):
    """
    Synchronous Thrift RPC 'delete_table_column_statistics'; returns the server result.

    Parameters:
     - db_name
     - tbl_name
     - col_name
    """
    self.send_delete_table_column_statistics(db_name, tbl_name, col_name)
    return self.recv_delete_table_column_statistics()
  # Serialize the CALL message and flush it on the transport.
  def send_delete_table_column_statistics(self, db_name, tbl_name, col_name):
    self._oprot.writeMessageBegin('delete_table_column_statistics', TMessageType.CALL, self._seqid)
    args = delete_table_column_statistics_args()
    args.db_name = db_name
    args.tbl_name = tbl_name
    args.col_name = col_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return success or raise one of the four declared exceptions.
  def recv_delete_table_column_statistics(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = delete_table_column_statistics_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    raise TApplicationException(TApplicationException.MISSING_RESULT, "delete_table_column_statistics failed: unknown result");
  def create_function(self, func):
    """
    Synchronous Thrift RPC 'create_function' (no return value).

    Parameters:
     - func
    """
    self.send_create_function(func)
    self.recv_create_function()
  # Serialize the 'create_function' CALL message and flush it on the transport.
  def send_create_function(self, func):
    self._oprot.writeMessageBegin('create_function', TMessageType.CALL, self._seqid)
    args = create_function_args()
    args.func = func
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; raise any of the four declared exceptions, else return None.
  def recv_create_function(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = create_function_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    if result.o3 is not None:
      raise result.o3
    if result.o4 is not None:
      raise result.o4
    return
  def drop_function(self, dbName, funcName):
    """
    Synchronous Thrift RPC 'drop_function' (no return value).

    Parameters:
     - dbName
     - funcName
    """
    self.send_drop_function(dbName, funcName)
    self.recv_drop_function()
  # Serialize the 'drop_function' CALL message and flush it on the transport.
  def send_drop_function(self, dbName, funcName):
    self._oprot.writeMessageBegin('drop_function', TMessageType.CALL, self._seqid)
    args = drop_function_args()
    args.dbName = dbName
    args.funcName = funcName
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; this RPC declares only the 'o1' and 'o3' exception fields.
  def recv_drop_function(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_function_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
      raise result.o1
    if result.o3 is not None:
      raise result.o3
    return
  def alter_function(self, dbName, funcName, newFunc):
    """
    Synchronous Thrift RPC 'alter_function' (no return value).

    Parameters:
     - dbName
     - funcName
     - newFunc
    """
    self.send_alter_function(dbName, funcName, newFunc)
    self.recv_alter_function()
  # Serialize the 'alter_function' CALL message and flush it on the transport.
  def send_alter_function(self, dbName, funcName, newFunc):
    self._oprot.writeMessageBegin('alter_function', TMessageType.CALL, self._seqid)
    args = alter_function_args()
    args.dbName = dbName
    args.funcName = funcName
    args.newFunc = newFunc
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; raise any declared exception, else return None.
  def recv_alter_function(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = alter_function_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    return
  def get_functions(self, dbName, pattern):
    """
    Synchronous Thrift RPC 'get_functions'; returns the server result.

    Parameters:
     - dbName
     - pattern
    """
    self.send_get_functions(dbName, pattern)
    return self.recv_get_functions()
  # Serialize the 'get_functions' CALL message and flush it on the transport.
  def send_get_functions(self, dbName, pattern):
    self._oprot.writeMessageBegin('get_functions', TMessageType.CALL, self._seqid)
    args = get_functions_args()
    args.dbName = dbName
    args.pattern = pattern
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_get_functions(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_functions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_functions failed: unknown result");
  def get_function(self, dbName, funcName):
    """
    Synchronous Thrift RPC 'get_function'; returns the server result.

    Parameters:
     - dbName
     - funcName
    """
    self.send_get_function(dbName, funcName)
    return self.recv_get_function()
  # Serialize the 'get_function' CALL message and flush it on the transport.
  def send_get_function(self, dbName, funcName):
    self._oprot.writeMessageBegin('get_function', TMessageType.CALL, self._seqid)
    args = get_function_args()
    args.dbName = dbName
    args.funcName = funcName
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise a declared exception.
  def recv_get_function(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_function_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    if result.o2 is not None:
      raise result.o2
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_function failed: unknown result");
  def create_role(self, role):
    """
    Synchronous Thrift RPC 'create_role'; returns the server result.

    Parameters:
     - role
    """
    self.send_create_role(role)
    return self.recv_create_role()
  # Serialize the 'create_role' CALL message and flush it on the transport.
  def send_create_role(self, role):
    self._oprot.writeMessageBegin('create_role', TMessageType.CALL, self._seqid)
    args = create_role_args()
    args.role = role
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_create_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = create_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "create_role failed: unknown result");
  def drop_role(self, role_name):
    """
    Synchronous Thrift RPC 'drop_role'; returns the server result.

    Parameters:
     - role_name
    """
    self.send_drop_role(role_name)
    return self.recv_drop_role()
  # Serialize the 'drop_role' CALL message and flush it on the transport.
  def send_drop_role(self, role_name):
    self._oprot.writeMessageBegin('drop_role', TMessageType.CALL, self._seqid)
    args = drop_role_args()
    args.role_name = role_name
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_drop_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = drop_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_role failed: unknown result");
  def get_role_names(self, ):
    """Synchronous Thrift RPC 'get_role_names' (no arguments); returns the server result."""
    self.send_get_role_names()
    return self.recv_get_role_names()
  # Serialize the 'get_role_names' CALL message and flush it on the transport.
  def send_get_role_names(self, ):
    self._oprot.writeMessageBegin('get_role_names', TMessageType.CALL, self._seqid)
    args = get_role_names_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_get_role_names(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_role_names_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_names failed: unknown result");
  def grant_role(self, role_name, principal_name, principal_type, grantor, grantorType, grant_option):
    """
    Synchronous Thrift RPC 'grant_role'; returns the server result.

    Parameters:
     - role_name
     - principal_name
     - principal_type
     - grantor
     - grantorType
     - grant_option
    """
    self.send_grant_role(role_name, principal_name, principal_type, grantor, grantorType, grant_option)
    return self.recv_grant_role()
  # Serialize the 'grant_role' CALL message and flush it on the transport.
  def send_grant_role(self, role_name, principal_name, principal_type, grantor, grantorType, grant_option):
    self._oprot.writeMessageBegin('grant_role', TMessageType.CALL, self._seqid)
    args = grant_role_args()
    args.role_name = role_name
    args.principal_name = principal_name
    args.principal_type = principal_type
    args.grantor = grantor
    args.grantorType = grantorType
    args.grant_option = grant_option
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_grant_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = grant_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_role failed: unknown result");
  def revoke_role(self, role_name, principal_name, principal_type):
    """
    Synchronous Thrift RPC 'revoke_role'; returns the server result.

    Parameters:
     - role_name
     - principal_name
     - principal_type
    """
    self.send_revoke_role(role_name, principal_name, principal_type)
    return self.recv_revoke_role()
  # Serialize the 'revoke_role' CALL message and flush it on the transport.
  def send_revoke_role(self, role_name, principal_name, principal_type):
    self._oprot.writeMessageBegin('revoke_role', TMessageType.CALL, self._seqid)
    args = revoke_role_args()
    args.role_name = role_name
    args.principal_name = principal_name
    args.principal_type = principal_type
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_revoke_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = revoke_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_role failed: unknown result");
  def list_roles(self, principal_name, principal_type):
    """
    Synchronous Thrift RPC 'list_roles'; returns the server result.

    Parameters:
     - principal_name
     - principal_type
    """
    self.send_list_roles(principal_name, principal_type)
    return self.recv_list_roles()
  # Serialize the 'list_roles' CALL message and flush it on the transport.
  def send_list_roles(self, principal_name, principal_type):
    self._oprot.writeMessageBegin('list_roles', TMessageType.CALL, self._seqid)
    args = list_roles_args()
    args.principal_name = principal_name
    args.principal_type = principal_type
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_list_roles(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = list_roles_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_roles failed: unknown result");
  def grant_revoke_role(self, request):
    """
    Synchronous Thrift RPC 'grant_revoke_role'; returns the server result.

    Parameters:
     - request
    """
    self.send_grant_revoke_role(request)
    return self.recv_grant_revoke_role()
  # Serialize the 'grant_revoke_role' CALL message and flush it on the transport.
  def send_grant_revoke_role(self, request):
    self._oprot.writeMessageBegin('grant_revoke_role', TMessageType.CALL, self._seqid)
    args = grant_revoke_role_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_grant_revoke_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = grant_revoke_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_role failed: unknown result");
  def get_principals_in_role(self, request):
    """
    Synchronous Thrift RPC 'get_principals_in_role'; returns the server result.

    Parameters:
     - request
    """
    self.send_get_principals_in_role(request)
    return self.recv_get_principals_in_role()
  # Serialize the CALL message and flush it on the transport.
  def send_get_principals_in_role(self, request):
    self._oprot.writeMessageBegin('get_principals_in_role', TMessageType.CALL, self._seqid)
    args = get_principals_in_role_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_get_principals_in_role(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_principals_in_role_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_principals_in_role failed: unknown result");
  def get_role_grants_for_principal(self, request):
    """
    Synchronous Thrift RPC 'get_role_grants_for_principal'; returns the server result.

    Parameters:
     - request
    """
    self.send_get_role_grants_for_principal(request)
    return self.recv_get_role_grants_for_principal()
  # Serialize the CALL message and flush it on the transport.
  def send_get_role_grants_for_principal(self, request):
    self._oprot.writeMessageBegin('get_role_grants_for_principal', TMessageType.CALL, self._seqid)
    args = get_role_grants_for_principal_args()
    args.request = request
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_get_role_grants_for_principal(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_role_grants_for_principal_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_role_grants_for_principal failed: unknown result");
  def get_privilege_set(self, hiveObject, user_name, group_names):
    """
    Synchronous Thrift RPC 'get_privilege_set'; returns the server result.

    Parameters:
     - hiveObject
     - user_name
     - group_names
    """
    self.send_get_privilege_set(hiveObject, user_name, group_names)
    return self.recv_get_privilege_set()
  # Serialize the 'get_privilege_set' CALL message and flush it on the transport.
  def send_get_privilege_set(self, hiveObject, user_name, group_names):
    self._oprot.writeMessageBegin('get_privilege_set', TMessageType.CALL, self._seqid)
    args = get_privilege_set_args()
    args.hiveObject = hiveObject
    args.user_name = user_name
    args.group_names = group_names
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_get_privilege_set(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_privilege_set_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_privilege_set failed: unknown result");
  def list_privileges(self, principal_name, principal_type, hiveObject):
    """
    Synchronous Thrift RPC 'list_privileges'; returns the server result.

    Parameters:
     - principal_name
     - principal_type
     - hiveObject
    """
    self.send_list_privileges(principal_name, principal_type, hiveObject)
    return self.recv_list_privileges()
  # Serialize the 'list_privileges' CALL message and flush it on the transport.
  def send_list_privileges(self, principal_name, principal_type, hiveObject):
    self._oprot.writeMessageBegin('list_privileges', TMessageType.CALL, self._seqid)
    args = list_privileges_args()
    args.principal_name = principal_name
    args.principal_type = principal_type
    args.hiveObject = hiveObject
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_list_privileges(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = list_privileges_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "list_privileges failed: unknown result");
  def grant_privileges(self, privileges):
    """
    Synchronous Thrift RPC 'grant_privileges'; returns the server result.

    Parameters:
     - privileges
    """
    self.send_grant_privileges(privileges)
    return self.recv_grant_privileges()
  # Serialize the 'grant_privileges' CALL message and flush it on the transport.
  def send_grant_privileges(self, privileges):
    self._oprot.writeMessageBegin('grant_privileges', TMessageType.CALL, self._seqid)
    args = grant_privileges_args()
    args.privileges = privileges
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_grant_privileges(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = grant_privileges_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_privileges failed: unknown result");
  def revoke_privileges(self, privileges):
    """
    Synchronous Thrift RPC 'revoke_privileges'; returns the server result.

    Parameters:
     - privileges
    """
    self.send_revoke_privileges(privileges)
    return self.recv_revoke_privileges()
  # Serialize the 'revoke_privileges' CALL message and flush it on the transport.
  def send_revoke_privileges(self, privileges):
    self._oprot.writeMessageBegin('revoke_privileges', TMessageType.CALL, self._seqid)
    args = revoke_privileges_args()
    args.privileges = privileges
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  # Read the reply; return the success value or raise the declared exception.
  def recv_revoke_privileges(self, ):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = revoke_privileges_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.o1 is not None:
      raise result.o1
    raise TApplicationException(TApplicationException.MISSING_RESULT, "revoke_privileges failed: unknown result");
def grant_revoke_privileges(self, request):
  """Invoke the remote grant_revoke_privileges RPC.

  Parameters:
   - request
  """
  self.send_grant_revoke_privileges(request)
  return self.recv_grant_revoke_privileges()
def send_grant_revoke_privileges(self, request):
  """Serialize and transmit a grant_revoke_privileges request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('grant_revoke_privileges', TMessageType.CALL, self._seqid)
  call_args = grant_revoke_privileges_args()
  call_args.request = request
  call_args.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_grant_revoke_privileges(self, ):
  """Read the grant_revoke_privileges reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = grant_revoke_privileges_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  if reply.o1 is not None:
    raise reply.o1
  raise TApplicationException(TApplicationException.MISSING_RESULT, "grant_revoke_privileges failed: unknown result")
def set_ugi(self, user_name, group_names):
  """Invoke the remote set_ugi RPC.

  Parameters:
   - user_name
   - group_names
  """
  self.send_set_ugi(user_name, group_names)
  return self.recv_set_ugi()
def send_set_ugi(self, user_name, group_names):
  """Serialize and transmit a set_ugi request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('set_ugi', TMessageType.CALL, self._seqid)
  request = set_ugi_args()
  request.user_name = user_name
  request.group_names = group_names
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_set_ugi(self, ):
  """Read the set_ugi reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = set_ugi_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  if reply.o1 is not None:
    raise reply.o1
  raise TApplicationException(TApplicationException.MISSING_RESULT, "set_ugi failed: unknown result")
def get_delegation_token(self, token_owner, renewer_kerberos_principal_name):
  """Invoke the remote get_delegation_token RPC.

  Parameters:
   - token_owner
   - renewer_kerberos_principal_name
  """
  self.send_get_delegation_token(token_owner, renewer_kerberos_principal_name)
  return self.recv_get_delegation_token()
def send_get_delegation_token(self, token_owner, renewer_kerberos_principal_name):
  """Serialize and transmit a get_delegation_token request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('get_delegation_token', TMessageType.CALL, self._seqid)
  request = get_delegation_token_args()
  request.token_owner = token_owner
  request.renewer_kerberos_principal_name = renewer_kerberos_principal_name
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_get_delegation_token(self, ):
  """Read the get_delegation_token reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = get_delegation_token_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  if reply.o1 is not None:
    raise reply.o1
  raise TApplicationException(TApplicationException.MISSING_RESULT, "get_delegation_token failed: unknown result")
def renew_delegation_token(self, token_str_form):
  """Invoke the remote renew_delegation_token RPC.

  Parameters:
   - token_str_form
  """
  self.send_renew_delegation_token(token_str_form)
  return self.recv_renew_delegation_token()
def send_renew_delegation_token(self, token_str_form):
  """Serialize and transmit a renew_delegation_token request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('renew_delegation_token', TMessageType.CALL, self._seqid)
  request = renew_delegation_token_args()
  request.token_str_form = token_str_form
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_renew_delegation_token(self, ):
  """Read the renew_delegation_token reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = renew_delegation_token_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  if reply.o1 is not None:
    raise reply.o1
  raise TApplicationException(TApplicationException.MISSING_RESULT, "renew_delegation_token failed: unknown result")
def cancel_delegation_token(self, token_str_form):
  """Invoke the remote cancel_delegation_token RPC (void result).

  Parameters:
   - token_str_form
  """
  self.send_cancel_delegation_token(token_str_form)
  self.recv_cancel_delegation_token()
def send_cancel_delegation_token(self, token_str_form):
  """Serialize and transmit a cancel_delegation_token request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('cancel_delegation_token', TMessageType.CALL, self._seqid)
  request = cancel_delegation_token_args()
  request.token_str_form = token_str_form
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_cancel_delegation_token(self, ):
  """Read the cancel_delegation_token reply; void on success."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = cancel_delegation_token_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.o1 is not None:
    raise reply.o1
  return
def get_open_txns(self, ):
  """Invoke the remote get_open_txns RPC and return its result."""
  self.send_get_open_txns()
  return self.recv_get_open_txns()
def send_get_open_txns(self, ):
  """Serialize and transmit a get_open_txns request frame (no arguments)."""
  oprot = self._oprot
  oprot.writeMessageBegin('get_open_txns', TMessageType.CALL, self._seqid)
  request = get_open_txns_args()
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_get_open_txns(self, ):
  """Read the get_open_txns reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = get_open_txns_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns failed: unknown result")
def get_open_txns_info(self, ):
  """Invoke the remote get_open_txns_info RPC and return its result."""
  self.send_get_open_txns_info()
  return self.recv_get_open_txns_info()
def send_get_open_txns_info(self, ):
  """Serialize and transmit a get_open_txns_info request frame (no arguments)."""
  oprot = self._oprot
  oprot.writeMessageBegin('get_open_txns_info', TMessageType.CALL, self._seqid)
  request = get_open_txns_info_args()
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_get_open_txns_info(self, ):
  """Read the get_open_txns_info reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = get_open_txns_info_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "get_open_txns_info failed: unknown result")
def open_txns(self, rqst):
  """Invoke the remote open_txns RPC.

  Parameters:
   - rqst
  """
  self.send_open_txns(rqst)
  return self.recv_open_txns()
def send_open_txns(self, rqst):
  """Serialize and transmit an open_txns request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('open_txns', TMessageType.CALL, self._seqid)
  request = open_txns_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_open_txns(self, ):
  """Read the open_txns reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = open_txns_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "open_txns failed: unknown result")
def abort_txn(self, rqst):
  """Invoke the remote abort_txn RPC (void result).

  Parameters:
   - rqst
  """
  self.send_abort_txn(rqst)
  self.recv_abort_txn()
def send_abort_txn(self, rqst):
  """Serialize and transmit an abort_txn request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('abort_txn', TMessageType.CALL, self._seqid)
  request = abort_txn_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_abort_txn(self, ):
  """Read the abort_txn reply; void on success."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = abort_txn_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.o1 is not None:
    raise reply.o1
  return
def commit_txn(self, rqst):
  """Invoke the remote commit_txn RPC (void result).

  Parameters:
   - rqst
  """
  self.send_commit_txn(rqst)
  self.recv_commit_txn()
def send_commit_txn(self, rqst):
  """Serialize and transmit a commit_txn request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('commit_txn', TMessageType.CALL, self._seqid)
  request = commit_txn_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_commit_txn(self, ):
  """Read the commit_txn reply; void on success."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = commit_txn_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  # Declared exceptions are checked in field order (o1 before o2).
  if reply.o1 is not None:
    raise reply.o1
  if reply.o2 is not None:
    raise reply.o2
  return
def lock(self, rqst):
  """Invoke the remote lock RPC.

  Parameters:
   - rqst
  """
  self.send_lock(rqst)
  return self.recv_lock()
def send_lock(self, rqst):
  """Serialize and transmit a lock request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('lock', TMessageType.CALL, self._seqid)
  request = lock_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_lock(self, ):
  """Read the lock reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = lock_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  if reply.o1 is not None:
    raise reply.o1
  if reply.o2 is not None:
    raise reply.o2
  raise TApplicationException(TApplicationException.MISSING_RESULT, "lock failed: unknown result")
def check_lock(self, rqst):
  """Invoke the remote check_lock RPC.

  Parameters:
   - rqst
  """
  self.send_check_lock(rqst)
  return self.recv_check_lock()
def send_check_lock(self, rqst):
  """Serialize and transmit a check_lock request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('check_lock', TMessageType.CALL, self._seqid)
  request = check_lock_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_check_lock(self, ):
  """Read the check_lock reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = check_lock_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  # Declared exceptions are checked in field order (o1, o2, o3).
  if reply.o1 is not None:
    raise reply.o1
  if reply.o2 is not None:
    raise reply.o2
  if reply.o3 is not None:
    raise reply.o3
  raise TApplicationException(TApplicationException.MISSING_RESULT, "check_lock failed: unknown result")
def unlock(self, rqst):
  """Invoke the remote unlock RPC (void result).

  Parameters:
   - rqst
  """
  self.send_unlock(rqst)
  self.recv_unlock()
def send_unlock(self, rqst):
  """Serialize and transmit an unlock request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('unlock', TMessageType.CALL, self._seqid)
  request = unlock_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_unlock(self, ):
  """Read the unlock reply; void on success."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = unlock_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.o1 is not None:
    raise reply.o1
  if reply.o2 is not None:
    raise reply.o2
  return
def show_locks(self, rqst):
  """Invoke the remote show_locks RPC.

  Parameters:
   - rqst
  """
  self.send_show_locks(rqst)
  return self.recv_show_locks()
def send_show_locks(self, rqst):
  """Serialize and transmit a show_locks request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('show_locks', TMessageType.CALL, self._seqid)
  request = show_locks_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_show_locks(self, ):
  """Read the show_locks reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = show_locks_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "show_locks failed: unknown result")
def heartbeat(self, ids):
  """Invoke the remote heartbeat RPC (void result).

  Parameters:
   - ids
  """
  self.send_heartbeat(ids)
  self.recv_heartbeat()
def send_heartbeat(self, ids):
  """Serialize and transmit a heartbeat request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('heartbeat', TMessageType.CALL, self._seqid)
  request = heartbeat_args()
  request.ids = ids
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_heartbeat(self, ):
  """Read the heartbeat reply; void on success."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = heartbeat_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  # Declared exceptions are checked in field order (o1, o2, o3).
  if reply.o1 is not None:
    raise reply.o1
  if reply.o2 is not None:
    raise reply.o2
  if reply.o3 is not None:
    raise reply.o3
  return
def heartbeat_txn_range(self, txns):
  """Invoke the remote heartbeat_txn_range RPC.

  Parameters:
   - txns
  """
  self.send_heartbeat_txn_range(txns)
  return self.recv_heartbeat_txn_range()
def send_heartbeat_txn_range(self, txns):
  """Serialize and transmit a heartbeat_txn_range request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('heartbeat_txn_range', TMessageType.CALL, self._seqid)
  request = heartbeat_txn_range_args()
  request.txns = txns
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_heartbeat_txn_range(self, ):
  """Read the heartbeat_txn_range reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = heartbeat_txn_range_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "heartbeat_txn_range failed: unknown result")
def compact(self, rqst):
  """Invoke the remote compact RPC (void result, no declared exceptions).

  Parameters:
   - rqst
  """
  self.send_compact(rqst)
  self.recv_compact()
def send_compact(self, rqst):
  """Serialize and transmit a compact request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('compact', TMessageType.CALL, self._seqid)
  request = compact_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_compact(self, ):
  """Read the compact reply; the result struct carries no fields."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = compact_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  return
def show_compact(self, rqst):
  """Invoke the remote show_compact RPC.

  Parameters:
   - rqst
  """
  self.send_show_compact(rqst)
  return self.recv_show_compact()
def send_show_compact(self, rqst):
  """Serialize and transmit a show_compact request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('show_compact', TMessageType.CALL, self._seqid)
  request = show_compact_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_show_compact(self, ):
  """Read the show_compact reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = show_compact_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "show_compact failed: unknown result")
def get_next_notification(self, rqst):
  """Invoke the remote get_next_notification RPC.

  Parameters:
   - rqst
  """
  self.send_get_next_notification(rqst)
  return self.recv_get_next_notification()
def send_get_next_notification(self, rqst):
  """Serialize and transmit a get_next_notification request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('get_next_notification', TMessageType.CALL, self._seqid)
  request = get_next_notification_args()
  request.rqst = rqst
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_get_next_notification(self, ):
  """Read the get_next_notification reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = get_next_notification_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "get_next_notification failed: unknown result")
def get_current_notificationEventId(self, ):
  """Invoke the remote get_current_notificationEventId RPC and return its result."""
  self.send_get_current_notificationEventId()
  return self.recv_get_current_notificationEventId()
def send_get_current_notificationEventId(self, ):
  """Serialize and transmit a get_current_notificationEventId request frame."""
  oprot = self._oprot
  oprot.writeMessageBegin('get_current_notificationEventId', TMessageType.CALL, self._seqid)
  request = get_current_notificationEventId_args()
  request.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def recv_get_current_notificationEventId(self, ):
  """Read the get_current_notificationEventId reply, raising any server-side exception."""
  iprot = self._iprot
  (fname, mtype, rseqid) = iprot.readMessageBegin()
  if mtype == TMessageType.EXCEPTION:
    err = TApplicationException()
    err.read(iprot)
    iprot.readMessageEnd()
    raise err
  reply = get_current_notificationEventId_result()
  reply.read(iprot)
  iprot.readMessageEnd()
  if reply.success is not None:
    return reply.success
  raise TApplicationException(TApplicationException.MISSING_RESULT, "get_current_notificationEventId failed: unknown result")
class Processor(fb303.FacebookService.Processor, Iface, TProcessor):
def __init__(self, handler):
fb303.FacebookService.Processor.__init__(self, handler)
self._processMap["getMetaConf"] = Processor.process_getMetaConf
self._processMap["setMetaConf"] = Processor.process_setMetaConf
self._processMap["create_database"] = Processor.process_create_database
self._processMap["get_database"] = Processor.process_get_database
self._processMap["drop_database"] = Processor.process_drop_database
self._processMap["get_databases"] = Processor.process_get_databases
self._processMap["get_all_databases"] = Processor.process_get_all_databases
self._processMap["alter_database"] = Processor.process_alter_database
self._processMap["get_type"] = Processor.process_get_type
self._processMap["create_type"] = Processor.process_create_type
self._processMap["drop_type"] = Processor.process_drop_type
self._processMap["get_type_all"] = Processor.process_get_type_all
self._processMap["get_fields"] = Processor.process_get_fields
self._processMap["get_fields_with_environment_context"] = Processor.process_get_fields_with_environment_context
self._processMap["get_schema"] = Processor.process_get_schema
self._processMap["get_schema_with_environment_context"] = Processor.process_get_schema_with_environment_context
self._processMap["create_table"] = Processor.process_create_table
self._processMap["create_table_with_environment_context"] = Processor.process_create_table_with_environment_context
self._processMap["drop_table"] = Processor.process_drop_table
self._processMap["drop_table_with_environment_context"] = Processor.process_drop_table_with_environment_context
self._processMap["get_tables"] = Processor.process_get_tables
self._processMap["get_all_tables"] = Processor.process_get_all_tables
self._processMap["get_table"] = Processor.process_get_table
self._processMap["get_table_objects_by_name"] = Processor.process_get_table_objects_by_name
self._processMap["get_table_names_by_filter"] = Processor.process_get_table_names_by_filter
self._processMap["alter_table"] = Processor.process_alter_table
self._processMap["alter_table_with_environment_context"] = Processor.process_alter_table_with_environment_context
self._processMap["alter_table_with_cascade"] = Processor.process_alter_table_with_cascade
self._processMap["add_partition"] = Processor.process_add_partition
self._processMap["add_partition_with_environment_context"] = Processor.process_add_partition_with_environment_context
self._processMap["add_partitions"] = Processor.process_add_partitions
self._processMap["add_partitions_pspec"] = Processor.process_add_partitions_pspec
self._processMap["append_partition"] = Processor.process_append_partition
self._processMap["add_partitions_req"] = Processor.process_add_partitions_req
self._processMap["append_partition_with_environment_context"] = Processor.process_append_partition_with_environment_context
self._processMap["append_partition_by_name"] = Processor.process_append_partition_by_name
self._processMap["append_partition_by_name_with_environment_context"] = Processor.process_append_partition_by_name_with_environment_context
self._processMap["drop_partition"] = Processor.process_drop_partition
self._processMap["drop_partition_with_environment_context"] = Processor.process_drop_partition_with_environment_context
self._processMap["drop_partition_by_name"] = Processor.process_drop_partition_by_name
self._processMap["drop_partition_by_name_with_environment_context"] = Processor.process_drop_partition_by_name_with_environment_context
self._processMap["drop_partitions_req"] = Processor.process_drop_partitions_req
self._processMap["get_partition"] = Processor.process_get_partition
self._processMap["exchange_partition"] = Processor.process_exchange_partition
self._processMap["get_partition_with_auth"] = Processor.process_get_partition_with_auth
self._processMap["get_partition_by_name"] = Processor.process_get_partition_by_name
self._processMap["get_partitions"] = Processor.process_get_partitions
self._processMap["get_partitions_with_auth"] = Processor.process_get_partitions_with_auth
self._processMap["get_partitions_pspec"] = Processor.process_get_partitions_pspec
self._processMap["get_partition_names"] = Processor.process_get_partition_names
self._processMap["get_partitions_ps"] = Processor.process_get_partitions_ps
self._processMap["get_partitions_ps_with_auth"] = Processor.process_get_partitions_ps_with_auth
self._processMap["get_partition_names_ps"] = Processor.process_get_partition_names_ps
self._processMap["get_partitions_by_filter"] = Processor.process_get_partitions_by_filter
self._processMap["get_part_specs_by_filter"] = Processor.process_get_part_specs_by_filter
self._processMap["get_partitions_by_expr"] = Processor.process_get_partitions_by_expr
self._processMap["get_partitions_by_names"] = Processor.process_get_partitions_by_names
self._processMap["alter_partition"] = Processor.process_alter_partition
self._processMap["alter_partitions"] = Processor.process_alter_partitions
self._processMap["alter_partition_with_environment_context"] = Processor.process_alter_partition_with_environment_context
self._processMap["rename_partition"] = Processor.process_rename_partition
self._processMap["partition_name_has_valid_characters"] = Processor.process_partition_name_has_valid_characters
self._processMap["get_config_value"] = Processor.process_get_config_value
self._processMap["partition_name_to_vals"] = Processor.process_partition_name_to_vals
self._processMap["partition_name_to_spec"] = Processor.process_partition_name_to_spec
self._processMap["markPartitionForEvent"] = Processor.process_markPartitionForEvent
self._processMap["isPartitionMarkedForEvent"] = Processor.process_isPartitionMarkedForEvent
self._processMap["add_index"] = Processor.process_add_index
self._processMap["alter_index"] = Processor.process_alter_index
self._processMap["drop_index_by_name"] = Processor.process_drop_index_by_name
self._processMap["get_index_by_name"] = Processor.process_get_index_by_name
self._processMap["get_indexes"] = Processor.process_get_indexes
self._processMap["get_index_names"] = Processor.process_get_index_names
self._processMap["update_table_column_statistics"] = Processor.process_update_table_column_statistics
self._processMap["update_partition_column_statistics"] = Processor.process_update_partition_column_statistics
self._processMap["get_table_column_statistics"] = Processor.process_get_table_column_statistics
self._processMap["get_partition_column_statistics"] = Processor.process_get_partition_column_statistics
self._processMap["get_table_statistics_req"] = Processor.process_get_table_statistics_req
self._processMap["get_partitions_statistics_req"] = Processor.process_get_partitions_statistics_req
self._processMap["get_aggr_stats_for"] = Processor.process_get_aggr_stats_for
self._processMap["set_aggr_stats_for"] = Processor.process_set_aggr_stats_for
self._processMap["delete_partition_column_statistics"] = Processor.process_delete_partition_column_statistics
self._processMap["delete_table_column_statistics"] = Processor.process_delete_table_column_statistics
self._processMap["create_function"] = Processor.process_create_function
self._processMap["drop_function"] = Processor.process_drop_function
self._processMap["alter_function"] = Processor.process_alter_function
self._processMap["get_functions"] = Processor.process_get_functions
self._processMap["get_function"] = Processor.process_get_function
self._processMap["create_role"] = Processor.process_create_role
self._processMap["drop_role"] = Processor.process_drop_role
self._processMap["get_role_names"] = Processor.process_get_role_names
self._processMap["grant_role"] = Processor.process_grant_role
self._processMap["revoke_role"] = Processor.process_revoke_role
self._processMap["list_roles"] = Processor.process_list_roles
self._processMap["grant_revoke_role"] = Processor.process_grant_revoke_role
self._processMap["get_principals_in_role"] = Processor.process_get_principals_in_role
self._processMap["get_role_grants_for_principal"] = Processor.process_get_role_grants_for_principal
self._processMap["get_privilege_set"] = Processor.process_get_privilege_set
self._processMap["list_privileges"] = Processor.process_list_privileges
self._processMap["grant_privileges"] = Processor.process_grant_privileges
self._processMap["revoke_privileges"] = Processor.process_revoke_privileges
self._processMap["grant_revoke_privileges"] = Processor.process_grant_revoke_privileges
self._processMap["set_ugi"] = Processor.process_set_ugi
self._processMap["get_delegation_token"] = Processor.process_get_delegation_token
self._processMap["renew_delegation_token"] = Processor.process_renew_delegation_token
self._processMap["cancel_delegation_token"] = Processor.process_cancel_delegation_token
self._processMap["get_open_txns"] = Processor.process_get_open_txns
self._processMap["get_open_txns_info"] = Processor.process_get_open_txns_info
self._processMap["open_txns"] = Processor.process_open_txns
self._processMap["abort_txn"] = Processor.process_abort_txn
self._processMap["commit_txn"] = Processor.process_commit_txn
self._processMap["lock"] = Processor.process_lock
self._processMap["check_lock"] = Processor.process_check_lock
self._processMap["unlock"] = Processor.process_unlock
self._processMap["show_locks"] = Processor.process_show_locks
self._processMap["heartbeat"] = Processor.process_heartbeat
self._processMap["heartbeat_txn_range"] = Processor.process_heartbeat_txn_range
self._processMap["compact"] = Processor.process_compact
self._processMap["show_compact"] = Processor.process_show_compact
self._processMap["get_next_notification"] = Processor.process_get_next_notification
self._processMap["get_current_notificationEventId"] = Processor.process_get_current_notificationEventId
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_getMetaConf(self, seqid, iprot, oprot):
args = getMetaConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getMetaConf_result()
try:
result.success = self._handler.getMetaConf(args.key)
except MetaException as o1:
result.o1 = o1
oprot.writeMessageBegin("getMetaConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_setMetaConf(self, seqid, iprot, oprot):
args = setMetaConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = setMetaConf_result()
try:
self._handler.setMetaConf(args.key, args.value)
except MetaException as o1:
result.o1 = o1
oprot.writeMessageBegin("setMetaConf", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_create_database(self, seqid, iprot, oprot):
args = create_database_args()
args.read(iprot)
iprot.readMessageEnd()
result = create_database_result()
try:
self._handler.create_database(args.database)
except AlreadyExistsException as o1:
result.o1 = o1
except InvalidObjectException as o2:
result.o2 = o2
except MetaException as o3:
result.o3 = o3
oprot.writeMessageBegin("create_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_database(self, seqid, iprot, oprot):
args = get_database_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_database_result()
try:
result.success = self._handler.get_database(args.name)
except NoSuchObjectException as o1:
result.o1 = o1
except MetaException as o2:
result.o2 = o2
oprot.writeMessageBegin("get_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_drop_database(self, seqid, iprot, oprot):
args = drop_database_args()
args.read(iprot)
iprot.readMessageEnd()
result = drop_database_result()
try:
self._handler.drop_database(args.name, args.deleteData, args.cascade)
except NoSuchObjectException as o1:
result.o1 = o1
except InvalidOperationException as o2:
result.o2 = o2
except MetaException as o3:
result.o3 = o3
oprot.writeMessageBegin("drop_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_databases(self, seqid, iprot, oprot):
args = get_databases_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_databases_result()
try:
result.success = self._handler.get_databases(args.pattern)
except MetaException as o1:
result.o1 = o1
oprot.writeMessageBegin("get_databases", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_all_databases(self, seqid, iprot, oprot):
args = get_all_databases_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_all_databases_result()
try:
result.success = self._handler.get_all_databases()
except MetaException as o1:
result.o1 = o1
oprot.writeMessageBegin("get_all_databases", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_alter_database(self, seqid, iprot, oprot):
args = alter_database_args()
args.read(iprot)
iprot.readMessageEnd()
result = alter_database_result()
try:
self._handler.alter_database(args.dbname, args.db)
except MetaException as o1:
result.o1 = o1
except NoSuchObjectException as o2:
result.o2 = o2
oprot.writeMessageBegin("alter_database", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_get_type(self, seqid, iprot, oprot):
args = get_type_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_type_result()
try:
result.success = self._handler.get_type(args.name)
except MetaException as o1:
result.o1 = o1
except NoSuchObjectException as o2:
result.o2 = o2
oprot.writeMessageBegin("get_type", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_create_type(self, seqid, iprot, oprot):
args = create_type_args()
args.read(iprot)
iprot.readMessageEnd()
result = create_type_result()
try:
result.success = self._handler.create_type(args.type)
except AlreadyExistsException as o1:
result.o1 = o1
except InvalidObjectException as o2:
result.o2 = o2
except MetaException as o3:
result.o3 = o3
oprot.writeMessageBegin("create_type", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_drop_type(self, seqid, iprot, oprot):
    """Serve one 'drop_type' RPC: decode args, call the handler, write the REPLY."""
    args = drop_type_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_type_result()
    try:
        result.success = self._handler.drop_type(args.type)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_type", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_type_all(self, seqid, iprot, oprot):
    """Serve one 'get_type_all' RPC: decode args, call the handler, write the REPLY."""
    args = get_type_all_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_type_all_result()
    try:
        result.success = self._handler.get_type_all(args.name)
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_type_all", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_fields(self, seqid, iprot, oprot):
    """Serve one 'get_fields' RPC: decode args, call the handler, write the REPLY."""
    args = get_fields_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_fields_result()
    try:
        result.success = self._handler.get_fields(args.db_name, args.table_name)
    except MetaException as o1:
        result.o1 = o1
    except UnknownTableException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_fields", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_fields_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'get_fields_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = get_fields_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_fields_with_environment_context_result()
    try:
        result.success = self._handler.get_fields_with_environment_context(args.db_name, args.table_name, args.environment_context)
    except MetaException as o1:
        result.o1 = o1
    except UnknownTableException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_fields_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_schema(self, seqid, iprot, oprot):
    """Serve one 'get_schema' RPC: decode args, call the handler, write the REPLY."""
    args = get_schema_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_schema_result()
    try:
        result.success = self._handler.get_schema(args.db_name, args.table_name)
    except MetaException as o1:
        result.o1 = o1
    except UnknownTableException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_schema", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_schema_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'get_schema_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = get_schema_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_schema_with_environment_context_result()
    try:
        result.success = self._handler.get_schema_with_environment_context(args.db_name, args.table_name, args.environment_context)
    except MetaException as o1:
        result.o1 = o1
    except UnknownTableException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_schema_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_create_table(self, seqid, iprot, oprot):
    """Serve one 'create_table' RPC (void): decode args, call the handler, write the REPLY."""
    args = create_table_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = create_table_result()
    try:
        self._handler.create_table(args.tbl)
    except AlreadyExistsException as o1:
        result.o1 = o1
    except InvalidObjectException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    except NoSuchObjectException as o4:
        result.o4 = o4
    oprot.writeMessageBegin("create_table", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_create_table_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'create_table_with_environment_context' RPC (void): decode args, call the handler, write the REPLY."""
    args = create_table_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = create_table_with_environment_context_result()
    try:
        self._handler.create_table_with_environment_context(args.tbl, args.environment_context)
    except AlreadyExistsException as o1:
        result.o1 = o1
    except InvalidObjectException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    except NoSuchObjectException as o4:
        result.o4 = o4
    oprot.writeMessageBegin("create_table_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_table(self, seqid, iprot, oprot):
    """Serve one 'drop_table' RPC (void): decode args, call the handler, write the REPLY."""
    args = drop_table_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_table_result()
    try:
        self._handler.drop_table(args.dbname, args.name, args.deleteData)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("drop_table", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_table_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'drop_table_with_environment_context' RPC (void): decode args, call the handler, write the REPLY."""
    args = drop_table_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_table_with_environment_context_result()
    try:
        self._handler.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("drop_table_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_tables(self, seqid, iprot, oprot):
    """Serve one 'get_tables' RPC: decode args, call the handler, write the REPLY."""
    args = get_tables_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_tables_result()
    try:
        result.success = self._handler.get_tables(args.db_name, args.pattern)
    except MetaException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("get_tables", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_all_tables(self, seqid, iprot, oprot):
    """Serve one 'get_all_tables' RPC: decode args, call the handler, write the REPLY."""
    args = get_all_tables_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_all_tables_result()
    try:
        result.success = self._handler.get_all_tables(args.db_name)
    except MetaException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("get_all_tables", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_table(self, seqid, iprot, oprot):
    """Serve one 'get_table' RPC: decode args, call the handler, write the REPLY."""
    args = get_table_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_table_result()
    try:
        result.success = self._handler.get_table(args.dbname, args.tbl_name)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_table", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_table_objects_by_name(self, seqid, iprot, oprot):
    """Serve one 'get_table_objects_by_name' RPC: decode args, call the handler, write the REPLY."""
    args = get_table_objects_by_name_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_table_objects_by_name_result()
    try:
        result.success = self._handler.get_table_objects_by_name(args.dbname, args.tbl_names)
    except MetaException as o1:
        result.o1 = o1
    except InvalidOperationException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_table_objects_by_name", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_table_names_by_filter(self, seqid, iprot, oprot):
    """Serve one 'get_table_names_by_filter' RPC: decode args, call the handler, write the REPLY."""
    args = get_table_names_by_filter_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_table_names_by_filter_result()
    try:
        result.success = self._handler.get_table_names_by_filter(args.dbname, args.filter, args.max_tables)
    except MetaException as o1:
        result.o1 = o1
    except InvalidOperationException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("get_table_names_by_filter", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_table(self, seqid, iprot, oprot):
    """Serve one 'alter_table' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_table_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_table_result()
    try:
        self._handler.alter_table(args.dbname, args.tbl_name, args.new_tbl)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_table", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_table_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'alter_table_with_environment_context' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_table_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_table_with_environment_context_result()
    try:
        self._handler.alter_table_with_environment_context(args.dbname, args.tbl_name, args.new_tbl, args.environment_context)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_table_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_table_with_cascade(self, seqid, iprot, oprot):
    """Serve one 'alter_table_with_cascade' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_table_with_cascade_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_table_with_cascade_result()
    try:
        self._handler.alter_table_with_cascade(args.dbname, args.tbl_name, args.new_tbl, args.cascade)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_table_with_cascade", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_partition(self, seqid, iprot, oprot):
    """Serve one 'add_partition' RPC: decode args, call the handler, write the REPLY."""
    args = add_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_partition_result()
    try:
        result.success = self._handler.add_partition(args.new_part)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_partition_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'add_partition_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = add_partition_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_partition_with_environment_context_result()
    try:
        result.success = self._handler.add_partition_with_environment_context(args.new_part, args.environment_context)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_partition_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_partitions(self, seqid, iprot, oprot):
    """Serve one 'add_partitions' RPC: decode args, call the handler, write the REPLY."""
    args = add_partitions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_partitions_result()
    try:
        result.success = self._handler.add_partitions(args.new_parts)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_partitions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_partitions_pspec(self, seqid, iprot, oprot):
    """Serve one 'add_partitions_pspec' RPC: decode args, call the handler, write the REPLY."""
    args = add_partitions_pspec_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_partitions_pspec_result()
    try:
        result.success = self._handler.add_partitions_pspec(args.new_parts)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_partitions_pspec", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_append_partition(self, seqid, iprot, oprot):
    """Serve one 'append_partition' RPC: decode args, call the handler, write the REPLY."""
    args = append_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = append_partition_result()
    try:
        result.success = self._handler.append_partition(args.db_name, args.tbl_name, args.part_vals)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("append_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_partitions_req(self, seqid, iprot, oprot):
    """Serve one 'add_partitions_req' RPC: decode args, call the handler, write the REPLY."""
    args = add_partitions_req_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_partitions_req_result()
    try:
        result.success = self._handler.add_partitions_req(args.request)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_partitions_req", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_append_partition_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'append_partition_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = append_partition_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = append_partition_with_environment_context_result()
    try:
        result.success = self._handler.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("append_partition_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_append_partition_by_name(self, seqid, iprot, oprot):
    """Serve one 'append_partition_by_name' RPC: decode args, call the handler, write the REPLY."""
    args = append_partition_by_name_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = append_partition_by_name_result()
    try:
        result.success = self._handler.append_partition_by_name(args.db_name, args.tbl_name, args.part_name)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("append_partition_by_name", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_append_partition_by_name_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'append_partition_by_name_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = append_partition_by_name_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = append_partition_by_name_with_environment_context_result()
    try:
        result.success = self._handler.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("append_partition_by_name_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_partition(self, seqid, iprot, oprot):
    """Serve one 'drop_partition' RPC: decode args, call the handler, write the REPLY."""
    args = drop_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_partition_result()
    try:
        result.success = self._handler.drop_partition(args.db_name, args.tbl_name, args.part_vals, args.deleteData)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_partition_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'drop_partition_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = drop_partition_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_partition_with_environment_context_result()
    try:
        result.success = self._handler.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_partition_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_partition_by_name(self, seqid, iprot, oprot):
    """Serve one 'drop_partition_by_name' RPC: decode args, call the handler, write the REPLY."""
    args = drop_partition_by_name_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_partition_by_name_result()
    try:
        result.success = self._handler.drop_partition_by_name(args.db_name, args.tbl_name, args.part_name, args.deleteData)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_partition_by_name", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_partition_by_name_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'drop_partition_by_name_with_environment_context' RPC: decode args, call the handler, write the REPLY."""
    args = drop_partition_by_name_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_partition_by_name_with_environment_context_result()
    try:
        result.success = self._handler.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_partition_by_name_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_partitions_req(self, seqid, iprot, oprot):
    """Serve one 'drop_partitions_req' RPC: decode args, call the handler, write the REPLY."""
    args = drop_partitions_req_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_partitions_req_result()
    try:
        result.success = self._handler.drop_partitions_req(args.req)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_partitions_req", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partition(self, seqid, iprot, oprot):
    """Serve one 'get_partition' RPC: decode args, call the handler, write the REPLY."""
    args = get_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partition_result()
    try:
        result.success = self._handler.get_partition(args.db_name, args.tbl_name, args.part_vals)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_exchange_partition(self, seqid, iprot, oprot):
    """Serve one 'exchange_partition' RPC: decode args, call the handler, write the REPLY."""
    args = exchange_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = exchange_partition_result()
    try:
        result.success = self._handler.exchange_partition(args.partitionSpecs, args.source_db, args.source_table_name, args.dest_db, args.dest_table_name)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    except InvalidObjectException as o3:
        result.o3 = o3
    except InvalidInputException as o4:
        result.o4 = o4
    oprot.writeMessageBegin("exchange_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partition_with_auth(self, seqid, iprot, oprot):
    """Serve one 'get_partition_with_auth' RPC: decode args, call the handler, write the REPLY."""
    args = get_partition_with_auth_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partition_with_auth_result()
    try:
        result.success = self._handler.get_partition_with_auth(args.db_name, args.tbl_name, args.part_vals, args.user_name, args.group_names)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partition_with_auth", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partition_by_name(self, seqid, iprot, oprot):
    """Serve one 'get_partition_by_name' RPC: decode args, call the handler, write the REPLY."""
    args = get_partition_by_name_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partition_by_name_result()
    try:
        result.success = self._handler.get_partition_by_name(args.db_name, args.tbl_name, args.part_name)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partition_by_name", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions(self, seqid, iprot, oprot):
    """Serve one 'get_partitions' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_result()
    try:
        result.success = self._handler.get_partitions(args.db_name, args.tbl_name, args.max_parts)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_with_auth(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_with_auth' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_with_auth_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_with_auth_result()
    try:
        result.success = self._handler.get_partitions_with_auth(args.db_name, args.tbl_name, args.max_parts, args.user_name, args.group_names)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_with_auth", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_pspec(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_pspec' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_pspec_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_pspec_result()
    try:
        result.success = self._handler.get_partitions_pspec(args.db_name, args.tbl_name, args.max_parts)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_pspec", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partition_names(self, seqid, iprot, oprot):
    """Serve one 'get_partition_names' RPC: decode args, call the handler, write the REPLY."""
    args = get_partition_names_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partition_names_result()
    try:
        result.success = self._handler.get_partition_names(args.db_name, args.tbl_name, args.max_parts)
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partition_names", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_ps(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_ps' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_ps_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_ps_result()
    try:
        result.success = self._handler.get_partitions_ps(args.db_name, args.tbl_name, args.part_vals, args.max_parts)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_ps", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_ps_with_auth(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_ps_with_auth' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_ps_with_auth_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_ps_with_auth_result()
    try:
        result.success = self._handler.get_partitions_ps_with_auth(args.db_name, args.tbl_name, args.part_vals, args.max_parts, args.user_name, args.group_names)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_ps_with_auth", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partition_names_ps(self, seqid, iprot, oprot):
    """Serve one 'get_partition_names_ps' RPC: decode args, call the handler, write the REPLY."""
    args = get_partition_names_ps_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partition_names_ps_result()
    try:
        result.success = self._handler.get_partition_names_ps(args.db_name, args.tbl_name, args.part_vals, args.max_parts)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partition_names_ps", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_by_filter(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_by_filter' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_by_filter_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_by_filter_result()
    try:
        result.success = self._handler.get_partitions_by_filter(args.db_name, args.tbl_name, args.filter, args.max_parts)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_by_filter", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_part_specs_by_filter(self, seqid, iprot, oprot):
    """Serve one 'get_part_specs_by_filter' RPC: decode args, call the handler, write the REPLY."""
    args = get_part_specs_by_filter_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_part_specs_by_filter_result()
    try:
        result.success = self._handler.get_part_specs_by_filter(args.db_name, args.tbl_name, args.filter, args.max_parts)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_part_specs_by_filter", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_by_expr(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_by_expr' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_by_expr_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_by_expr_result()
    try:
        result.success = self._handler.get_partitions_by_expr(args.req)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_by_expr", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_partitions_by_names(self, seqid, iprot, oprot):
    """Serve one 'get_partitions_by_names' RPC: decode args, call the handler, write the REPLY."""
    args = get_partitions_by_names_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_partitions_by_names_result()
    try:
        result.success = self._handler.get_partitions_by_names(args.db_name, args.tbl_name, args.names)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("get_partitions_by_names", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_partition(self, seqid, iprot, oprot):
    """Serve one 'alter_partition' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_partition_result()
    try:
        self._handler.alter_partition(args.db_name, args.tbl_name, args.new_part)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_partitions(self, seqid, iprot, oprot):
    """Serve one 'alter_partitions' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_partitions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_partitions_result()
    try:
        self._handler.alter_partitions(args.db_name, args.tbl_name, args.new_parts)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_partitions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_partition_with_environment_context(self, seqid, iprot, oprot):
    """Serve one 'alter_partition_with_environment_context' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_partition_with_environment_context_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_partition_with_environment_context_result()
    try:
        self._handler.alter_partition_with_environment_context(args.db_name, args.tbl_name, args.new_part, args.environment_context)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_partition_with_environment_context", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_rename_partition(self, seqid, iprot, oprot):
    """Serve one 'rename_partition' RPC (void): decode args, call the handler, write the REPLY."""
    args = rename_partition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = rename_partition_result()
    try:
        self._handler.rename_partition(args.db_name, args.tbl_name, args.part_vals, args.new_part)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("rename_partition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_partition_name_has_valid_characters(self, seqid, iprot, oprot):
    """Serve one 'partition_name_has_valid_characters' RPC: decode args, call the handler, write the REPLY."""
    args = partition_name_has_valid_characters_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = partition_name_has_valid_characters_result()
    try:
        result.success = self._handler.partition_name_has_valid_characters(args.part_vals, args.throw_exception)
    except MetaException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("partition_name_has_valid_characters", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_config_value(self, seqid, iprot, oprot):
    """Serve one 'get_config_value' RPC: decode args, call the handler, write the REPLY."""
    args = get_config_value_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_config_value_result()
    try:
        result.success = self._handler.get_config_value(args.name, args.defaultValue)
    except ConfigValSecurityException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("get_config_value", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_partition_name_to_vals(self, seqid, iprot, oprot):
    """Serve one 'partition_name_to_vals' RPC: decode args, call the handler, write the REPLY."""
    args = partition_name_to_vals_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = partition_name_to_vals_result()
    try:
        result.success = self._handler.partition_name_to_vals(args.part_name)
    except MetaException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("partition_name_to_vals", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_partition_name_to_spec(self, seqid, iprot, oprot):
    """Serve one 'partition_name_to_spec' RPC: decode args, call the handler, write the REPLY."""
    args = partition_name_to_spec_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = partition_name_to_spec_result()
    try:
        result.success = self._handler.partition_name_to_spec(args.part_name)
    except MetaException as o1:
        result.o1 = o1
    oprot.writeMessageBegin("partition_name_to_spec", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_markPartitionForEvent(self, seqid, iprot, oprot):
    """Serve one 'markPartitionForEvent' RPC (void): decode args, call the handler, write the REPLY."""
    args = markPartitionForEvent_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = markPartitionForEvent_result()
    try:
        self._handler.markPartitionForEvent(args.db_name, args.tbl_name, args.part_vals, args.eventType)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    except UnknownTableException as o4:
        result.o4 = o4
    except UnknownPartitionException as o5:
        result.o5 = o5
    except InvalidPartitionException as o6:
        result.o6 = o6
    oprot.writeMessageBegin("markPartitionForEvent", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_isPartitionMarkedForEvent(self, seqid, iprot, oprot):
    """Serve one 'isPartitionMarkedForEvent' RPC: decode args, call the handler, write the REPLY."""
    args = isPartitionMarkedForEvent_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = isPartitionMarkedForEvent_result()
    try:
        result.success = self._handler.isPartitionMarkedForEvent(args.db_name, args.tbl_name, args.part_vals, args.eventType)
    except MetaException as o1:
        result.o1 = o1
    except NoSuchObjectException as o2:
        result.o2 = o2
    except UnknownDBException as o3:
        result.o3 = o3
    except UnknownTableException as o4:
        result.o4 = o4
    except UnknownPartitionException as o5:
        result.o5 = o5
    except InvalidPartitionException as o6:
        result.o6 = o6
    oprot.writeMessageBegin("isPartitionMarkedForEvent", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_add_index(self, seqid, iprot, oprot):
    """Serve one 'add_index' RPC: decode args, call the handler, write the REPLY."""
    args = add_index_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_index_result()
    try:
        result.success = self._handler.add_index(args.new_index, args.index_table)
    except InvalidObjectException as o1:
        result.o1 = o1
    except AlreadyExistsException as o2:
        result.o2 = o2
    except MetaException as o3:
        result.o3 = o3
    oprot.writeMessageBegin("add_index", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_alter_index(self, seqid, iprot, oprot):
    """Serve one 'alter_index' RPC (void): decode args, call the handler, write the REPLY."""
    args = alter_index_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = alter_index_result()
    try:
        self._handler.alter_index(args.dbname, args.base_tbl_name, args.idx_name, args.new_idx)
    except InvalidOperationException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("alter_index", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_drop_index_by_name(self, seqid, iprot, oprot):
    """Serve one 'drop_index_by_name' RPC: decode args, call the handler, write the REPLY."""
    args = drop_index_by_name_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = drop_index_by_name_result()
    try:
        result.success = self._handler.drop_index_by_name(args.db_name, args.tbl_name, args.index_name, args.deleteData)
    except NoSuchObjectException as o1:
        result.o1 = o1
    except MetaException as o2:
        result.o2 = o2
    oprot.writeMessageBegin("drop_index_by_name", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_get_index_by_name(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_index_by_name: decode args, call handler,
  # capture declared exceptions in the result struct, write REPLY, flush.
  args = get_index_by_name_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_index_by_name_result()
  try:
    result.success = self._handler.get_index_by_name(args.db_name, args.tbl_name, args.index_name)
  except MetaException as o1:
    result.o1 = o1
  except NoSuchObjectException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_index_by_name", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_indexes(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_indexes: decode args, call handler, capture
  # declared exceptions in the result struct, write REPLY, flush.
  args = get_indexes_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_indexes_result()
  try:
    result.success = self._handler.get_indexes(args.db_name, args.tbl_name, args.max_indexes)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_indexes", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_index_names(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_index_names; the single declared exception is
  # bound to result field o2 (field numbering follows the IDL throws clause).
  args = get_index_names_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_index_names_result()
  try:
    result.success = self._handler.get_index_names(args.db_name, args.tbl_name, args.max_indexes)
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_index_names", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_update_table_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for update_table_column_statistics: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = update_table_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = update_table_column_statistics_result()
  try:
    result.success = self._handler.update_table_column_statistics(args.stats_obj)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except InvalidObjectException as o2:
    result.o2 = o2
  except MetaException as o3:
    result.o3 = o3
  except InvalidInputException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("update_table_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_update_partition_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for update_partition_column_statistics: decode args,
  # call handler, capture declared exceptions, write REPLY, flush.
  args = update_partition_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = update_partition_column_statistics_result()
  try:
    result.success = self._handler.update_partition_column_statistics(args.stats_obj)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except InvalidObjectException as o2:
    result.o2 = o2
  except MetaException as o3:
    result.o3 = o3
  except InvalidInputException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("update_partition_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_table_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_table_column_statistics: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = get_table_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_table_column_statistics_result()
  try:
    result.success = self._handler.get_table_column_statistics(args.db_name, args.tbl_name, args.col_name)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  except InvalidInputException as o3:
    result.o3 = o3
  except InvalidObjectException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("get_table_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_partition_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_partition_column_statistics: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = get_partition_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_partition_column_statistics_result()
  try:
    result.success = self._handler.get_partition_column_statistics(args.db_name, args.tbl_name, args.part_name, args.col_name)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  except InvalidInputException as o3:
    result.o3 = o3
  except InvalidObjectException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("get_partition_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_table_statistics_req(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_table_statistics_req: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = get_table_statistics_req_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_table_statistics_req_result()
  try:
    result.success = self._handler.get_table_statistics_req(args.request)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_table_statistics_req", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_partitions_statistics_req(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_partitions_statistics_req: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = get_partitions_statistics_req_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_partitions_statistics_req_result()
  try:
    result.success = self._handler.get_partitions_statistics_req(args.request)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_partitions_statistics_req", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_aggr_stats_for(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_aggr_stats_for: decode args, call handler,
  # capture declared exceptions in the result struct, write REPLY, flush.
  args = get_aggr_stats_for_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_aggr_stats_for_result()
  try:
    result.success = self._handler.get_aggr_stats_for(args.request)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_aggr_stats_for", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_set_aggr_stats_for(self, seqid, iprot, oprot):
  # Thrift dispatcher for set_aggr_stats_for: decode args, call handler,
  # capture declared exceptions in the result struct, write REPLY, flush.
  args = set_aggr_stats_for_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = set_aggr_stats_for_result()
  try:
    result.success = self._handler.set_aggr_stats_for(args.request)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except InvalidObjectException as o2:
    result.o2 = o2
  except MetaException as o3:
    result.o3 = o3
  except InvalidInputException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("set_aggr_stats_for", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_delete_partition_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for delete_partition_column_statistics: decode args,
  # call handler, capture declared exceptions, write REPLY, flush.
  args = delete_partition_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = delete_partition_column_statistics_result()
  try:
    result.success = self._handler.delete_partition_column_statistics(args.db_name, args.tbl_name, args.part_name, args.col_name)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  except InvalidObjectException as o3:
    result.o3 = o3
  except InvalidInputException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("delete_partition_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_delete_table_column_statistics(self, seqid, iprot, oprot):
  # Thrift dispatcher for delete_table_column_statistics: decode args, call
  # handler, capture declared exceptions in the result struct, write REPLY.
  args = delete_table_column_statistics_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = delete_table_column_statistics_result()
  try:
    result.success = self._handler.delete_table_column_statistics(args.db_name, args.tbl_name, args.col_name)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  except InvalidObjectException as o3:
    result.o3 = o3
  except InvalidInputException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("delete_table_column_statistics", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_create_function(self, seqid, iprot, oprot):
  # Thrift dispatcher for create_function (void method: handler return is
  # ignored; only declared exceptions are reported in the result struct).
  args = create_function_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = create_function_result()
  try:
    self._handler.create_function(args.func)
  except AlreadyExistsException as o1:
    result.o1 = o1
  except InvalidObjectException as o2:
    result.o2 = o2
  except MetaException as o3:
    result.o3 = o3
  except NoSuchObjectException as o4:
    result.o4 = o4
  oprot.writeMessageBegin("create_function", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_drop_function(self, seqid, iprot, oprot):
  # Thrift dispatcher for drop_function (void). MetaException is bound to
  # result field o3 — field numbering follows the IDL throws clause.
  args = drop_function_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = drop_function_result()
  try:
    self._handler.drop_function(args.dbName, args.funcName)
  except NoSuchObjectException as o1:
    result.o1 = o1
  except MetaException as o3:
    result.o3 = o3
  oprot.writeMessageBegin("drop_function", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_alter_function(self, seqid, iprot, oprot):
  # Thrift dispatcher for alter_function (void method: handler return is
  # ignored; declared exceptions are captured in the result struct).
  args = alter_function_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = alter_function_result()
  try:
    self._handler.alter_function(args.dbName, args.funcName, args.newFunc)
  except InvalidOperationException as o1:
    result.o1 = o1
  except MetaException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("alter_function", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_functions(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_functions: decode args, call handler, capture
  # the declared exception in the result struct, write REPLY, flush.
  args = get_functions_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_functions_result()
  try:
    result.success = self._handler.get_functions(args.dbName, args.pattern)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_functions", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_function(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_function: decode args, call handler, capture
  # declared exceptions in the result struct, write REPLY, flush.
  args = get_function_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_function_result()
  try:
    result.success = self._handler.get_function(args.dbName, args.funcName)
  except MetaException as o1:
    result.o1 = o1
  except NoSuchObjectException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("get_function", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_create_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for create_role: decode args, call handler, capture
  # the declared exception in the result struct, write REPLY, flush.
  args = create_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = create_role_result()
  try:
    result.success = self._handler.create_role(args.role)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("create_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_drop_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for drop_role: decode args, call handler, capture the
  # declared exception in the result struct, write REPLY, flush.
  args = drop_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = drop_role_result()
  try:
    result.success = self._handler.drop_role(args.role_name)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("drop_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_role_names(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_role_names (no call arguments are used; args
  # are still decoded to advance the input protocol correctly).
  args = get_role_names_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_role_names_result()
  try:
    result.success = self._handler.get_role_names()
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_role_names", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_grant_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for grant_role: decode args, call handler, capture the
  # declared exception in the result struct, write REPLY, flush.
  args = grant_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = grant_role_result()
  try:
    result.success = self._handler.grant_role(args.role_name, args.principal_name, args.principal_type, args.grantor, args.grantorType, args.grant_option)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("grant_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_revoke_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for revoke_role: decode args, call handler, capture
  # the declared exception in the result struct, write REPLY, flush.
  args = revoke_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = revoke_role_result()
  try:
    result.success = self._handler.revoke_role(args.role_name, args.principal_name, args.principal_type)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("revoke_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_list_roles(self, seqid, iprot, oprot):
  # Thrift dispatcher for list_roles: decode args, call handler, capture the
  # declared exception in the result struct, write REPLY, flush.
  args = list_roles_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = list_roles_result()
  try:
    result.success = self._handler.list_roles(args.principal_name, args.principal_type)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("list_roles", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_grant_revoke_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for grant_revoke_role: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = grant_revoke_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = grant_revoke_role_result()
  try:
    result.success = self._handler.grant_revoke_role(args.request)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("grant_revoke_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_principals_in_role(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_principals_in_role: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = get_principals_in_role_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_principals_in_role_result()
  try:
    result.success = self._handler.get_principals_in_role(args.request)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_principals_in_role", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_role_grants_for_principal(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_role_grants_for_principal: decode args, call
  # handler, capture the declared exception, write REPLY, flush.
  args = get_role_grants_for_principal_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_role_grants_for_principal_result()
  try:
    result.success = self._handler.get_role_grants_for_principal(args.request)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_role_grants_for_principal", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_privilege_set(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_privilege_set: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = get_privilege_set_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_privilege_set_result()
  try:
    result.success = self._handler.get_privilege_set(args.hiveObject, args.user_name, args.group_names)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_privilege_set", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_list_privileges(self, seqid, iprot, oprot):
  # Thrift dispatcher for list_privileges: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = list_privileges_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = list_privileges_result()
  try:
    result.success = self._handler.list_privileges(args.principal_name, args.principal_type, args.hiveObject)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("list_privileges", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_grant_privileges(self, seqid, iprot, oprot):
  # Thrift dispatcher for grant_privileges: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = grant_privileges_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = grant_privileges_result()
  try:
    result.success = self._handler.grant_privileges(args.privileges)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("grant_privileges", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_revoke_privileges(self, seqid, iprot, oprot):
  # Thrift dispatcher for revoke_privileges: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = revoke_privileges_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = revoke_privileges_result()
  try:
    result.success = self._handler.revoke_privileges(args.privileges)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("revoke_privileges", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_grant_revoke_privileges(self, seqid, iprot, oprot):
  # Thrift dispatcher for grant_revoke_privileges: decode args, call
  # handler, capture the declared exception, write REPLY, flush.
  args = grant_revoke_privileges_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = grant_revoke_privileges_result()
  try:
    result.success = self._handler.grant_revoke_privileges(args.request)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("grant_revoke_privileges", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_set_ugi(self, seqid, iprot, oprot):
  # Thrift dispatcher for set_ugi: decode args, call handler, capture the
  # declared exception in the result struct, write REPLY, flush.
  args = set_ugi_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = set_ugi_result()
  try:
    result.success = self._handler.set_ugi(args.user_name, args.group_names)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("set_ugi", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_delegation_token(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_delegation_token: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = get_delegation_token_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_delegation_token_result()
  try:
    result.success = self._handler.get_delegation_token(args.token_owner, args.renewer_kerberos_principal_name)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("get_delegation_token", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_renew_delegation_token(self, seqid, iprot, oprot):
  # Thrift dispatcher for renew_delegation_token: decode args, call handler,
  # capture the declared exception in the result struct, write REPLY, flush.
  args = renew_delegation_token_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = renew_delegation_token_result()
  try:
    result.success = self._handler.renew_delegation_token(args.token_str_form)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("renew_delegation_token", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_cancel_delegation_token(self, seqid, iprot, oprot):
  # Thrift dispatcher for cancel_delegation_token (void method: handler
  # return is ignored; the declared exception is captured in the result).
  args = cancel_delegation_token_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = cancel_delegation_token_result()
  try:
    self._handler.cancel_delegation_token(args.token_str_form)
  except MetaException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("cancel_delegation_token", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_open_txns(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_open_txns (no declared exceptions; any handler
  # error propagates to the processor's generic error handling).
  args = get_open_txns_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_open_txns_result()
  result.success = self._handler.get_open_txns()
  oprot.writeMessageBegin("get_open_txns", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_open_txns_info(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_open_txns_info (no declared exceptions).
  args = get_open_txns_info_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_open_txns_info_result()
  result.success = self._handler.get_open_txns_info()
  oprot.writeMessageBegin("get_open_txns_info", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_open_txns(self, seqid, iprot, oprot):
  # Thrift dispatcher for open_txns (no declared exceptions).
  args = open_txns_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = open_txns_result()
  result.success = self._handler.open_txns(args.rqst)
  oprot.writeMessageBegin("open_txns", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_abort_txn(self, seqid, iprot, oprot):
  # Thrift dispatcher for abort_txn (void method: handler return is ignored;
  # the declared exception is captured in the result struct).
  args = abort_txn_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = abort_txn_result()
  try:
    self._handler.abort_txn(args.rqst)
  except NoSuchTxnException as o1:
    result.o1 = o1
  oprot.writeMessageBegin("abort_txn", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_commit_txn(self, seqid, iprot, oprot):
  # Thrift dispatcher for commit_txn (void method: handler return is
  # ignored; declared exceptions are captured in the result struct).
  args = commit_txn_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = commit_txn_result()
  try:
    self._handler.commit_txn(args.rqst)
  except NoSuchTxnException as o1:
    result.o1 = o1
  except TxnAbortedException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("commit_txn", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_lock(self, seqid, iprot, oprot):
  # Thrift dispatcher for lock: decode args, call handler, capture declared
  # exceptions in the result struct, write REPLY, flush.
  args = lock_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = lock_result()
  try:
    result.success = self._handler.lock(args.rqst)
  except NoSuchTxnException as o1:
    result.o1 = o1
  except TxnAbortedException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("lock", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_check_lock(self, seqid, iprot, oprot):
  # Thrift dispatcher for check_lock: decode args, call handler, capture
  # declared exceptions in the result struct, write REPLY, flush.
  args = check_lock_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = check_lock_result()
  try:
    result.success = self._handler.check_lock(args.rqst)
  except NoSuchTxnException as o1:
    result.o1 = o1
  except TxnAbortedException as o2:
    result.o2 = o2
  except NoSuchLockException as o3:
    result.o3 = o3
  oprot.writeMessageBegin("check_lock", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_unlock(self, seqid, iprot, oprot):
  # Thrift dispatcher for unlock (void method: handler return is ignored;
  # declared exceptions are captured in the result struct).
  args = unlock_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = unlock_result()
  try:
    self._handler.unlock(args.rqst)
  except NoSuchLockException as o1:
    result.o1 = o1
  except TxnOpenException as o2:
    result.o2 = o2
  oprot.writeMessageBegin("unlock", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_show_locks(self, seqid, iprot, oprot):
  # Thrift dispatcher for show_locks (no declared exceptions).
  args = show_locks_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = show_locks_result()
  result.success = self._handler.show_locks(args.rqst)
  oprot.writeMessageBegin("show_locks", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_heartbeat(self, seqid, iprot, oprot):
  # Thrift dispatcher for heartbeat (void method: handler return is ignored;
  # declared exceptions are captured in the result struct).
  args = heartbeat_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = heartbeat_result()
  try:
    self._handler.heartbeat(args.ids)
  except NoSuchLockException as o1:
    result.o1 = o1
  except NoSuchTxnException as o2:
    result.o2 = o2
  except TxnAbortedException as o3:
    result.o3 = o3
  oprot.writeMessageBegin("heartbeat", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_heartbeat_txn_range(self, seqid, iprot, oprot):
  # Thrift dispatcher for heartbeat_txn_range (no declared exceptions).
  args = heartbeat_txn_range_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = heartbeat_txn_range_result()
  result.success = self._handler.heartbeat_txn_range(args.txns)
  oprot.writeMessageBegin("heartbeat_txn_range", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_compact(self, seqid, iprot, oprot):
  # Thrift dispatcher for compact (void method with no declared exceptions:
  # the empty result struct only serves as the REPLY acknowledgement).
  args = compact_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = compact_result()
  self._handler.compact(args.rqst)
  oprot.writeMessageBegin("compact", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_show_compact(self, seqid, iprot, oprot):
  # Thrift dispatcher for show_compact (no declared exceptions).
  args = show_compact_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = show_compact_result()
  result.success = self._handler.show_compact(args.rqst)
  oprot.writeMessageBegin("show_compact", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_next_notification(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_next_notification (no declared exceptions).
  args = get_next_notification_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_next_notification_result()
  result.success = self._handler.get_next_notification(args.rqst)
  oprot.writeMessageBegin("get_next_notification", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
def process_get_current_notificationEventId(self, seqid, iprot, oprot):
  # Thrift dispatcher for get_current_notificationEventId (no arguments are
  # used by the handler; no declared exceptions).
  args = get_current_notificationEventId_args()
  args.read(iprot)
  iprot.readMessageEnd()
  result = get_current_notificationEventId_result()
  result.success = self._handler.get_current_notificationEventId()
  oprot.writeMessageBegin("get_current_notificationEventId", TMessageType.REPLY, seqid)
  result.write(oprot)
  oprot.writeMessageEnd()
  oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class getMetaConf_args:
  """
  Thrift argument struct for getMetaConf.

  Attributes:
   - key
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'key', None, None, ), # 1
  )

  def __init__(self, key=None,):
    self.key = key

  def read(self, iprot):
    # Fast path: C-accelerated decode when the accelerated binary protocol
    # and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getMetaConf_args')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 1)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() so repr() works on
    # both Python 2 and Python 3; iteration behavior is identical.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getMetaConf_result:
  """
  Thrift result struct for getMetaConf.

  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getMetaConf_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() (py2/py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class setMetaConf_args:
  """
  Thrift argument struct for setMetaConf.

  Attributes:
   - key
   - value
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'key', None, None, ), # 1
    (2, TType.STRING, 'value', None, None, ), # 2
  )

  def __init__(self, key=None, value=None,):
    self.key = key
    self.value = value

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.key = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.value = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setMetaConf_args')
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 1)
      oprot.writeString(self.key)
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 2)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() (py2/py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class setMetaConf_result:
  """
  Thrift result struct for setMetaConf (void method: only the declared
  exception field is carried back to the caller).

  Attributes:
   - o1
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, o1=None,):
    self.o1 = o1

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setMetaConf_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() (py2/py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_database_args:
  """
  Thrift argument struct for create_database.

  Attributes:
   - database
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'database', (Database, Database.thrift_spec), None, ), # 1
  )

  def __init__(self, database=None,):
    self.database = database

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.database = Database()
          self.database.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_database_args')
    if self.database is not None:
      oprot.writeFieldBegin('database', TType.STRUCT, 1)
      self.database.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() (py2/py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_database_result:
  """
  Thrift result struct for create_database (void method: only the declared
  exception fields are carried back to the caller).

  Attributes:
   - o1
   - o2
   - o3
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, o1=None, o2=None, o3=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = AlreadyExistsException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field ids are skipped for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_database_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of the Python2-only .iteritems() (py2/py3 compatible).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_database_args:
  """
  Generated Thrift argument struct 'get_database_args'.

  Attributes:
   - name: database name string (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
  )

  def __init__(self, name=None,):
    self.name = name

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_database_args')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_database_result:
  """
  Generated Thrift result struct 'get_database_result'.

  Attributes:
   - success: Database struct (field id 0)
   - o1: NoSuchObjectException (field id 1)
   - o2: MetaException (field id 2)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Database, Database.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Database()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_database_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_database_args:
  """
  Generated Thrift argument struct 'drop_database_args'.

  Attributes:
   - name: database name string (field id 1)
   - deleteData: bool flag (field id 2)
   - cascade: bool flag (field id 3)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.BOOL, 'deleteData', None, None, ), # 2
    (3, TType.BOOL, 'cascade', None, None, ), # 3
  )

  def __init__(self, name=None, deleteData=None, cascade=None,):
    self.name = name
    self.deleteData = deleteData
    self.cascade = cascade

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.cascade = iprot.readBool()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_database_args')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 2)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    if self.cascade is not None:
      oprot.writeFieldBegin('cascade', TType.BOOL, 3)
      oprot.writeBool(self.cascade)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_database_result:
  """
  Generated Thrift result struct 'drop_database_result'.

  Attributes:
   - o1: NoSuchObjectException (field id 1)
   - o2: InvalidOperationException (field id 2)
   - o3: MetaException (field id 3)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, o1=None, o2=None, o3=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidOperationException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_database_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_databases_args:
  """
  Generated Thrift argument struct 'get_databases_args'.

  Attributes:
   - pattern: name-matching pattern string (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'pattern', None, None, ), # 1
  )

  def __init__(self, pattern=None,):
    self.pattern = pattern

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.pattern = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_databases_args')
    if self.pattern is not None:
      oprot.writeFieldBegin('pattern', TType.STRING, 1)
      oprot.writeString(self.pattern)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_databases_result:
  """
  Generated Thrift result struct 'get_databases_result'.

  Attributes:
   - success: list of name strings (field id 0)
   - o1: MetaException (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype444, _size441) = iprot.readListBegin()
          # range() instead of Py2-only xrange(); equivalent here.
          for _i445 in range(_size441):
            _elem446 = iprot.readString()
            self.success.append(_elem446)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_databases_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter447 in self.success:
        oprot.writeString(iter447)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_all_databases_args:
  """Generated Thrift argument struct 'get_all_databases_args' (no fields)."""

  # No declared fields.
  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this struct (skips every field until STOP)."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this (empty) struct to the output protocol oprot."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_all_databases_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_all_databases_result:
  """
  Generated Thrift result struct 'get_all_databases_result'.

  Attributes:
   - success: list of name strings (field id 0)
   - o1: MetaException (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype451, _size448) = iprot.readListBegin()
          # range() instead of Py2-only xrange(); equivalent here.
          for _i452 in range(_size448):
            _elem453 = iprot.readString()
            self.success.append(_elem453)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_all_databases_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter454 in self.success:
        oprot.writeString(iter454)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_database_args:
  """
  Generated Thrift argument struct 'alter_database_args'.

  Attributes:
   - dbname: database name string (field id 1)
   - db: Database struct (field id 2)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRUCT, 'db', (Database, Database.thrift_spec), None, ), # 2
  )

  def __init__(self, dbname=None, db=None,):
    self.dbname = dbname
    self.db = db

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.db = Database()
          self.db.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_database_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.db is not None:
      oprot.writeFieldBegin('db', TType.STRUCT, 2)
      self.db.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_database_result:
  """
  Generated Thrift result struct 'alter_database_result'.

  Attributes:
   - o1: MetaException (field id 1)
   - o2: NoSuchObjectException (field id 2)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_database_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_type_args:
  """
  Generated Thrift argument struct 'get_type_args'.

  Attributes:
   - name: type name string (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
  )

  def __init__(self, name=None,):
    self.name = name

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_type_args')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_type_result:
  """
  Generated Thrift result struct 'get_type_result'.

  Attributes:
   - success: Type struct (field id 0)
   - o1: MetaException (field id 1)
   - o2: NoSuchObjectException (field id 2)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Type, Type.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Type()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_type_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_type_args:
  """
  Generated Thrift argument struct 'create_type_args'.

  Attributes:
   - type: Type struct (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'type', (Type, Type.thrift_spec), None, ), # 1
  )

  def __init__(self, type=None,):
    self.type = type

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.type = Type()
          self.type.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_type_args')
    if self.type is not None:
      oprot.writeFieldBegin('type', TType.STRUCT, 1)
      self.type.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_type_result:
  """
  Generated Thrift result struct 'create_type_result'.

  Attributes:
   - success: bool (field id 0)
   - o1: AlreadyExistsException (field id 1)
   - o2: InvalidObjectException (field id 2)
   - o3: MetaException (field id 3)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = AlreadyExistsException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_type_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_type_args:
  """
  Generated Thrift argument struct 'drop_type_args'.

  Attributes:
   - type: type name string (field id 1)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'type', None, None, ), # 1
  )

  def __init__(self, type=None,):
    self.type = type

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.type = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_type_args')
    if self.type is not None:
      oprot.writeFieldBegin('type', TType.STRING, 1)
      oprot.writeString(self.type)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_type_result:
  """
  Generated Thrift result struct 'drop_type_result'.

  Attributes:
   - success: bool (field id 0)
   - o1: MetaException (field id 1)
   - o2: NoSuchObjectException (field id 2)
  """

  # Thrift field metadata: (id, wire type, name, type args, default) per slot.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct's fields from the input protocol iprot."""
    # C-accelerated fast path when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol oprot (None fields omitted)."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_type_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """No required-field checks; always passes."""
    return

  def __repr__(self):
    # .items(): works on Python 2 and 3 (.iteritems() is Py2-only).
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_type_all_args:
  """
  Thrift-generated wire struct carrying the arguments of the get_type_all call.

  Attributes:
   - name: string argument sent to the server
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
  )

  def __init__(self, name=None,):
    self.name = name

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_type_all_args')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_type_all_result:
  """
  Thrift-generated wire struct holding the result of the get_type_all call.

  Attributes:
   - success: map of string -> Type returned by the server
   - o2: MetaException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.MAP, 'success', (TType.STRING,None,TType.STRUCT,(Type, Type.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o2=None,):
    self.success = success
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.MAP:
          self.success = {}
          (_ktype456, _vtype457, _size455 ) = iprot.readMapBegin()
          # range (not the py2-only xrange) keeps this portable to Python 3.
          for _i459 in range(_size455):
            _key460 = iprot.readString()
            _val461 = Type()
            _val461.read(iprot)
            self.success[_key460] = _val461
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_type_all_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.MAP, 0)
      oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.success))
      for kiter462,viter463 in self.success.items():
        oprot.writeString(kiter462)
        viter463.write(oprot)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 1)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_fields_args:
  """
  Thrift-generated wire struct carrying the arguments of the get_fields call.

  Attributes:
   - db_name: database name (string)
   - table_name: table name (string)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )

  def __init__(self, db_name=None, table_name=None,):
    self.db_name = db_name
    self.table_name = table_name

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_fields_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_fields_result:
  """
  Thrift-generated wire struct holding the result of the get_fields call.

  Attributes:
   - success: list of FieldSchema returned by the server
   - o1: MetaException raised by the server, if any
   - o2: UnknownTableException raised by the server, if any
   - o3: UnknownDBException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype467, _size464) = iprot.readListBegin()
          # range (not the py2-only xrange) keeps this portable to Python 3.
          for _i468 in range(_size464):
            _elem469 = FieldSchema()
            _elem469.read(iprot)
            self.success.append(_elem469)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = UnknownTableException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_fields_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter470 in self.success:
        iter470.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_fields_with_environment_context_args:
  """
  Thrift-generated wire struct carrying the arguments of the
  get_fields_with_environment_context call.

  Attributes:
   - db_name: database name (string)
   - table_name: table name (string)
   - environment_context: EnvironmentContext struct sent with the request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 3
  )

  def __init__(self, db_name=None, table_name=None, environment_context=None,):
    self.db_name = db_name
    self.table_name = table_name
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_fields_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 3)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_fields_with_environment_context_result:
  """
  Thrift-generated wire struct holding the result of the
  get_fields_with_environment_context call.

  Attributes:
   - success: list of FieldSchema returned by the server
   - o1: MetaException raised by the server, if any
   - o2: UnknownTableException raised by the server, if any
   - o3: UnknownDBException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype474, _size471) = iprot.readListBegin()
          # range (not the py2-only xrange) keeps this portable to Python 3.
          for _i475 in range(_size471):
            _elem476 = FieldSchema()
            _elem476.read(iprot)
            self.success.append(_elem476)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = UnknownTableException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_fields_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter477 in self.success:
        iter477.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_schema_args:
  """
  Thrift-generated wire struct carrying the arguments of the get_schema call.

  Attributes:
   - db_name: database name (string)
   - table_name: table name (string)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
  )

  def __init__(self, db_name=None, table_name=None,):
    self.db_name = db_name
    self.table_name = table_name

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_schema_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_schema_result:
  """
  Thrift-generated wire struct holding the result of the get_schema call.

  Attributes:
   - success: list of FieldSchema returned by the server
   - o1: MetaException raised by the server, if any
   - o2: UnknownTableException raised by the server, if any
   - o3: UnknownDBException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype481, _size478) = iprot.readListBegin()
          # range (not the py2-only xrange) keeps this portable to Python 3.
          for _i482 in range(_size478):
            _elem483 = FieldSchema()
            _elem483.read(iprot)
            self.success.append(_elem483)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = UnknownTableException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_schema_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter484 in self.success:
        iter484.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_schema_with_environment_context_args:
  """
  Thrift-generated wire struct carrying the arguments of the
  get_schema_with_environment_context call.

  Attributes:
   - db_name: database name (string)
   - table_name: table name (string)
   - environment_context: EnvironmentContext struct sent with the request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'table_name', None, None, ), # 2
    (3, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 3
  )

  def __init__(self, db_name=None, table_name=None, environment_context=None,):
    self.db_name = db_name
    self.table_name = table_name
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.table_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_schema_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.table_name is not None:
      oprot.writeFieldBegin('table_name', TType.STRING, 2)
      oprot.writeString(self.table_name)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 3)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_schema_with_environment_context_result:
  """
  Thrift-generated wire struct holding the result of the
  get_schema_with_environment_context call.

  Attributes:
   - success: list of FieldSchema returned by the server
   - o1: MetaException raised by the server, if any
   - o2: UnknownTableException raised by the server, if any
   - o3: UnknownDBException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype488, _size485) = iprot.readListBegin()
          # range (not the py2-only xrange) keeps this portable to Python 3.
          for _i489 in range(_size485):
            _elem490 = FieldSchema()
            _elem490.read(iprot)
            self.success.append(_elem490)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = UnknownTableException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_schema_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter491 in self.success:
        iter491.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_table_args:
  """
  Thrift-generated wire struct carrying the arguments of the create_table call.

  Attributes:
   - tbl: Table struct sent to the server
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'tbl', (Table, Table.thrift_spec), None, ), # 1
  )

  def __init__(self, tbl=None,):
    self.tbl = tbl

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.tbl = Table()
          self.tbl.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_table_args')
    if self.tbl is not None:
      oprot.writeFieldBegin('tbl', TType.STRUCT, 1)
      self.tbl.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_table_result:
  """
  Thrift-generated wire struct holding the result of the create_table call.

  Attributes:
   - o1: AlreadyExistsException raised by the server, if any
   - o2: InvalidObjectException raised by the server, if any
   - o3: MetaException raised by the server, if any
   - o4: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 4
  )

  def __init__(self, o1=None, o2=None, o3=None, o4=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = AlreadyExistsException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = NoSuchObjectException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer servers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_table_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_table_with_environment_context_args:
  """
  Thrift-generated wire struct carrying the arguments of the
  create_table_with_environment_context call.

  Attributes:
   - tbl: Table struct sent to the server
   - environment_context: EnvironmentContext struct sent with the request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'tbl', (Table, Table.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 2
  )

  def __init__(self, tbl=None, environment_context=None,):
    self.tbl = tbl
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*, field by field."""
    # Fast path: use the C-accelerated codec when protocol and transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.tbl = Table()
          self.tbl.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_table_with_environment_context_args')
    if self.tbl is not None:
      oprot.writeFieldBegin('tbl', TType.STRUCT, 1)
      self.tbl.write(oprot)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 2)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() (not the py2-only .iteritems()) so this works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_table_with_environment_context_result:
  """
  Thrift-generated result struct for the create_table_with_environment_context
  RPC; each oN slot carries one of the declared exceptions, if raised.

  Attributes:
   - o1
   - o2
   - o3
   - o4
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 4
  )
  def __init__(self, o1=None, o2=None, o3=None, o4=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = AlreadyExistsException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = NoSuchObjectException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('create_table_with_environment_context_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_table_args:
  """
  Thrift-generated argument struct for the drop_table RPC.

  Attributes:
   - dbname
   - name
   - deleteData
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRING, 'name', None, None, ), # 2
    (3, TType.BOOL, 'deleteData', None, None, ), # 3
  )
  def __init__(self, dbname=None, name=None, deleteData=None,):
    self.dbname = dbname
    self.name = name
    self.deleteData = deleteData
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('drop_table_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 2)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 3)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_table_result:
  """
  Thrift-generated result struct for the drop_table RPC; each oN slot
  carries one of the declared exceptions, if raised.

  Attributes:
   - o1
   - o3
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, o1=None, o3=None,):
    self.o1 = o1
    self.o3 = o3
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('drop_table_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 2)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_table_with_environment_context_args:
  """
  Thrift-generated argument struct for the drop_table_with_environment_context
  RPC.

  Attributes:
   - dbname
   - name
   - deleteData
   - environment_context
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRING, 'name', None, None, ), # 2
    (3, TType.BOOL, 'deleteData', None, None, ), # 3
    (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
  )
  def __init__(self, dbname=None, name=None, deleteData=None, environment_context=None,):
    self.dbname = dbname
    self.name = name
    self.deleteData = deleteData
    self.environment_context = environment_context
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('drop_table_with_environment_context_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 2)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 3)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_table_with_environment_context_result:
  """
  Thrift-generated result struct for the drop_table_with_environment_context
  RPC; each oN slot carries one of the declared exceptions, if raised.

  Attributes:
   - o1
   - o3
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, o1=None, o3=None,):
    self.o1 = o1
    self.o3 = o3
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('drop_table_with_environment_context_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 2)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_tables_args:
  """
  Thrift-generated argument struct for the get_tables RPC.

  Attributes:
   - db_name
   - pattern
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'pattern', None, None, ), # 2
  )
  def __init__(self, db_name=None, pattern=None,):
    self.db_name = db_name
    self.pattern = pattern
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.pattern = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_tables_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.pattern is not None:
      oprot.writeFieldBegin('pattern', TType.STRING, 2)
      oprot.writeString(self.pattern)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_tables_result:
  """
  Thrift-generated result struct for the get_tables RPC: field 0 is the
  return value (a list of strings), o1 the declared exception.

  Attributes:
   - success
   - o1
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # Read list elements one by one (Python 2: xrange).
          (_etype495, _size492) = iprot.readListBegin()
          for _i496 in xrange(_size492):
            _elem497 = iprot.readString();
            self.success.append(_elem497)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_tables_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter498 in self.success:
        oprot.writeString(iter498)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_all_tables_args:
  """
  Thrift-generated argument struct for the get_all_tables RPC.

  Attributes:
   - db_name
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
  )
  def __init__(self, db_name=None,):
    self.db_name = db_name
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_all_tables_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_all_tables_result:
  """
  Thrift-generated result struct for the get_all_tables RPC: field 0 is
  the return value (a list of strings), o1 the declared exception.

  Attributes:
   - success
   - o1
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # Read list elements one by one (Python 2: xrange).
          (_etype502, _size499) = iprot.readListBegin()
          for _i503 in xrange(_size499):
            _elem504 = iprot.readString();
            self.success.append(_elem504)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_all_tables_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter505 in self.success:
        oprot.writeString(iter505)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_args:
  """
  Thrift-generated argument struct for the get_table RPC.

  Attributes:
   - dbname
   - tbl_name
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
  )
  def __init__(self, dbname=None, tbl_name=None,):
    self.dbname = dbname
    self.tbl_name = tbl_name
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_result:
  """
  Thrift-generated result struct for the get_table RPC: field 0 is the
  return value (a Table), o1/o2 the declared exceptions.

  Attributes:
   - success
   - o1
   - o2
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Table, Table.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Table()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_objects_by_name_args:
  """
  Thrift-generated argument struct for the get_table_objects_by_name RPC.

  Attributes:
   - dbname
   - tbl_names
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.LIST, 'tbl_names', (TType.STRING,None), None, ), # 2
  )
  def __init__(self, dbname=None, tbl_names=None,):
    self.dbname = dbname
    self.tbl_names = tbl_names
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.tbl_names = []
          # Read list elements one by one (Python 2: xrange).
          (_etype509, _size506) = iprot.readListBegin()
          for _i510 in xrange(_size506):
            _elem511 = iprot.readString();
            self.tbl_names.append(_elem511)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_objects_by_name_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.tbl_names is not None:
      oprot.writeFieldBegin('tbl_names', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.tbl_names))
      for iter512 in self.tbl_names:
        oprot.writeString(iter512)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_objects_by_name_result:
  """
  Thrift-generated result struct for the get_table_objects_by_name RPC:
  field 0 is the return value (a list of Table structs), o1/o2/o3 the
  declared exceptions.

  Attributes:
   - success
   - o1
   - o2
   - o3
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Table, Table.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # Each element is itself a Table struct (Python 2: xrange).
          (_etype516, _size513) = iprot.readListBegin()
          for _i517 in xrange(_size513):
            _elem518 = Table()
            _elem518.read(iprot)
            self.success.append(_elem518)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidOperationException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_objects_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter519 in self.success:
        iter519.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_names_by_filter_args:
  """
  Thrift-generated argument struct for the get_table_names_by_filter RPC.

  Attributes:
   - dbname
   - filter
   - max_tables
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  # NOTE: 'filter' shadows the builtin; kept as generated.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRING, 'filter', None, None, ), # 2
    (3, TType.I16, 'max_tables', None, -1, ), # 3
  )
  # max_tables default is taken from thrift_spec[3][4], i.e. -1.
  def __init__(self, dbname=None, filter=None, max_tables=thrift_spec[3][4],):
    self.dbname = dbname
    self.filter = filter
    self.max_tables = max_tables
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.filter = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_tables = iprot.readI16();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_names_by_filter_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRING, 2)
      oprot.writeString(self.filter)
      oprot.writeFieldEnd()
    if self.max_tables is not None:
      oprot.writeFieldBegin('max_tables', TType.I16, 3)
      oprot.writeI16(self.max_tables)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_names_by_filter_result:
  """
  Thrift-generated result struct for the get_table_names_by_filter RPC:
  field 0 is the return value (a list of strings), o1/o2/o3 the declared
  exceptions.

  Attributes:
   - success
   - o1
   - o2
   - o3
  """
  # Wire metadata per field id: (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
  def read(self, iprot):
    """Deserialize this struct from the protocol *iprot*."""
    # Fast path: C-accelerated decode (fastbinary extension).
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop; fields with unexpected ids/types are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # Read list elements one by one (Python 2: xrange).
          (_etype523, _size520) = iprot.readListBegin()
          for _i524 in xrange(_size520):
            _elem525 = iprot.readString();
            self.success.append(_elem525)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidOperationException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to the protocol *oprot*."""
    # Fast path: C-accelerated encode (fastbinary extension).
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Fields left as None are omitted from the wire.
    oprot.writeStructBegin('get_table_names_by_filter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter526 in self.success:
        oprot.writeString(iter526)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    # Python 2 idiom: dict.iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class alter_table_args:
    """
    Thrift call arguments for alter_table.

    Attributes:
     - dbname
     - tbl_name
     - new_tbl
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'dbname', None, None, ),  # 1
        (2, TType.STRING, 'tbl_name', None, None, ),  # 2
        (3, TType.STRUCT, 'new_tbl', (Table, Table.thrift_spec), None, ),  # 3
    )

    def __init__(self, dbname=None, tbl_name=None, new_tbl=None,):
        self.dbname = dbname
        self.tbl_name = tbl_name
        self.new_tbl = new_tbl

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.dbname = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.tbl_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.new_tbl = Table()
                    self.new_tbl.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_args')
        if self.dbname is not None:
            oprot.writeFieldBegin('dbname', TType.STRING, 1)
            oprot.writeString(self.dbname)
            oprot.writeFieldEnd()
        if self.tbl_name is not None:
            oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
            oprot.writeString(self.tbl_name)
            oprot.writeFieldEnd()
        if self.new_tbl is not None:
            oprot.writeFieldBegin('new_tbl', TType.STRUCT, 3)
            self.new_tbl.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class alter_table_result:
    """
    Thrift result struct for alter_table (exception fields only).

    Attributes:
     - o1
     - o2
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ),  # 2
    )

    def __init__(self, o1=None, o2=None,):
        self.o1 = o1
        self.o2 = o2

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidOperationException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = MetaException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_result')
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class alter_table_with_environment_context_args:
    """
    Thrift call arguments for alter_table_with_environment_context.

    Attributes:
     - dbname
     - tbl_name
     - new_tbl
     - environment_context
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'dbname', None, None, ),  # 1
        (2, TType.STRING, 'tbl_name', None, None, ),  # 2
        (3, TType.STRUCT, 'new_tbl', (Table, Table.thrift_spec), None, ),  # 3
        (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ),  # 4
    )

    def __init__(self, dbname=None, tbl_name=None, new_tbl=None, environment_context=None,):
        self.dbname = dbname
        self.tbl_name = tbl_name
        self.new_tbl = new_tbl
        self.environment_context = environment_context

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.dbname = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.tbl_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.new_tbl = Table()
                    self.new_tbl.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.environment_context = EnvironmentContext()
                    self.environment_context.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_with_environment_context_args')
        if self.dbname is not None:
            oprot.writeFieldBegin('dbname', TType.STRING, 1)
            oprot.writeString(self.dbname)
            oprot.writeFieldEnd()
        if self.tbl_name is not None:
            oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
            oprot.writeString(self.tbl_name)
            oprot.writeFieldEnd()
        if self.new_tbl is not None:
            oprot.writeFieldBegin('new_tbl', TType.STRUCT, 3)
            self.new_tbl.write(oprot)
            oprot.writeFieldEnd()
        if self.environment_context is not None:
            oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
            self.environment_context.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class alter_table_with_environment_context_result:
    """
    Thrift result struct for alter_table_with_environment_context.

    Attributes:
     - o1
     - o2
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ),  # 2
    )

    def __init__(self, o1=None, o2=None,):
        self.o1 = o1
        self.o2 = o2

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidOperationException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = MetaException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_with_environment_context_result')
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class alter_table_with_cascade_args:
    """
    Thrift call arguments for alter_table_with_cascade.

    Attributes:
     - dbname
     - tbl_name
     - new_tbl
     - cascade
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRING, 'dbname', None, None, ),  # 1
        (2, TType.STRING, 'tbl_name', None, None, ),  # 2
        (3, TType.STRUCT, 'new_tbl', (Table, Table.thrift_spec), None, ),  # 3
        (4, TType.BOOL, 'cascade', None, None, ),  # 4
    )

    def __init__(self, dbname=None, tbl_name=None, new_tbl=None, cascade=None,):
        self.dbname = dbname
        self.tbl_name = tbl_name
        self.new_tbl = new_tbl
        self.cascade = cascade

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.dbname = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.tbl_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.new_tbl = Table()
                    self.new_tbl.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.BOOL:
                    self.cascade = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_with_cascade_args')
        if self.dbname is not None:
            oprot.writeFieldBegin('dbname', TType.STRING, 1)
            oprot.writeString(self.dbname)
            oprot.writeFieldEnd()
        if self.tbl_name is not None:
            oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
            oprot.writeString(self.tbl_name)
            oprot.writeFieldEnd()
        if self.new_tbl is not None:
            oprot.writeFieldBegin('new_tbl', TType.STRUCT, 3)
            self.new_tbl.write(oprot)
            oprot.writeFieldEnd()
        if self.cascade is not None:
            oprot.writeFieldBegin('cascade', TType.BOOL, 4)
            oprot.writeBool(self.cascade)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class alter_table_with_cascade_result:
    """
    Thrift result struct for alter_table_with_cascade.

    Attributes:
     - o1
     - o2
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ),  # 2
    )

    def __init__(self, o1=None, o2=None,):
        self.o1 = o1
        self.o2 = o2

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidOperationException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = MetaException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('alter_table_with_cascade_result')
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partition_args:
    """
    Thrift call arguments for add_partition.

    Attributes:
     - new_part
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'new_part', (Partition, Partition.thrift_spec), None, ),  # 1
    )

    def __init__(self, new_part=None,):
        self.new_part = new_part

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.new_part = Partition()
                    self.new_part.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partition_args')
        if self.new_part is not None:
            oprot.writeFieldBegin('new_part', TType.STRUCT, 1)
            self.new_part.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partition_result:
    """
    Thrift result struct for add_partition.

    Attributes:
     - success
     - o1
     - o2
     - o3
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, o1=None, o2=None, o3=None,):
        self.success = success
        self.o1 = o1
        self.o2 = o2
        self.o3 = o3

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Partition()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidObjectException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = AlreadyExistsException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.o3 = MetaException()
                    self.o3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partition_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        if self.o3 is not None:
            oprot.writeFieldBegin('o3', TType.STRUCT, 3)
            self.o3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partition_with_environment_context_args:
    """
    Thrift call arguments for add_partition_with_environment_context.

    Attributes:
     - new_part
     - environment_context
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'new_part', (Partition, Partition.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ),  # 2
    )

    def __init__(self, new_part=None, environment_context=None,):
        self.new_part = new_part
        self.environment_context = environment_context

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.new_part = Partition()
                    self.new_part.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.environment_context = EnvironmentContext()
                    self.environment_context.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partition_with_environment_context_args')
        if self.new_part is not None:
            oprot.writeFieldBegin('new_part', TType.STRUCT, 1)
            self.new_part.write(oprot)
            oprot.writeFieldEnd()
        if self.environment_context is not None:
            oprot.writeFieldBegin('environment_context', TType.STRUCT, 2)
            self.environment_context.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partition_with_environment_context_result:
    """
    Thrift result struct for add_partition_with_environment_context.

    Attributes:
     - success
     - o1
     - o2
     - o3
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ),  # 0
        (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, o1=None, o2=None, o3=None,):
        self.success = success
        self.o1 = o1
        self.o2 = o2
        self.o3 = o3

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = Partition()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidObjectException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = AlreadyExistsException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.o3 = MetaException()
                    self.o3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partition_with_environment_context_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        if self.o3 is not None:
            oprot.writeFieldBegin('o3', TType.STRUCT, 3)
            self.o3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partitions_args:
    """
    Thrift call arguments for add_partitions.

    Attributes:
     - new_parts
    """

    thrift_spec = (
        None,  # 0
        (1, TType.LIST, 'new_parts', (TType.STRUCT, (Partition, Partition.thrift_spec)), None, ),  # 1
    )

    def __init__(self, new_parts=None,):
        self.new_parts = new_parts

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.new_parts = []
                    (_etype, _size) = iprot.readListBegin()
                    # range() is Python 2/3 compatible (xrange was Py2-only).
                    for _ in range(_size):
                        elem = Partition()
                        elem.read(iprot)
                        self.new_parts.append(elem)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partitions_args')
        if self.new_parts is not None:
            oprot.writeFieldBegin('new_parts', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.new_parts))
            for elem in self.new_parts:
                elem.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partitions_result:
    """
    Thrift result struct for add_partitions.

    Attributes:
     - success
     - o1
     - o2
     - o3
    """

    thrift_spec = (
        (0, TType.I32, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, o1=None, o2=None, o3=None,):
        self.success = success
        self.o1 = o1
        self.o2 = o2
        self.o3 = o3

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidObjectException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = AlreadyExistsException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.o3 = MetaException()
                    self.o3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partitions_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        if self.o3 is not None:
            oprot.writeFieldBegin('o3', TType.STRUCT, 3)
            self.o3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partitions_pspec_args:
    """
    Thrift call arguments for add_partitions_pspec.

    Attributes:
     - new_parts
    """

    thrift_spec = (
        None,  # 0
        (1, TType.LIST, 'new_parts', (TType.STRUCT, (PartitionSpec, PartitionSpec.thrift_spec)), None, ),  # 1
    )

    def __init__(self, new_parts=None,):
        self.new_parts = new_parts

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.LIST:
                    self.new_parts = []
                    (_etype, _size) = iprot.readListBegin()
                    # range() is Python 2/3 compatible (xrange was Py2-only).
                    for _ in range(_size):
                        elem = PartitionSpec()
                        elem.read(iprot)
                        self.new_parts.append(elem)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partitions_pspec_args')
        if self.new_parts is not None:
            oprot.writeFieldBegin('new_parts', TType.LIST, 1)
            oprot.writeListBegin(TType.STRUCT, len(self.new_parts))
            for elem in self.new_parts:
                elem.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class add_partitions_pspec_result:
    """
    Thrift result struct for add_partitions_pspec.

    Attributes:
     - success
     - o1
     - o2
     - o3
    """

    thrift_spec = (
        (0, TType.I32, 'success', None, None, ),  # 0
        (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ),  # 2
        (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ),  # 3
    )

    def __init__(self, success=None, o1=None, o2=None, o3=None,):
        self.success = success
        self.o1 = o1
        self.o2 = o2
        self.o3 = o3

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: C-accelerated binary decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = InvalidObjectException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.o2 = AlreadyExistsException()
                    self.o2.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.o3 = MetaException()
                    self.o3.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('add_partitions_pspec_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        if self.o2 is not None:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
        if self.o3 is not None:
            oprot.writeFieldBegin('o3', TType.STRUCT, 3)
            self.o3.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        # items() replaces the Python 2-only iteritems().
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class append_partition_args:
  """
  Thrift RPC argument struct for the append_partition call.

  Attributes:
   - db_name: database name (STRING)
   - tbl_name: table name (STRING)
   - part_vals: partition values (LIST of STRING)
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          # readListBegin yields the declared element type and count;
          # read exactly that many strings.
          (_etype544, _size541) = iprot.readListBegin()
          for _i545 in xrange(_size541):
            _elem546 = iprot.readString();
            self.part_vals.append(_elem546)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter547 in self.part_vals:
        oprot.writeString(iter547)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_result:
  """
  Thrift RPC result wrapper for the append_partition call.

  Attributes:
   - success: Partition returned by the server on success
   - o1: InvalidObjectException raised by the server, if any
   - o2: AlreadyExistsException raised by the server, if any
   - o3: MetaException raised by the server, if any
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  # Slot 0 is the call's return value; slots 1-3 are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_partitions_req_args:
  """
  Thrift RPC argument struct for the add_partitions_req call.

  Attributes:
   - request
  """

  # (field id, wire type, name, type args, default) — slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (AddPartitionsRequest, AddPartitionsRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    # Take the C-accelerated fast path when protocol/transport support it.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop: consume fields until STOP, skipping unknown ones.
    iprot.readStructBegin()
    while True:
      (_fname, wire_type, field_id) = iprot.readFieldBegin()
      if wire_type == TType.STOP:
        break
      if field_id == 1 and wire_type == TType.STRUCT:
        self.request = AddPartitionsRequest()
        self.request.read(iprot)
      else:
        iprot.skip(wire_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Take the C-accelerated fast path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('add_partitions_req_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs have nothing to validate.
    return

  def __repr__(self):
    rendered = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(rendered))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_partitions_req_result:
  """
  Thrift RPC result wrapper for the add_partitions_req call.

  Attributes:
   - success: AddPartitionsResult returned by the server on success
   - o1: InvalidObjectException raised by the server, if any
   - o2: AlreadyExistsException raised by the server, if any
   - o3: MetaException raised by the server, if any
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  # Slot 0 is the call's return value; slots 1-3 are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (AddPartitionsResult, AddPartitionsResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = AddPartitionsResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('add_partitions_req_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_with_environment_context_args:
  """
  Thrift RPC argument struct for the append_partition_with_environment_context call.

  Attributes:
   - db_name: database name (STRING)
   - tbl_name: table name (STRING)
   - part_vals: partition values (LIST of STRING)
   - environment_context: EnvironmentContext struct
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, environment_context=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          # readListBegin yields the declared element type and count;
          # read exactly that many strings.
          (_etype551, _size548) = iprot.readListBegin()
          for _i552 in xrange(_size548):
            _elem553 = iprot.readString();
            self.part_vals.append(_elem553)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter554 in self.part_vals:
        oprot.writeString(iter554)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_with_environment_context_result:
  """
  Thrift RPC result wrapper for the append_partition_with_environment_context call.

  Attributes:
   - success: Partition returned by the server on success
   - o1: InvalidObjectException raised by the server, if any
   - o2: AlreadyExistsException raised by the server, if any
   - o3: MetaException raised by the server, if any
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  # Slot 0 is the call's return value; slots 1-3 are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_by_name_args:
  """
  Thrift RPC argument struct for the append_partition_by_name call.

  Attributes:
   - db_name
   - tbl_name
   - part_name
  """

  # (field id, wire type, name, type args, default) — slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, part_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name

  def read(self, iprot):
    # Take the C-accelerated fast path when protocol/transport support it.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop: consume fields until STOP, skipping unknown ones.
    iprot.readStructBegin()
    while True:
      (_fname, wire_type, field_id) = iprot.readFieldBegin()
      if wire_type == TType.STOP:
        break
      if wire_type == TType.STRING and field_id == 1:
        self.db_name = iprot.readString()
      elif wire_type == TType.STRING and field_id == 2:
        self.tbl_name = iprot.readString()
      elif wire_type == TType.STRING and field_id == 3:
        self.part_name = iprot.readString()
      else:
        iprot.skip(wire_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Take the C-accelerated fast path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_by_name_args')
    # Emit each field only when set; order follows ascending field ids.
    for field_name, field_id, value in (('db_name', 1, self.db_name),
                                        ('tbl_name', 2, self.tbl_name),
                                        ('part_name', 3, self.part_name)):
      if value is not None:
        oprot.writeFieldBegin(field_name, TType.STRING, field_id)
        oprot.writeString(value)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs have nothing to validate.
    return

  def __repr__(self):
    rendered = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(rendered))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_by_name_result:
  """
  Thrift RPC result wrapper for the append_partition_by_name call.

  Attributes:
   - success: Partition returned by the server on success
   - o1: InvalidObjectException raised by the server, if any
   - o2: AlreadyExistsException raised by the server, if any
   - o3: MetaException raised by the server, if any
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  # Slot 0 is the call's return value; slots 1-3 are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_by_name_with_environment_context_args:
  """
  Thrift RPC argument struct for the append_partition_by_name_with_environment_context call.

  Attributes:
   - db_name: database name (STRING)
   - tbl_name: table name (STRING)
   - part_name: partition name (STRING)
   - environment_context: EnvironmentContext struct
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
    (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_name=None, environment_context=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_by_name_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class append_partition_by_name_with_environment_context_result:
  """
  Thrift RPC result wrapper for the append_partition_by_name_with_environment_context call.

  Attributes:
   - success: Partition returned by the server on success
   - o1: InvalidObjectException raised by the server, if any
   - o2: AlreadyExistsException raised by the server, if any
   - o3: MetaException raised by the server, if any
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  # Slot 0 is the call's return value; slots 1-3 are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('append_partition_by_name_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_partition_args:
  """
  Thrift RPC argument struct for the drop_partition call.

  Attributes:
   - db_name: database name (STRING)
   - tbl_name: table name (STRING)
   - part_vals: partition values (LIST of STRING)
   - deleteData: whether the server should also delete partition data (BOOL)
  """

  # Generated field spec: (field id, wire type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.BOOL, 'deleteData', None, None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, deleteData=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.deleteData = deleteData

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decoder when protocol, transport and spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode: walk fields until STOP, skipping unknown ids/types.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          # readListBegin yields the declared element type and count;
          # read exactly that many strings.
          (_etype558, _size555) = iprot.readListBegin()
          for _i559 in xrange(_size555):
            _elem560 = iprot.readString();
            self.part_vals.append(_elem560)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are emitted."""
    # Fast path: C-accelerated encoder when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter561 in self.part_vals:
        oprot.writeString(iter561)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs perform no required-field validation.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_partition_result:
  """
  Thrift RPC result wrapper for the drop_partition call.

  Attributes:
   - success
   - o1
   - o2
  """

  # (field id, wire type, name, type args, default) — slot 0 is the return value.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    # Take the C-accelerated fast path when protocol/transport support it.
    accelerated = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
                   and isinstance(iprot.trans, TTransport.CReadableTransport)
                   and self.thrift_spec is not None
                   and fastbinary is not None)
    if accelerated:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic decode loop: consume fields until STOP, skipping unknown ones.
    iprot.readStructBegin()
    while True:
      (_fname, wire_type, field_id) = iprot.readFieldBegin()
      if wire_type == TType.STOP:
        break
      if field_id == 0 and wire_type == TType.BOOL:
        self.success = iprot.readBool()
      elif field_id == 1 and wire_type == TType.STRUCT:
        self.o1 = NoSuchObjectException()
        self.o1.read(iprot)
      elif field_id == 2 and wire_type == TType.STRUCT:
        self.o2 = MetaException()
        self.o2.read(iprot)
      else:
        iprot.skip(wire_type)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Take the C-accelerated fast path when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated structs have nothing to validate.
    return

  def __repr__(self):
    rendered = ('%s=%r' % kv for kv in self.__dict__.iteritems())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(rendered))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_partition_with_environment_context_args:
  """
  Thrift argument struct for the drop_partition_with_environment_context() RPC.

  Attributes:
   - db_name
   - tbl_name
   - part_vals
   - deleteData
   - environment_context
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.BOOL, 'deleteData', None, None, ), # 4
    (5, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 5
  )
  def __init__(self, db_name=None, tbl_name=None, part_vals=None, deleteData=None, environment_context=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.deleteData = deleteData
    self.environment_context = environment_context
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype565, _size562) = iprot.readListBegin()
          for _i566 in xrange(_size562):
            _elem567 = iprot.readString();
            self.part_vals.append(_elem567)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter568 in self.part_vals:
        oprot.writeString(iter568)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 5)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems()/xrange — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partition_with_environment_context_result:
  """
  Thrift result struct for the drop_partition_with_environment_context() RPC:
  exactly one of success / o1 / o2 is set by the server.

  Attributes:
   - success
   - o1
   - o2
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partition_by_name_args:
  """
  Thrift argument struct for the drop_partition_by_name() RPC.

  Attributes:
   - db_name
   - tbl_name
   - part_name
   - deleteData
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
    (4, TType.BOOL, 'deleteData', None, None, ), # 4
  )
  def __init__(self, db_name=None, tbl_name=None, part_name=None, deleteData=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name
    self.deleteData = deleteData
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_by_name_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partition_by_name_result:
  """
  Thrift result struct for the drop_partition_by_name() RPC:
  exactly one of success / o1 / o2 is set by the server.

  Attributes:
   - success
   - o1
   - o2
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partition_by_name_with_environment_context_args:
  """
  Thrift argument struct for the drop_partition_by_name_with_environment_context() RPC.

  Attributes:
   - db_name
   - tbl_name
   - part_name
   - deleteData
   - environment_context
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
    (4, TType.BOOL, 'deleteData', None, None, ), # 4
    (5, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 5
  )
  def __init__(self, db_name=None, tbl_name=None, part_name=None, deleteData=None, environment_context=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name
    self.deleteData = deleteData
    self.environment_context = environment_context
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_by_name_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 5)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partition_by_name_with_environment_context_result:
  """
  Thrift result struct for the drop_partition_by_name_with_environment_context() RPC:
  exactly one of success / o1 / o2 is set by the server.

  Attributes:
   - success
   - o1
   - o2
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partition_by_name_with_environment_context_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partitions_req_args:
  """
  Thrift argument struct for the drop_partitions_req() RPC; wraps a single request object.

  Attributes:
   - req
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (DropPartitionsRequest, DropPartitionsRequest.thrift_spec), None, ), # 1
  )
  def __init__(self, req=None,):
    self.req = req
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = DropPartitionsRequest()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partitions_req_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class drop_partitions_req_result:
  """
  Thrift result struct for the drop_partitions_req() RPC:
  exactly one of success / o1 / o2 is set by the server.

  Attributes:
   - success
   - o1
   - o2
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (DropPartitionsResult, DropPartitionsResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = DropPartitionsResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_partitions_req_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_partition_args:
  """
  Thrift argument struct for the get_partition() RPC.

  Attributes:
   - db_name
   - tbl_name
   - part_vals
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
  )
  def __init__(self, db_name=None, tbl_name=None, part_vals=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype572, _size569) = iprot.readListBegin()
          for _i573 in xrange(_size569):
            _elem574 = iprot.readString();
            self.part_vals.append(_elem574)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter575 in self.part_vals:
        oprot.writeString(iter575)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_partition_result:
  """
  Thrift result struct for the get_partition() RPC:
  exactly one of success / o1 / o2 is set by the server.

  Attributes:
   - success
   - o1
   - o2
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class exchange_partition_args:
  """
  Thrift argument struct for the exchange_partition() RPC.

  Attributes:
   - partitionSpecs
   - source_db
   - source_table_name
   - dest_db
   - dest_table_name
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'partitionSpecs', (TType.STRING,None,TType.STRING,None), None, ), # 1
    (2, TType.STRING, 'source_db', None, None, ), # 2
    (3, TType.STRING, 'source_table_name', None, None, ), # 3
    (4, TType.STRING, 'dest_db', None, None, ), # 4
    (5, TType.STRING, 'dest_table_name', None, None, ), # 5
  )
  def __init__(self, partitionSpecs=None, source_db=None, source_table_name=None, dest_db=None, dest_table_name=None,):
    self.partitionSpecs = partitionSpecs
    self.source_db = source_db
    self.source_table_name = source_table_name
    self.dest_db = dest_db
    self.dest_table_name = dest_table_name
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          # string -> string map, read entry by entry.
          self.partitionSpecs = {}
          (_ktype577, _vtype578, _size576 ) = iprot.readMapBegin()
          for _i580 in xrange(_size576):
            _key581 = iprot.readString();
            _val582 = iprot.readString();
            self.partitionSpecs[_key581] = _val582
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.source_db = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.source_table_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.dest_db = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.dest_table_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('exchange_partition_args')
    if self.partitionSpecs is not None:
      oprot.writeFieldBegin('partitionSpecs', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.partitionSpecs))
      for kiter583,viter584 in self.partitionSpecs.items():
        oprot.writeString(kiter583)
        oprot.writeString(viter584)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.source_db is not None:
      oprot.writeFieldBegin('source_db', TType.STRING, 2)
      oprot.writeString(self.source_db)
      oprot.writeFieldEnd()
    if self.source_table_name is not None:
      oprot.writeFieldBegin('source_table_name', TType.STRING, 3)
      oprot.writeString(self.source_table_name)
      oprot.writeFieldEnd()
    if self.dest_db is not None:
      oprot.writeFieldBegin('dest_db', TType.STRING, 4)
      oprot.writeString(self.dest_db)
      oprot.writeFieldEnd()
    if self.dest_table_name is not None:
      oprot.writeFieldBegin('dest_table_name', TType.STRING, 5)
      oprot.writeString(self.dest_table_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class exchange_partition_result:
  """
  Thrift result struct for the exchange_partition() RPC:
  exactly one of success / o1..o4 is set by the server.

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """
  # (field id, field type, field name, type args, default); tuple index == field id.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Read this struct from ``iprot``; unknown fields are skipped for forward compatibility."""
    # Fast path: C-accelerated decoding when the accelerated binary protocol is in use.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = InvalidObjectException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Write this struct to ``oprot``; fields left as None are not emitted."""
    # Fast path: C-accelerated encoding when the accelerated binary protocol is in use.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('exchange_partition_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to enforce."""
    return
  def __repr__(self):
    # NOTE(review): dict.iteritems() — this generated module targets Python 2.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_partition_with_auth_args:
  """
  Thrift argument struct for the get_partition_with_auth call.

  Attributes:
   - db_name
   - tbl_name
   - part_vals
   - user_name
   - group_names
  """

  # Per-field metadata used by the (de)serializers and the optional C
  # accelerator: (field id, wire type, name, nested type info, default).
  # Slot 0 is unused so the tuple can be indexed directly by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.STRING, 'user_name', None, None, ), # 4
    (5, TType.LIST, 'group_names', (TType.STRING,None), None, ), # 5
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, user_name=None, group_names=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.user_name = user_name
    self.group_names = group_names

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.part_vals.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.user_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.group_names = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.group_names.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it so newer peers stay compatible.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_with_auth_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for _val in self.part_vals:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.user_name is not None:
      oprot.writeFieldBegin('user_name', TType.STRING, 4)
      oprot.writeString(self.user_name)
      oprot.writeFieldEnd()
    if self.group_names is not None:
      oprot.writeFieldBegin('group_names', TType.LIST, 5)
      oprot.writeListBegin(TType.STRING, len(self.group_names))
      for _val in self.group_names:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_with_auth_result:
  """
  Thrift result struct for the get_partition_with_auth call.

  Attributes:
   - success
   - o1
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 carries the call's return value; o1/o2 carry declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_with_auth_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_by_name_args:
  """
  Thrift argument struct for the get_partition_by_name call.

  Attributes:
   - db_name
   - tbl_name
   - part_name
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, part_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_by_name_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_by_name_result:
  """
  Thrift result struct for the get_partition_by_name call.

  Attributes:
   - success
   - o1
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 carries the call's return value; o1/o2 carry declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Partition()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_args:
  """
  Thrift argument struct for the get_partitions call.

  Attributes:
   - db_name
   - tbl_name
   - max_parts
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # max_parts defaults to -1 (see spec slot 3).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I16, 'max_parts', None, -1, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, max_parts=thrift_spec[3][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 3)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_result:
  """
  Thrift result struct for the get_partitions call.

  Attributes:
   - success
   - o1
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 is a list of Partition structs; o1/o2 carry declared exceptions.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            _elem = Partition()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _val in self.success:
        _val.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_with_auth_args:
  """
  Thrift argument struct for the get_partitions_with_auth call.

  Attributes:
   - db_name
   - tbl_name
   - max_parts
   - user_name
   - group_names
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # max_parts defaults to -1 (see spec slot 3).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I16, 'max_parts', None, -1, ), # 3
    (4, TType.STRING, 'user_name', None, None, ), # 4
    (5, TType.LIST, 'group_names', (TType.STRING,None), None, ), # 5
  )

  def __init__(self, db_name=None, tbl_name=None, max_parts=thrift_spec[3][4], user_name=None, group_names=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_parts = max_parts
    self.user_name = user_name
    self.group_names = group_names

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.user_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          self.group_names = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.group_names.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_with_auth_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 3)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    if self.user_name is not None:
      oprot.writeFieldBegin('user_name', TType.STRING, 4)
      oprot.writeString(self.user_name)
      oprot.writeFieldEnd()
    if self.group_names is not None:
      oprot.writeFieldBegin('group_names', TType.LIST, 5)
      oprot.writeListBegin(TType.STRING, len(self.group_names))
      for _val in self.group_names:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_with_auth_result:
  """
  Thrift result struct for the get_partitions_with_auth call.

  Attributes:
   - success
   - o1
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 is a list of Partition structs; o1/o2 carry declared exceptions.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            _elem = Partition()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_with_auth_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _val in self.success:
        _val.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_pspec_args:
  """
  Thrift argument struct for the get_partitions_pspec call.

  Attributes:
   - db_name
   - tbl_name
   - max_parts
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Note max_parts is an I32 here (other get_partitions_* variants use I16)
  # and defaults to -1 (see spec slot 3).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I32, 'max_parts', None, -1, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, max_parts=thrift_spec[3][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.max_parts = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_pspec_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I32, 3)
      oprot.writeI32(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_pspec_result:
  """
  Thrift result struct for the get_partitions_pspec call.

  Attributes:
   - success
   - o1
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 is a list of PartitionSpec structs; o1/o2 carry declared exceptions.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(PartitionSpec, PartitionSpec.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            _elem = PartitionSpec()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_pspec_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _val in self.success:
        _val.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_names_args:
  """
  Thrift argument struct for the get_partition_names call.

  Attributes:
   - db_name
   - tbl_name
   - max_parts
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # max_parts defaults to -1 (see spec slot 3).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I16, 'max_parts', None, -1, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, max_parts=thrift_spec[3][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_names_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 3)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_names_result:
  """
  Thrift result struct for the get_partition_names call.

  Attributes:
   - success
   - o2
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # Field 0 is a list of strings; o2 carries the declared exception.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o2=None,):
    self.success = success
    self.o2 = o2

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.success.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_names_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for _val in self.success:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 1)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_ps_args:
  """
  Thrift argument struct for the get_partitions_ps call.

  Attributes:
   - db_name
   - tbl_name
   - part_vals
   - max_parts
  """

  # Per-field metadata: (field id, wire type, name, nested type info, default).
  # max_parts defaults to -1 (see spec slot 4).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.I16, 'max_parts', None, -1, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, max_parts=thrift_spec[4][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate this struct's fields from the input protocol *iprot*."""
    # Fast path: decode via the C extension when transport/protocol allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.part_vals.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; unset (None) fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_ps_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for _val in self.part_vals:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 4)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """Generated structs carry no validation rules."""
    return

  def __repr__(self):
    # .items() instead of the Python-2-only .iteritems().
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_ps_result:
  """
  Thrift result struct for get_partitions_ps.

  Attributes:
   - success: list of Partition structs returned on success
   - o1: MetaException raised by the server, if any
   - o2: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            _elem = Partition()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_ps_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _part in self.success:
        _part.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_ps_with_auth_args:
  """
  Thrift argument struct for get_partitions_ps_with_auth.

  Attributes:
   - db_name: database name
   - tbl_name: table name
   - part_vals: partial partition value specification (list of strings)
   - max_parts: maximum partitions to return (-1 means unlimited)
   - user_name: user for authorization-aware listing
   - group_names: groups for authorization-aware listing
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.I16, 'max_parts', None, -1, ), # 4
    (5, TType.STRING, 'user_name', None, None, ), # 5
    (6, TType.LIST, 'group_names', (TType.STRING,None), None, ), # 6
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, max_parts=thrift_spec[4][4], user_name=None, group_names=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.max_parts = max_parts
    self.user_name = user_name
    self.group_names = group_names

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            self.part_vals.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.user_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.LIST:
          self.group_names = []
          (_etype, _size) = iprot.readListBegin()
          for _i in range(_size):
            self.group_names.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_ps_with_auth_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for _val in self.part_vals:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 4)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    if self.user_name is not None:
      oprot.writeFieldBegin('user_name', TType.STRING, 5)
      oprot.writeString(self.user_name)
      oprot.writeFieldEnd()
    if self.group_names is not None:
      oprot.writeFieldBegin('group_names', TType.LIST, 6)
      oprot.writeListBegin(TType.STRING, len(self.group_names))
      for _name in self.group_names:
        oprot.writeString(_name)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_ps_with_auth_result:
  """
  Thrift result struct for get_partitions_ps_with_auth.

  Attributes:
   - success: list of Partition structs returned on success
   - o1: NoSuchObjectException raised by the server, if any
   - o2: MetaException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            _elem = Partition()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        # NOTE: o1 is NoSuchObjectException here (opposite order from the
        # other *_result structs in this file, which put MetaException first).
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_ps_with_auth_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _part in self.success:
        _part.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_names_ps_args:
  """
  Thrift argument struct for get_partition_names_ps.

  Attributes:
   - db_name: database name
   - tbl_name: table name
   - part_vals: partial partition value specification (list of strings)
   - max_parts: maximum partition names to return (-1 means unlimited)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.I16, 'max_parts', None, -1, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, max_parts=thrift_spec[4][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            self.part_vals.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_names_ps_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for _val in self.part_vals:
        oprot.writeString(_val)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 4)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partition_names_ps_result:
  """
  Thrift result struct for get_partition_names_ps.

  Attributes:
   - success: list of partition name strings returned on success
   - o1: MetaException raised by the server, if any
   - o2: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            self.success.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_names_ps_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for _name in self.success:
        oprot.writeString(_name)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_filter_args:
  """
  Thrift argument struct for get_partitions_by_filter.

  Attributes:
   - db_name: database name
   - tbl_name: table name
   - filter: partition filter expression string (name kept from the IDL
     even though it shadows the builtin)
   - max_parts: maximum partitions to return (-1 means unlimited)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'filter', None, None, ), # 3
    (4, TType.I16, 'max_parts', None, -1, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, filter=None, max_parts=thrift_spec[4][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.filter = filter
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.filter = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I16:
          self.max_parts = iprot.readI16()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_filter_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRING, 3)
      oprot.writeString(self.filter)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I16, 4)
      oprot.writeI16(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_filter_result:
  """
  Thrift result struct for get_partitions_by_filter.

  Attributes:
   - success: list of Partition structs returned on success
   - o1: MetaException raised by the server, if any
   - o2: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            _elem = Partition()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_filter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _part in self.success:
        _part.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_part_specs_by_filter_args:
  """
  Thrift argument struct for get_part_specs_by_filter.

  Attributes:
   - db_name: database name
   - tbl_name: table name
   - filter: partition filter expression string
   - max_parts: maximum partitions to return; I32 here, unlike the I16
     max_parts of the other *_args structs in this file (-1 = unlimited)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'filter', None, None, ), # 3
    (4, TType.I32, 'max_parts', None, -1, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, filter=None, max_parts=thrift_spec[4][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.filter = filter
    self.max_parts = max_parts

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.filter = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.max_parts = iprot.readI32()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_part_specs_by_filter_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRING, 3)
      oprot.writeString(self.filter)
      oprot.writeFieldEnd()
    if self.max_parts is not None:
      oprot.writeFieldBegin('max_parts', TType.I32, 4)
      oprot.writeI32(self.max_parts)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_part_specs_by_filter_result:
  """
  Thrift result struct for get_part_specs_by_filter.

  Attributes:
   - success: list of PartitionSpec structs returned on success
   - o1: MetaException raised by the server, if any
   - o2: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(PartitionSpec, PartitionSpec.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            _elem = PartitionSpec()
            _elem.read(iprot)
            self.success.append(_elem)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_part_specs_by_filter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for _spec in self.success:
        _spec.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_expr_args:
  """
  Thrift argument struct for get_partitions_by_expr.

  Attributes:
   - req: PartitionsByExprRequest carrying the serialized expression
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'req', (PartitionsByExprRequest, PartitionsByExprRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, req=None,):
    self.req = req

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.req = PartitionsByExprRequest()
          self.req.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_expr_args')
    if self.req is not None:
      oprot.writeFieldBegin('req', TType.STRUCT, 1)
      self.req.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_expr_result:
  """
  Thrift result struct for get_partitions_by_expr.

  Attributes:
   - success: PartitionsByExprResult returned on success
   - o1: MetaException raised by the server, if any
   - o2: NoSuchObjectException raised by the server, if any
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (PartitionsByExprResult, PartitionsByExprResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = PartitionsByExprResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_expr_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_names_args:
  """
  Thrift argument struct for get_partitions_by_names.

  Attributes:
   - db_name: database name
   - tbl_name: table name
   - names: list of partition name strings to fetch
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'names', (TType.STRING,None), None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, names=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.names = names

  def read(self, iprot):
    """Populate fields from *iprot*; unknown field ids are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.names = []
          (_etype, _size) = iprot.readListBegin()
          # range() instead of Python-2-only xrange(); identical behaviour.
          for _i in range(_size):
            self.names.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize set (non-None) fields to *oprot* in field-id order."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_names_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.names is not None:
      oprot.writeFieldBegin('names', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.names))
      for _name in self.names:
        oprot.writeString(_name)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    """All fields are optional; nothing to validate."""
    return

  def __repr__(self):
    # .items() (not Python-2-only iteritems()) works on Python 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_by_names_result:
  """
  Attributes:
   - success
   - o1
   - o2
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype707, _size704) = iprot.readListBegin()
          # range (not Python-2-only xrange) keeps this Python 3 compatible.
          for _i708 in range(_size704):
            _elem709 = Partition()
            _elem709.read(iprot)
            self.success.append(_elem709)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_by_names_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter710 in self.success:
        iter710.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partition_args:
  """
  Attributes:
   - db_name
   - tbl_name
   - new_part
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRUCT, 'new_part', (Partition, Partition.thrift_spec), None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, new_part=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.new_part = new_part

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.new_part = Partition()
          self.new_part.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partition_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.new_part is not None:
      oprot.writeFieldBegin('new_part', TType.STRUCT, 3)
      self.new_part.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partition_result:
  """
  Attributes:
   - o1
   - o2
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partition_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partitions_args:
  """
  Attributes:
   - db_name
   - tbl_name
   - new_parts
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'new_parts', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, new_parts=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.new_parts = new_parts

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.new_parts = []
          (_etype714, _size711) = iprot.readListBegin()
          # range (not Python-2-only xrange) keeps this Python 3 compatible.
          for _i715 in range(_size711):
            _elem716 = Partition()
            _elem716.read(iprot)
            self.new_parts.append(_elem716)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partitions_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.new_parts is not None:
      oprot.writeFieldBegin('new_parts', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.new_parts))
      for iter717 in self.new_parts:
        iter717.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partitions_result:
  """
  Attributes:
   - o1
   - o2
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partitions_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partition_with_environment_context_args:
  """
  Attributes:
   - db_name
   - tbl_name
   - new_part
   - environment_context
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRUCT, 'new_part', (Partition, Partition.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, new_part=None, environment_context=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.new_part = new_part
    self.environment_context = environment_context

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.new_part = Partition()
          self.new_part.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.environment_context = EnvironmentContext()
          self.environment_context.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partition_with_environment_context_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.new_part is not None:
      oprot.writeFieldBegin('new_part', TType.STRUCT, 3)
      self.new_part.write(oprot)
      oprot.writeFieldEnd()
    if self.environment_context is not None:
      oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
      self.environment_context.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_partition_with_environment_context_result:
  """
  Attributes:
   - o1
   - o2
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_partition_with_environment_context_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class rename_partition_args:
  """
  Attributes:
   - db_name
   - tbl_name
   - part_vals
   - new_part
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    (4, TType.STRUCT, 'new_part', (Partition, Partition.thrift_spec), None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, new_part=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.new_part = new_part

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype721, _size718) = iprot.readListBegin()
          # range (not Python-2-only xrange) keeps this Python 3 compatible.
          for _i722 in range(_size718):
            _elem723 = iprot.readString()
            self.part_vals.append(_elem723)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.new_part = Partition()
          self.new_part.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('rename_partition_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 3)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter724 in self.part_vals:
        oprot.writeString(iter724)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.new_part is not None:
      oprot.writeFieldBegin('new_part', TType.STRUCT, 4)
      self.new_part.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class rename_partition_result:
  """
  Attributes:
   - o1
   - o2
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('rename_partition_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_has_valid_characters_args:
  """
  Attributes:
   - part_vals
   - throw_exception
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 1
    (2, TType.BOOL, 'throw_exception', None, None, ), # 2
  )

  def __init__(self, part_vals=None, throw_exception=None,):
    self.part_vals = part_vals
    self.throw_exception = throw_exception

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.part_vals = []
          (_etype728, _size725) = iprot.readListBegin()
          # range (not Python-2-only xrange) keeps this Python 3 compatible.
          for _i729 in range(_size725):
            _elem730 = iprot.readString()
            self.part_vals.append(_elem730)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.BOOL:
          self.throw_exception = iprot.readBool()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('partition_name_has_valid_characters_args')
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.LIST, 1)
      oprot.writeListBegin(TType.STRING, len(self.part_vals))
      for iter731 in self.part_vals:
        oprot.writeString(iter731)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.throw_exception is not None:
      oprot.writeFieldBegin('throw_exception', TType.BOOL, 2)
      oprot.writeBool(self.throw_exception)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_has_valid_characters_result:
  """
  Attributes:
   - success
   - o1
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('partition_name_has_valid_characters_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_config_value_args:
  """
  Attributes:
   - name
   - defaultValue
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'name', None, None, ), # 1
    (2, TType.STRING, 'defaultValue', None, None, ), # 2
  )

  def __init__(self, name=None, defaultValue=None,):
    self.name = name
    self.defaultValue = defaultValue

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.defaultValue = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_config_value_args')
    if self.name is not None:
      oprot.writeFieldBegin('name', TType.STRING, 1)
      oprot.writeString(self.name)
      oprot.writeFieldEnd()
    if self.defaultValue is not None:
      oprot.writeFieldBegin('defaultValue', TType.STRING, 2)
      oprot.writeString(self.defaultValue)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_config_value_result:
  """
  Attributes:
   - success
   - o1
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (ConfigValSecurityException, ConfigValSecurityException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = ConfigValSecurityException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_config_value_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_to_vals_args:
  """
  Attributes:
   - part_name
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'part_name', None, None, ), # 1
  )

  def __init__(self, part_name=None,):
    self.part_name = part_name

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.part_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('partition_name_to_vals_args')
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 1)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_to_vals_result:
  """
  Attributes:
   - success
   - o1
  """

  # Thrift field metadata: one slot per field id -> (id, type, name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*, skipping unrecognized fields."""
    # Fast path: C-accelerated decoder when the protocol and transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype735, _size732) = iprot.readListBegin()
          # range (not Python-2-only xrange) keeps this Python 3 compatible.
          for _i736 in range(_size732):
            _elem737 = iprot.readString()
            self.success.append(_elem737)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('partition_name_to_vals_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter738 in self.success:
        oprot.writeString(iter738)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # Generated no-op: no required fields to check.
    return

  def __repr__(self):
    # items() instead of Python-2-only iteritems() so repr also works on Python 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_to_spec_args:
  """
  Thrift argument struct for the partition_name_to_spec RPC.

  Attributes:
   - part_name: partition name string to convert into a spec
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'part_name', None, None, ), # 1
  )

  def __init__(self, part_name=None,):
    self.part_name = part_name

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.part_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('partition_name_to_spec_args')
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 1)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class partition_name_to_spec_result:
  """
  Thrift result struct for the partition_name_to_spec RPC.

  Attributes:
   - success: map of string -> string (the partition spec) on success
   - o1: MetaException raised by the server, if any
  """

  # (field id, type, name, type args, default) per field id; index 0 is the
  # return value, positive indices are declared exceptions.
  thrift_spec = (
    (0, TType.MAP, 'success', (TType.STRING,None,TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.MAP:
          self.success = {}
          (_ktype740, _vtype741, _size739 ) = iprot.readMapBegin()
          for _i743 in xrange(_size739):
            _key744 = iprot.readString();
            _val745 = iprot.readString();
            self.success[_key744] = _val745
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('partition_name_to_spec_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.MAP, 0)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
      for kiter746,viter747 in self.success.items():
        oprot.writeString(kiter746)
        oprot.writeString(viter747)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class markPartitionForEvent_args:
  """
  Thrift argument struct for the markPartitionForEvent RPC.

  Attributes:
   - db_name: database name (string)
   - tbl_name: table name (string)
   - part_vals: map of string -> string partition key/value pairs
   - eventType: event type code (i32)
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.MAP, 'part_vals', (TType.STRING,None,TType.STRING,None), None, ), # 3
    (4, TType.I32, 'eventType', None, None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, eventType=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.eventType = eventType

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.MAP:
          self.part_vals = {}
          (_ktype749, _vtype750, _size748 ) = iprot.readMapBegin()
          for _i752 in xrange(_size748):
            _key753 = iprot.readString();
            _val754 = iprot.readString();
            self.part_vals[_key753] = _val754
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.eventType = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('markPartitionForEvent_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.MAP, 3)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.part_vals))
      for kiter755,viter756 in self.part_vals.items():
        oprot.writeString(kiter755)
        oprot.writeString(viter756)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.eventType is not None:
      oprot.writeFieldBegin('eventType', TType.I32, 4)
      oprot.writeI32(self.eventType)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class markPartitionForEvent_result:
  """
  Thrift result struct for the markPartitionForEvent RPC (void return).

  Attributes (each a declared exception, set only if raised):
   - o1: MetaException
   - o2: NoSuchObjectException
   - o3: UnknownDBException
   - o4: UnknownTableException
   - o5: UnknownPartitionException
   - o6: InvalidPartitionException
  """

  # (field id, type, name, type args, default) per field id; index 0 unused
  # because the RPC has no return value.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 4
    (5, TType.STRUCT, 'o5', (UnknownPartitionException, UnknownPartitionException.thrift_spec), None, ), # 5
    (6, TType.STRUCT, 'o6', (InvalidPartitionException, InvalidPartitionException.thrift_spec), None, ), # 6
  )

  def __init__(self, o1=None, o2=None, o3=None, o4=None, o5=None, o6=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
    self.o5 = o5
    self.o6 = o6

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = UnknownTableException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.o5 = UnknownPartitionException()
          self.o5.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.o6 = InvalidPartitionException()
          self.o6.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('markPartitionForEvent_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    if self.o5 is not None:
      oprot.writeFieldBegin('o5', TType.STRUCT, 5)
      self.o5.write(oprot)
      oprot.writeFieldEnd()
    if self.o6 is not None:
      oprot.writeFieldBegin('o6', TType.STRUCT, 6)
      self.o6.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class isPartitionMarkedForEvent_args:
  """
  Thrift argument struct for the isPartitionMarkedForEvent RPC.

  Attributes:
   - db_name: database name (string)
   - tbl_name: table name (string)
   - part_vals: map of string -> string partition key/value pairs
   - eventType: event type code (i32)
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.MAP, 'part_vals', (TType.STRING,None,TType.STRING,None), None, ), # 3
    (4, TType.I32, 'eventType', None, None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_vals=None, eventType=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_vals = part_vals
    self.eventType = eventType

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.MAP:
          self.part_vals = {}
          (_ktype758, _vtype759, _size757 ) = iprot.readMapBegin()
          for _i761 in xrange(_size757):
            _key762 = iprot.readString();
            _val763 = iprot.readString();
            self.part_vals[_key762] = _val763
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.eventType = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('isPartitionMarkedForEvent_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_vals is not None:
      oprot.writeFieldBegin('part_vals', TType.MAP, 3)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.part_vals))
      for kiter764,viter765 in self.part_vals.items():
        oprot.writeString(kiter764)
        oprot.writeString(viter765)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.eventType is not None:
      oprot.writeFieldBegin('eventType', TType.I32, 4)
      oprot.writeI32(self.eventType)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class isPartitionMarkedForEvent_result:
  """
  Thrift result struct for the isPartitionMarkedForEvent RPC.

  Attributes:
   - success: bool return value
   - o1: MetaException, if raised
   - o2: NoSuchObjectException, if raised
   - o3: UnknownDBException, if raised
   - o4: UnknownTableException, if raised
   - o5: UnknownPartitionException, if raised
   - o6: InvalidPartitionException, if raised
  """

  # (field id, type, name, type args, default) per field id; index 0 is the
  # return value, positive indices are declared exceptions.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (UnknownDBException, UnknownDBException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (UnknownTableException, UnknownTableException.thrift_spec), None, ), # 4
    (5, TType.STRUCT, 'o5', (UnknownPartitionException, UnknownPartitionException.thrift_spec), None, ), # 5
    (6, TType.STRUCT, 'o6', (InvalidPartitionException, InvalidPartitionException.thrift_spec), None, ), # 6
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None, o5=None, o6=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
    self.o5 = o5
    self.o6 = o6

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = UnknownDBException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = UnknownTableException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.o5 = UnknownPartitionException()
          self.o5.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.o6 = InvalidPartitionException()
          self.o6.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('isPartitionMarkedForEvent_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    if self.o5 is not None:
      oprot.writeFieldBegin('o5', TType.STRUCT, 5)
      self.o5.write(oprot)
      oprot.writeFieldEnd()
    if self.o6 is not None:
      oprot.writeFieldBegin('o6', TType.STRUCT, 6)
      self.o6.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_index_args:
  """
  Thrift argument struct for the add_index RPC.

  Attributes:
   - new_index: Index struct to create
   - index_table: Table struct associated with the index
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'new_index', (Index, Index.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'index_table', (Table, Table.thrift_spec), None, ), # 2
  )

  def __init__(self, new_index=None, index_table=None,):
    self.new_index = new_index
    self.index_table = index_table

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.new_index = Index()
          self.new_index.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.index_table = Table()
          self.index_table.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('add_index_args')
    if self.new_index is not None:
      oprot.writeFieldBegin('new_index', TType.STRUCT, 1)
      self.new_index.write(oprot)
      oprot.writeFieldEnd()
    if self.index_table is not None:
      oprot.writeFieldBegin('index_table', TType.STRUCT, 2)
      self.index_table.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_index_result:
  """
  Thrift result struct for the add_index RPC.

  Attributes:
   - success: Index struct returned on success
   - o1: InvalidObjectException, if raised
   - o2: AlreadyExistsException, if raised
   - o3: MetaException, if raised
  """

  # (field id, type, name, type args, default) per field id; index 0 is the
  # return value, positive indices are declared exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Index, Index.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Index()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = AlreadyExistsException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('add_index_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_index_args:
  """
  Thrift argument struct for the alter_index RPC.

  Attributes:
   - dbname: database name (string)
   - base_tbl_name: base table name (string)
   - idx_name: name of the index to alter (string)
   - new_idx: replacement Index struct
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbname', None, None, ), # 1
    (2, TType.STRING, 'base_tbl_name', None, None, ), # 2
    (3, TType.STRING, 'idx_name', None, None, ), # 3
    (4, TType.STRUCT, 'new_idx', (Index, Index.thrift_spec), None, ), # 4
  )

  def __init__(self, dbname=None, base_tbl_name=None, idx_name=None, new_idx=None,):
    self.dbname = dbname
    self.base_tbl_name = base_tbl_name
    self.idx_name = idx_name
    self.new_idx = new_idx

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbname = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.base_tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.idx_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.new_idx = Index()
          self.new_idx.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('alter_index_args')
    if self.dbname is not None:
      oprot.writeFieldBegin('dbname', TType.STRING, 1)
      oprot.writeString(self.dbname)
      oprot.writeFieldEnd()
    if self.base_tbl_name is not None:
      oprot.writeFieldBegin('base_tbl_name', TType.STRING, 2)
      oprot.writeString(self.base_tbl_name)
      oprot.writeFieldEnd()
    if self.idx_name is not None:
      oprot.writeFieldBegin('idx_name', TType.STRING, 3)
      oprot.writeString(self.idx_name)
      oprot.writeFieldEnd()
    if self.new_idx is not None:
      oprot.writeFieldBegin('new_idx', TType.STRUCT, 4)
      self.new_idx.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_index_result:
  """
  Thrift result struct for the alter_index RPC (void return).

  Attributes (each a declared exception, set only if raised):
   - o1: InvalidOperationException
   - o2: MetaException
  """

  # (field id, type, name, type args, default) per field id; index 0 unused
  # because the RPC has no return value.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('alter_index_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_index_by_name_args:
  """
  Thrift argument struct for the drop_index_by_name RPC.

  Attributes:
   - db_name: database name (string)
   - tbl_name: table name (string)
   - index_name: name of the index to drop (string)
   - deleteData: whether to also delete underlying data (bool)
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'index_name', None, None, ), # 3
    (4, TType.BOOL, 'deleteData', None, None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, index_name=None, deleteData=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.index_name = index_name
    self.deleteData = deleteData

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.index_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.deleteData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('drop_index_by_name_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.index_name is not None:
      oprot.writeFieldBegin('index_name', TType.STRING, 3)
      oprot.writeString(self.index_name)
      oprot.writeFieldEnd()
    if self.deleteData is not None:
      oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
      oprot.writeBool(self.deleteData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_index_by_name_result:
  """
  Thrift result struct for the drop_index_by_name RPC.

  Attributes:
   - success: bool return value
   - o1: NoSuchObjectException, if raised
   - o2: MetaException, if raised
  """

  # (field id, type, name, type args, default) per field id; index 0 is the
  # return value, positive indices are declared exceptions.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('drop_index_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_index_by_name_args:
  """
  Thrift argument struct for the get_index_by_name RPC.

  Attributes:
   - db_name: database name (string)
   - tbl_name: table name (string)
   - index_name: name of the index to look up (string)
  """

  # (field id, type, name, type args, default) per field id; index 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'index_name', None, None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, index_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.index_name = index_name

  def read(self, iprot):
    # Fast path: C-accelerated decode via fastbinary when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.index_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode via fastbinary when possible.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('get_index_by_name_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.index_name is not None:
      oprot.writeFieldBegin('index_name', TType.STRING, 3)
      oprot.writeString(self.index_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # No required-field constraints for this struct.
    return

  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    # Structural equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_index_by_name_result:
  """
  Result struct for the get_index_by_name RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (Index, Index.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Index()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_index_by_name_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_indexes_args:
  """
  Argument struct for the get_indexes RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - db_name
   - tbl_name
   - max_indexes
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I16, 'max_indexes', None, -1, ), # 3
  )
  # max_indexes defaults to -1, taken from the spec default (thrift_spec[3][4]).
  def __init__(self, db_name=None, tbl_name=None, max_indexes=thrift_spec[3][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_indexes = max_indexes
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_indexes = iprot.readI16();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_indexes_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_indexes is not None:
      oprot.writeFieldBegin('max_indexes', TType.I16, 3)
      oprot.writeI16(self.max_indexes)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_indexes_result:
  """
  Result struct for the get_indexes RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(Index, Index.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # success is a list of Index structs, read element by element.
          self.success = []
          (_etype769, _size766) = iprot.readListBegin()
          for _i770 in xrange(_size766):
            _elem771 = Index()
            _elem771.read(iprot)
            self.success.append(_elem771)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_indexes_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter772 in self.success:
        iter772.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_index_names_args:
  """
  Argument struct for the get_index_names RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - db_name
   - tbl_name
   - max_indexes
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.I16, 'max_indexes', None, -1, ), # 3
  )
  # max_indexes defaults to -1, taken from the spec default (thrift_spec[3][4]).
  def __init__(self, db_name=None, tbl_name=None, max_indexes=thrift_spec[3][4],):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.max_indexes = max_indexes
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I16:
          self.max_indexes = iprot.readI16();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_index_names_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.max_indexes is not None:
      oprot.writeFieldBegin('max_indexes', TType.I16, 3)
      oprot.writeI16(self.max_indexes)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_index_names_result:
  """
  Result struct for the get_index_names RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o2
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, o2=None,):
    self.success = success
    self.o2 = o2
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # success is a list of strings, read element by element.
          self.success = []
          (_etype776, _size773) = iprot.readListBegin()
          for _i777 in xrange(_size773):
            _elem778 = iprot.readString();
            self.success.append(_elem778)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_index_names_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter779 in self.success:
        oprot.writeString(iter779)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 1)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class update_table_column_statistics_args:
  """
  Argument struct for the update_table_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - stats_obj
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'stats_obj', (ColumnStatistics, ColumnStatistics.thrift_spec), None, ), # 1
  )
  def __init__(self, stats_obj=None,):
    self.stats_obj = stats_obj
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.stats_obj = ColumnStatistics()
          self.stats_obj.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('update_table_column_statistics_args')
    if self.stats_obj is not None:
      oprot.writeFieldBegin('stats_obj', TType.STRUCT, 1)
      self.stats_obj.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class update_table_column_statistics_result:
  """
  Result struct for the update_table_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('update_table_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class update_partition_column_statistics_args:
  """
  Argument struct for the update_partition_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - stats_obj
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'stats_obj', (ColumnStatistics, ColumnStatistics.thrift_spec), None, ), # 1
  )
  def __init__(self, stats_obj=None,):
    self.stats_obj = stats_obj
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.stats_obj = ColumnStatistics()
          self.stats_obj.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('update_partition_column_statistics_args')
    if self.stats_obj is not None:
      oprot.writeFieldBegin('stats_obj', TType.STRUCT, 1)
      self.stats_obj.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class update_partition_column_statistics_result:
  """
  Result struct for the update_partition_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('update_partition_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_column_statistics_args:
  """
  Argument struct for the get_table_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - db_name
   - tbl_name
   - col_name
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'col_name', None, None, ), # 3
  )
  def __init__(self, db_name=None, tbl_name=None, col_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.col_name = col_name
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.col_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_table_column_statistics_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.col_name is not None:
      oprot.writeFieldBegin('col_name', TType.STRING, 3)
      oprot.writeString(self.col_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_column_statistics_result:
  """
  Result struct for the get_table_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ColumnStatistics, ColumnStatistics.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 4
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ColumnStatistics()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = InvalidInputException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidObjectException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_table_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_partition_column_statistics_args:
  """
  Argument struct for the get_partition_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - db_name
   - tbl_name
   - part_name
   - col_name
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 unused.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
    (4, TType.STRING, 'col_name', None, None, ), # 4
  )
  def __init__(self, db_name=None, tbl_name=None, part_name=None, col_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name
    self.col_name = col_name
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.col_name = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_column_statistics_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    if self.col_name is not None:
      oprot.writeFieldBegin('col_name', TType.STRING, 4)
      oprot.writeString(self.col_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_partition_column_statistics_result:
  """
  Result struct for the get_partition_column_statistics RPC (Thrift-generated; do not edit by hand).

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """
  # Per-field metadata: (field id, wire type, name, nested spec, default); slot 0 is the return value.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (ColumnStatistics, ColumnStatistics.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 4
  )
  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4
  def read(self, iprot):
    """Deserialize this struct from iprot; fields with unexpected types or ids are skipped."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ColumnStatistics()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = InvalidInputException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidObjectException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize all non-None fields to oprot in field-id order."""
    # C-accelerated fast path when the accelerated binary protocol and fastbinary are available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partition_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required-field constraints to check."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class get_table_statistics_req_args:
  """
  Thrift call arguments for get_table_statistics_req.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (TableStatsRequest, TableStatsRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.request = TableStatsRequest()
          self.request.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_table_statistics_req_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_table_statistics_req_result:
  """
  Thrift result struct for get_table_statistics_req.

  Attributes:
   - success
   - o1
   - o2
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TableStatsResult, TableStatsResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TableStatsResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_table_statistics_req_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_statistics_req_args:
  """
  Thrift call arguments for get_partitions_statistics_req.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (PartitionsStatsRequest, PartitionsStatsRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.request = PartitionsStatsRequest()
          self.request.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_statistics_req_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_partitions_statistics_req_result:
  """
  Thrift result struct for get_partitions_statistics_req.

  Attributes:
   - success
   - o1
   - o2
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (PartitionsStatsResult, PartitionsStatsResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = PartitionsStatsResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_partitions_statistics_req_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_aggr_stats_for_args:
  """
  Thrift call arguments for get_aggr_stats_for.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (PartitionsStatsRequest, PartitionsStatsRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.request = PartitionsStatsRequest()
          self.request.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_aggr_stats_for_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_aggr_stats_for_result:
  """
  Thrift result struct for get_aggr_stats_for.

  Attributes:
   - success
   - o1
   - o2
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (AggrStats, AggrStats.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = AggrStats()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_aggr_stats_for_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class set_aggr_stats_for_args:
  """
  Thrift call arguments for set_aggr_stats_for.

  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (SetPartitionsStatsRequest, SetPartitionsStatsRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.request = SetPartitionsStatsRequest()
          self.request.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('set_aggr_stats_for_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class set_aggr_stats_for_result:
  """
  Thrift result struct for set_aggr_stats_for.

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('set_aggr_stats_for_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class delete_partition_column_statistics_args:
  """
  Thrift call arguments for delete_partition_column_statistics.

  Attributes:
   - db_name
   - tbl_name
   - part_name
   - col_name
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'part_name', None, None, ), # 3
    (4, TType.STRING, 'col_name', None, None, ), # 4
  )

  def __init__(self, db_name=None, tbl_name=None, part_name=None, col_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.part_name = part_name
    self.col_name = col_name

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.part_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.col_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('delete_partition_column_statistics_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.part_name is not None:
      oprot.writeFieldBegin('part_name', TType.STRING, 3)
      oprot.writeString(self.part_name)
      oprot.writeFieldEnd()
    if self.col_name is not None:
      oprot.writeFieldBegin('col_name', TType.STRING, 4)
      oprot.writeString(self.col_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class delete_partition_column_statistics_result:
  """
  Thrift result struct for delete_partition_column_statistics.

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = InvalidObjectException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('delete_partition_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class delete_table_column_statistics_args:
  """
  Thrift call arguments for delete_table_column_statistics.

  Attributes:
   - db_name
   - tbl_name
   - col_name
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'db_name', None, None, ), # 1
    (2, TType.STRING, 'tbl_name', None, None, ), # 2
    (3, TType.STRING, 'col_name', None, None, ), # 3
  )

  def __init__(self, db_name=None, tbl_name=None, col_name=None,):
    self.db_name = db_name
    self.tbl_name = tbl_name
    self.col_name = col_name

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.db_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.tbl_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.col_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('delete_table_column_statistics_args')
    if self.db_name is not None:
      oprot.writeFieldBegin('db_name', TType.STRING, 1)
      oprot.writeString(self.db_name)
      oprot.writeFieldEnd()
    if self.tbl_name is not None:
      oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
      oprot.writeString(self.tbl_name)
      oprot.writeFieldEnd()
    if self.col_name is not None:
      oprot.writeFieldBegin('col_name', TType.STRING, 3)
      oprot.writeString(self.col_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class delete_table_column_statistics_result:
  """
  Thrift result struct for delete_table_column_statistics.

  Attributes:
   - success
   - o1
   - o2
   - o3
   - o4
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (InvalidInputException, InvalidInputException.thrift_spec), None, ), # 4
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None, o4=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = InvalidObjectException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = InvalidInputException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('delete_table_column_statistics_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_function_args:
  """
  Thrift call arguments for create_function.

  Attributes:
   - func
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'func', (Function, Function.thrift_spec), None, ), # 1
  )

  def __init__(self, func=None,):
    self.func = func

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.func = Function()
          self.func.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_function_args')
    if self.func is not None:
      oprot.writeFieldBegin('func', TType.STRUCT, 1)
      self.func.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_function_result:
  """
  Thrift result struct for create_function (void call: exception fields only).

  Attributes:
   - o1
   - o2
   - o3
   - o4
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'o4', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 4
  )

  def __init__(self, o1=None, o2=None, o3=None, o4=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3
    self.o4 = o4

  def read(self, iprot):
    """Deserialize this struct from *iprot* (a Thrift protocol object)."""
    # Fast path: C-accelerated decoding when protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = AlreadyExistsException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = InvalidObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.o4 = NoSuchObjectException()
          self.o4.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it to stay wire-compatible with newer peers.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields left as None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_function_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    if self.o4 is not None:
      oprot.writeFieldBegin('o4', TType.STRUCT, 4)
      self.o4.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # Bug fix: dict.items() works on Python 2 and 3; iteritems() is Py2-only.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_function_args:
  """
  Thrift-serializable argument struct for the ``drop_function`` call.

  Attributes:
   - dbName
   - funcName
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbName', None, None, ), # 1
    (2, TType.STRING, 'funcName', None, None, ), # 2
  )

  def __init__(self, dbName=None, funcName=None,):
    self.dbName = dbName
    self.funcName = funcName

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.funcName = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_function_args')
    if self.dbName is not None:
      oprot.writeFieldBegin('dbName', TType.STRING, 1)
      oprot.writeString(self.dbName)
      oprot.writeFieldEnd()
    if self.funcName is not None:
      oprot.writeFieldBegin('funcName', TType.STRING, 2)
      oprot.writeString(self.funcName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_function_result:
  """
  Thrift-serializable result struct for the ``drop_function`` call
  (void return; only exception fields).

  Attributes:
   - o1
   - o3
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o3=None,):
    self.o1 = o1
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchObjectException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o3 = MetaException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_function_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 2)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_function_args:
  """
  Thrift-serializable argument struct for the ``alter_function`` call.

  Attributes:
   - dbName
   - funcName
   - newFunc
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbName', None, None, ), # 1
    (2, TType.STRING, 'funcName', None, None, ), # 2
    (3, TType.STRUCT, 'newFunc', (Function, Function.thrift_spec), None, ), # 3
  )

  def __init__(self, dbName=None, funcName=None, newFunc=None,):
    self.dbName = dbName
    self.funcName = funcName
    self.newFunc = newFunc

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.funcName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.newFunc = Function()
          self.newFunc.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_function_args')
    if self.dbName is not None:
      oprot.writeFieldBegin('dbName', TType.STRING, 1)
      oprot.writeString(self.dbName)
      oprot.writeFieldEnd()
    if self.funcName is not None:
      oprot.writeFieldBegin('funcName', TType.STRING, 2)
      oprot.writeString(self.funcName)
      oprot.writeFieldEnd()
    if self.newFunc is not None:
      oprot.writeFieldBegin('newFunc', TType.STRUCT, 3)
      self.newFunc.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class alter_function_result:
  """
  Thrift-serializable result struct for the ``alter_function`` call
  (void return; only exception fields).

  Attributes:
   - o1
   - o2
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (InvalidOperationException, InvalidOperationException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = InvalidOperationException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = MetaException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('alter_function_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_functions_args:
  """
  Thrift-serializable argument struct for the ``get_functions`` call.

  Attributes:
   - dbName
   - pattern
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbName', None, None, ), # 1
    (2, TType.STRING, 'pattern', None, None, ), # 2
  )

  def __init__(self, dbName=None, pattern=None,):
    self.dbName = dbName
    self.pattern = pattern

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.pattern = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_functions_args')
    if self.dbName is not None:
      oprot.writeFieldBegin('dbName', TType.STRING, 1)
      oprot.writeString(self.dbName)
      oprot.writeFieldEnd()
    if self.pattern is not None:
      oprot.writeFieldBegin('pattern', TType.STRING, 2)
      oprot.writeString(self.pattern)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_functions_result:
  """
  Thrift-serializable result struct for the ``get_functions`` call.

  Attributes:
   - success: list of strings (field id 0)
   - o1
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype783, _size780) = iprot.readListBegin()
          # xrange() does not exist on Python 3; range() iterates identically.
          for _i784 in range(_size780):
            _elem785 = iprot.readString()
            self.success.append(_elem785)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_functions_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter786 in self.success:
        oprot.writeString(iter786)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_function_args:
  """
  Thrift-serializable argument struct for the ``get_function`` call.

  Attributes:
   - dbName
   - funcName
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'dbName', None, None, ), # 1
    (2, TType.STRING, 'funcName', None, None, ), # 2
  )

  def __init__(self, dbName=None, funcName=None,):
    self.dbName = dbName
    self.funcName = funcName

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.dbName = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.funcName = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_function_args')
    if self.dbName is not None:
      oprot.writeFieldBegin('dbName', TType.STRING, 1)
      oprot.writeString(self.dbName)
      oprot.writeFieldEnd()
    if self.funcName is not None:
      oprot.writeFieldBegin('funcName', TType.STRING, 2)
      oprot.writeString(self.funcName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_function_result:
  """
  Thrift-serializable result struct for the ``get_function`` call.

  Attributes:
   - success: Function struct (field id 0)
   - o1
   - o2
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (Function, Function.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'o2', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = Function()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchObjectException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_function_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_role_args:
  """
  Thrift-serializable argument struct for the ``create_role`` call.

  Attributes:
   - role
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'role', (Role, Role.thrift_spec), None, ), # 1
  )

  def __init__(self, role=None,):
    self.role = role

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.role = Role()
          self.role.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_role_args')
    if self.role is not None:
      oprot.writeFieldBegin('role', TType.STRUCT, 1)
      self.role.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class create_role_result:
  """
  Thrift-serializable result struct for the ``create_role`` call.

  Attributes:
   - success: bool (field id 0)
   - o1
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('create_role_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_role_args:
  """
  Thrift-serializable argument struct for the ``drop_role`` call.

  Attributes:
   - role_name
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'role_name', None, None, ), # 1
  )

  def __init__(self, role_name=None,):
    self.role_name = role_name

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.role_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_role_args')
    if self.role_name is not None:
      oprot.writeFieldBegin('role_name', TType.STRING, 1)
      oprot.writeString(self.role_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class drop_role_result:
  """
  Thrift-serializable result struct for the ``drop_role`` call.

  Attributes:
   - success: bool (field id 0)
   - o1
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('drop_role_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_role_names_args:
  """Thrift-serializable argument struct for ``get_role_names`` (no fields)."""

  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this struct from *iprot*; all fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this (empty) struct to *oprot*."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_role_names_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_role_names_result:
  """
  Thrift-serializable result struct for the ``get_role_names`` call.

  Attributes:
   - success: list of strings (field id 0)
   - o1
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype790, _size787) = iprot.readListBegin()
          # xrange() does not exist on Python 3; range() iterates identically.
          for _i791 in range(_size787):
            _elem792 = iprot.readString()
            self.success.append(_elem792)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_role_names_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for iter793 in self.success:
        oprot.writeString(iter793)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class grant_role_args:
  """
  Thrift-serializable argument struct for the ``grant_role`` call.

  Attributes:
   - role_name
   - principal_name
   - principal_type
   - grantor
   - grantorType
   - grant_option
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'role_name', None, None, ), # 1
    (2, TType.STRING, 'principal_name', None, None, ), # 2
    (3, TType.I32, 'principal_type', None, None, ), # 3
    (4, TType.STRING, 'grantor', None, None, ), # 4
    (5, TType.I32, 'grantorType', None, None, ), # 5
    (6, TType.BOOL, 'grant_option', None, None, ), # 6
  )

  def __init__(self, role_name=None, principal_name=None, principal_type=None, grantor=None, grantorType=None, grant_option=None,):
    self.role_name = role_name
    self.principal_name = principal_name
    self.principal_type = principal_type
    self.grantor = grantor
    self.grantorType = grantorType
    self.grant_option = grant_option

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.role_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.principal_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.principal_type = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.grantor = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I32:
          self.grantorType = iprot.readI32()
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.grant_option = iprot.readBool()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('grant_role_args')
    if self.role_name is not None:
      oprot.writeFieldBegin('role_name', TType.STRING, 1)
      oprot.writeString(self.role_name)
      oprot.writeFieldEnd()
    if self.principal_name is not None:
      oprot.writeFieldBegin('principal_name', TType.STRING, 2)
      oprot.writeString(self.principal_name)
      oprot.writeFieldEnd()
    if self.principal_type is not None:
      oprot.writeFieldBegin('principal_type', TType.I32, 3)
      oprot.writeI32(self.principal_type)
      oprot.writeFieldEnd()
    if self.grantor is not None:
      oprot.writeFieldBegin('grantor', TType.STRING, 4)
      oprot.writeString(self.grantor)
      oprot.writeFieldEnd()
    if self.grantorType is not None:
      oprot.writeFieldBegin('grantorType', TType.I32, 5)
      oprot.writeI32(self.grantorType)
      oprot.writeFieldEnd()
    if self.grant_option is not None:
      oprot.writeFieldBegin('grant_option', TType.BOOL, 6)
      oprot.writeBool(self.grant_option)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class grant_role_result:
  """
  Thrift-serializable result struct for the ``grant_role`` call.

  Attributes:
   - success: bool (field id 0)
   - o1
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*; unknown fields are skipped."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*; fields that are None are omitted."""
    # Fast path: C-accelerated encoding when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('grant_role_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # iteritems() was removed in Python 3; items() works on both 2 and 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class revoke_role_args:
    """Thrift argument struct for the revoke_role RPC.

    Attributes:
     - role_name: name of the role to revoke
     - principal_name: principal losing the role
     - principal_type: PrincipalType enum value (i32 on the wire)
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'role_name', None, None, ), # 1
        (2, TType.STRING, 'principal_name', None, None, ), # 2
        (3, TType.I32, 'principal_type', None, None, ), # 3
    )

    def __init__(self, role_name=None, principal_name=None, principal_type=None,):
        self.role_name = role_name
        self.principal_name = principal_name
        self.principal_type = principal_type

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.role_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.principal_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.principal_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revoke_role_args')
        if self.role_name is not None:
            oprot.writeFieldBegin('role_name', TType.STRING, 1)
            oprot.writeString(self.role_name)
            oprot.writeFieldEnd()
        if self.principal_name is not None:
            oprot.writeFieldBegin('principal_name', TType.STRING, 2)
            oprot.writeString(self.principal_name)
            oprot.writeFieldEnd()
        if self.principal_type is not None:
            oprot.writeFieldBegin('principal_type', TType.I32, 3)
            oprot.writeI32(self.principal_type)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revoke_role_result:
    """Thrift result struct for the revoke_role RPC.

    Attributes:
     - success: boolean outcome of the call
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('revoke_role_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class list_roles_args:
    """Thrift argument struct for the list_roles RPC.

    Attributes:
     - principal_name: principal whose roles are listed
     - principal_type: PrincipalType enum value (i32 on the wire)
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'principal_name', None, None, ), # 1
        (2, TType.I32, 'principal_type', None, None, ), # 2
    )

    def __init__(self, principal_name=None, principal_type=None,):
        self.principal_name = principal_name
        self.principal_type = principal_type

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.principal_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.principal_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('list_roles_args')
        if self.principal_name is not None:
            oprot.writeFieldBegin('principal_name', TType.STRING, 1)
            oprot.writeString(self.principal_name)
            oprot.writeFieldEnd()
        if self.principal_type is not None:
            oprot.writeFieldBegin('principal_type', TType.I32, 2)
            oprot.writeI32(self.principal_type)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class list_roles_result:
    """Thrift result struct for the list_roles RPC.

    Attributes:
     - success: list of Role structs
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT,(Role, Role.thrift_spec)), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    # range() instead of Py2-only xrange().
                    (_etype, _size) = iprot.readListBegin()
                    for _ in range(_size):
                        _role = Role()
                        _role.read(iprot)
                        self.success.append(_role)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('list_roles_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for _role in self.success:
                _role.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class grant_revoke_role_args:
    """Thrift argument struct for the grant_revoke_role RPC.

    Attributes:
     - request: GrantRevokeRoleRequest payload
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'request', (GrantRevokeRoleRequest, GrantRevokeRoleRequest.thrift_spec), None, ), # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = GrantRevokeRoleRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('grant_revoke_role_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class grant_revoke_role_result:
    """Thrift result struct for the grant_revoke_role RPC.

    Attributes:
     - success: GrantRevokeRoleResponse payload
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GrantRevokeRoleResponse, GrantRevokeRoleResponse.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GrantRevokeRoleResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('grant_revoke_role_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_principals_in_role_args:
    """Thrift argument struct for the get_principals_in_role RPC.

    Attributes:
     - request: GetPrincipalsInRoleRequest payload
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'request', (GetPrincipalsInRoleRequest, GetPrincipalsInRoleRequest.thrift_spec), None, ), # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = GetPrincipalsInRoleRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_principals_in_role_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_principals_in_role_result:
    """Thrift result struct for the get_principals_in_role RPC.

    Attributes:
     - success: GetPrincipalsInRoleResponse payload
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GetPrincipalsInRoleResponse, GetPrincipalsInRoleResponse.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GetPrincipalsInRoleResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_principals_in_role_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_role_grants_for_principal_args:
    """Thrift argument struct for the get_role_grants_for_principal RPC.

    Attributes:
     - request: GetRoleGrantsForPrincipalRequest payload
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'request', (GetRoleGrantsForPrincipalRequest, GetRoleGrantsForPrincipalRequest.thrift_spec), None, ), # 1
    )

    def __init__(self, request=None,):
        self.request = request

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.request = GetRoleGrantsForPrincipalRequest()
                    self.request.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_role_grants_for_principal_args')
        if self.request is not None:
            oprot.writeFieldBegin('request', TType.STRUCT, 1)
            self.request.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_role_grants_for_principal_result:
    """Thrift result struct for the get_role_grants_for_principal RPC.

    Attributes:
     - success: GetRoleGrantsForPrincipalResponse payload
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (GetRoleGrantsForPrincipalResponse, GetRoleGrantsForPrincipalResponse.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = GetRoleGrantsForPrincipalResponse()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_role_grants_for_principal_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_privilege_set_args:
    """Thrift argument struct for the get_privilege_set RPC.

    Attributes:
     - hiveObject: HiveObjectRef identifying the object queried
     - user_name: user the privilege set is resolved for
     - group_names: list of group names (strings)
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'hiveObject', (HiveObjectRef, HiveObjectRef.thrift_spec), None, ), # 1
        (2, TType.STRING, 'user_name', None, None, ), # 2
        (3, TType.LIST, 'group_names', (TType.STRING,None), None, ), # 3
    )

    def __init__(self, hiveObject=None, user_name=None, group_names=None,):
        self.hiveObject = hiveObject
        self.user_name = user_name
        self.group_names = group_names

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.hiveObject = HiveObjectRef()
                    self.hiveObject.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.user_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.LIST:
                    self.group_names = []
                    # range() instead of Py2-only xrange().
                    (_etype, _size) = iprot.readListBegin()
                    for _ in range(_size):
                        self.group_names.append(iprot.readString())
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_privilege_set_args')
        if self.hiveObject is not None:
            oprot.writeFieldBegin('hiveObject', TType.STRUCT, 1)
            self.hiveObject.write(oprot)
            oprot.writeFieldEnd()
        if self.user_name is not None:
            oprot.writeFieldBegin('user_name', TType.STRING, 2)
            oprot.writeString(self.user_name)
            oprot.writeFieldEnd()
        if self.group_names is not None:
            oprot.writeFieldBegin('group_names', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.group_names))
            for _group in self.group_names:
                oprot.writeString(_group)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class get_privilege_set_result:
    """Thrift result struct for the get_privilege_set RPC.

    Attributes:
     - success: PrincipalPrivilegeSet payload
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.STRUCT, 'success', (PrincipalPrivilegeSet, PrincipalPrivilegeSet.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = PrincipalPrivilegeSet()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_privilege_set_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class list_privileges_args:
    """Thrift argument struct for the list_privileges RPC.

    Attributes:
     - principal_name: principal whose privileges are listed
     - principal_type: PrincipalType enum value (i32 on the wire)
     - hiveObject: HiveObjectRef identifying the object queried
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'principal_name', None, None, ), # 1
        (2, TType.I32, 'principal_type', None, None, ), # 2
        (3, TType.STRUCT, 'hiveObject', (HiveObjectRef, HiveObjectRef.thrift_spec), None, ), # 3
    )

    def __init__(self, principal_name=None, principal_type=None, hiveObject=None,):
        self.principal_name = principal_name
        self.principal_type = principal_type
        self.hiveObject = hiveObject

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.principal_name = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.principal_type = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.hiveObject = HiveObjectRef()
                    self.hiveObject.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('list_privileges_args')
        if self.principal_name is not None:
            oprot.writeFieldBegin('principal_name', TType.STRING, 1)
            oprot.writeString(self.principal_name)
            oprot.writeFieldEnd()
        if self.principal_type is not None:
            oprot.writeFieldBegin('principal_type', TType.I32, 2)
            oprot.writeI32(self.principal_type)
            oprot.writeFieldEnd()
        if self.hiveObject is not None:
            oprot.writeFieldBegin('hiveObject', TType.STRUCT, 3)
            self.hiveObject.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class list_privileges_result:
    """Thrift result struct for the list_privileges RPC.

    Attributes:
     - success: list of HiveObjectPrivilege structs
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT,(HiveObjectPrivilege, HiveObjectPrivilege.thrift_spec)), None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    self.success = []
                    # range() instead of Py2-only xrange().
                    (_etype, _size) = iprot.readListBegin()
                    for _ in range(_size):
                        _priv = HiveObjectPrivilege()
                        _priv.read(iprot)
                        self.success.append(_priv)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('list_privileges_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for _priv in self.success:
                _priv.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class grant_privileges_args:
    """Thrift argument struct for the grant_privileges RPC.

    Attributes:
     - privileges: PrivilegeBag of privileges to grant
    """

    thrift_spec = (
        None, # 0
        (1, TType.STRUCT, 'privileges', (PrivilegeBag, PrivilegeBag.thrift_spec), None, ), # 1
    )

    def __init__(self, privileges=None,):
        self.privileges = privileges

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.privileges = PrivilegeBag()
                    self.privileges.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('grant_privileges_args')
        if self.privileges is not None:
            oprot.writeFieldBegin('privileges', TType.STRUCT, 1)
            self.privileges.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class grant_privileges_result:
    """Thrift result struct for the grant_privileges RPC.

    Attributes:
     - success: boolean outcome of the call
     - o1: MetaException raised by the server, if any
    """

    thrift_spec = (
        (0, TType.BOOL, 'success', None, None, ), # 0
        (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
    )

    def __init__(self, success=None, o1=None,):
        self.success = success
        self.o1 = o1

    def read(self, iprot):
        """Deserialize this struct from the input protocol *iprot*."""
        # Fast path: accelerated C decoder when protocol/transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.BOOL:
                    self.success = iprot.readBool()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.o1 = MetaException()
                    self.o1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the output protocol *oprot*."""
        # Fast path: accelerated C encoder.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('grant_privileges_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
            oprot.writeFieldEnd()
        if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
            self.o1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields to check."""
        return

    def __repr__(self):
        # items() instead of Py2-only iteritems() keeps this Py2/Py3 compatible.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class revoke_privileges_args:
  """
  Attributes:
   - privileges
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'privileges', (PrivilegeBag, PrivilegeBag.thrift_spec), None, ), # 1
  )

  def __init__(self, privileges=None,):
    self.privileges = privileges

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.privileges = PrivilegeBag()
          self.privileges.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('revoke_privileges_args')
    if self.privileges is not None:
      oprot.writeFieldBegin('privileges', TType.STRUCT, 1)
      self.privileges.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class revoke_privileges_result:
  """
  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.BOOL, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.BOOL:
          self.success = iprot.readBool()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('revoke_privileges_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.BOOL, 0)
      oprot.writeBool(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class grant_revoke_privileges_args:
  """
  Attributes:
   - request
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'request', (GrantRevokePrivilegeRequest, GrantRevokePrivilegeRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, request=None,):
    self.request = request

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.request = GrantRevokePrivilegeRequest()
          self.request.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('grant_revoke_privileges_args')
    if self.request is not None:
      oprot.writeFieldBegin('request', TType.STRUCT, 1)
      self.request.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class grant_revoke_privileges_result:
  """
  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (GrantRevokePrivilegeResponse, GrantRevokePrivilegeResponse.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = GrantRevokePrivilegeResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('grant_revoke_privileges_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class set_ugi_args:
  """
  Attributes:
   - user_name
   - group_names
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'user_name', None, None, ), # 1
    (2, TType.LIST, 'group_names', (TType.STRING,None), None, ), # 2
  )

  def __init__(self, user_name=None, group_names=None,):
    self.user_name = user_name
    self.group_names = group_names

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.user_name = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.group_names = []
          # range() instead of Python 2-only xrange() for 2/3 compatibility.
          (_etype, _size) = iprot.readListBegin()
          for _ in range(_size):
            self.group_names.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('set_ugi_args')
    if self.user_name is not None:
      oprot.writeFieldBegin('user_name', TType.STRING, 1)
      oprot.writeString(self.user_name)
      oprot.writeFieldEnd()
    if self.group_names is not None:
      oprot.writeFieldBegin('group_names', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.group_names))
      for name in self.group_names:
        oprot.writeString(name)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class set_ugi_result:
  """
  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          # range() instead of Python 2-only xrange() for 2/3 compatibility.
          (_etype, _size) = iprot.readListBegin()
          for _ in range(_size):
            self.success.append(iprot.readString())
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('set_ugi_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for name in self.success:
        oprot.writeString(name)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_delegation_token_args:
  """
  Attributes:
   - token_owner
   - renewer_kerberos_principal_name
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'token_owner', None, None, ), # 1
    (2, TType.STRING, 'renewer_kerberos_principal_name', None, None, ), # 2
  )

  def __init__(self, token_owner=None, renewer_kerberos_principal_name=None,):
    self.token_owner = token_owner
    self.renewer_kerberos_principal_name = renewer_kerberos_principal_name

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.token_owner = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.renewer_kerberos_principal_name = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_delegation_token_args')
    if self.token_owner is not None:
      oprot.writeFieldBegin('token_owner', TType.STRING, 1)
      oprot.writeString(self.token_owner)
      oprot.writeFieldEnd()
    if self.renewer_kerberos_principal_name is not None:
      oprot.writeFieldBegin('renewer_kerberos_principal_name', TType.STRING, 2)
      oprot.writeString(self.renewer_kerberos_principal_name)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_delegation_token_result:
  """
  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_delegation_token_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class renew_delegation_token_args:
  """
  Attributes:
   - token_str_form
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'token_str_form', None, None, ), # 1
  )

  def __init__(self, token_str_form=None,):
    self.token_str_form = token_str_form

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.token_str_form = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('renew_delegation_token_args')
    if self.token_str_form is not None:
      oprot.writeFieldBegin('token_str_form', TType.STRING, 1)
      oprot.writeString(self.token_str_form)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class renew_delegation_token_result:
  """
  Attributes:
   - success
   - o1
  """

  thrift_spec = (
    (0, TType.I64, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, success=None, o1=None,):
    self.success = success
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I64:
          self.success = iprot.readI64()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('renew_delegation_token_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I64, 0)
      oprot.writeI64(self.success)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class cancel_delegation_token_args:
  """
  Attributes:
   - token_str_form
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'token_str_form', None, None, ), # 1
  )

  def __init__(self, token_str_form=None,):
    self.token_str_form = token_str_form

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.token_str_form = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('cancel_delegation_token_args')
    if self.token_str_form is not None:
      oprot.writeFieldBegin('token_str_form', TType.STRING, 1)
      oprot.writeString(self.token_str_form)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class cancel_delegation_token_result:
  """
  Attributes:
   - o1
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), # 1
  )

  def __init__(self, o1=None,):
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = MetaException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('cancel_delegation_token_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_open_txns_args:
  # Field-less request struct for the get_open_txns RPC.

  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this (field-less) struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this (field-less) struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_open_txns_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_open_txns_result:
  """
  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (GetOpenTxnsResponse, GetOpenTxnsResponse.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = GetOpenTxnsResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_open_txns_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_open_txns_info_args:
  # Field-less request struct for the get_open_txns_info RPC.

  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this (field-less) struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this (field-less) struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_open_txns_info_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_open_txns_info_result:
  """
  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (GetOpenTxnsInfoResponse, GetOpenTxnsInfoResponse.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = GetOpenTxnsInfoResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_open_txns_info_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class open_txns_args:
  """
  Attributes:
   - rqst
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'rqst', (OpenTxnRequest, OpenTxnRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = OpenTxnRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('open_txns_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class open_txns_result:
  """
  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (OpenTxnsResponse, OpenTxnsResponse.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = OpenTxnsResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*."""
    # Fast path mirrors read().
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('open_txns_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python 2-only .iteritems() for 2/3 compatibility.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class abort_txn_args:
  """
  Thrift struct carrying the arguments of the abort_txn call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (AbortTxnRequest, AbortTxnRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = AbortTxnRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('abort_txn_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class abort_txn_result:
  """
  Thrift struct carrying the result of the abort_txn call.

  Attributes:
   - o1
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'o1', (NoSuchTxnException, NoSuchTxnException.thrift_spec), None, ),  # 1
  )

  def __init__(self, o1=None,):
    self.o1 = o1

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchTxnException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('abort_txn_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class commit_txn_args:
  """
  Thrift struct carrying the arguments of the commit_txn call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (CommitTxnRequest, CommitTxnRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = CommitTxnRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('commit_txn_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class commit_txn_result:
  """
  Thrift struct carrying the result of the commit_txn call.

  Attributes:
   - o1
   - o2
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'o1', (NoSuchTxnException, NoSuchTxnException.thrift_spec), None, ),  # 1
    (2, TType.STRUCT, 'o2', (TxnAbortedException, TxnAbortedException.thrift_spec), None, ),  # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchTxnException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = TxnAbortedException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('commit_txn_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class lock_args:
  """
  Thrift struct carrying the arguments of the lock call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (LockRequest, LockRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = LockRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('lock_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class lock_result:
  """
  Thrift struct carrying the result of the lock call.

  Attributes:
   - success
   - o1
   - o2
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (LockResponse, LockResponse.thrift_spec), None, ),  # 0
    (1, TType.STRUCT, 'o1', (NoSuchTxnException, NoSuchTxnException.thrift_spec), None, ),  # 1
    (2, TType.STRUCT, 'o2', (TxnAbortedException, TxnAbortedException.thrift_spec), None, ),  # 2
  )

  def __init__(self, success=None, o1=None, o2=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = LockResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchTxnException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = TxnAbortedException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('lock_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class check_lock_args:
  """
  Thrift struct carrying the arguments of the check_lock call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (CheckLockRequest, CheckLockRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = CheckLockRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('check_lock_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class check_lock_result:
  """
  Thrift struct carrying the result of the check_lock call.

  Attributes:
   - success
   - o1
   - o2
   - o3
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (LockResponse, LockResponse.thrift_spec), None, ),  # 0
    (1, TType.STRUCT, 'o1', (NoSuchTxnException, NoSuchTxnException.thrift_spec), None, ),  # 1
    (2, TType.STRUCT, 'o2', (TxnAbortedException, TxnAbortedException.thrift_spec), None, ),  # 2
    (3, TType.STRUCT, 'o3', (NoSuchLockException, NoSuchLockException.thrift_spec), None, ),  # 3
  )

  def __init__(self, success=None, o1=None, o2=None, o3=None,):
    self.success = success
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = LockResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchTxnException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = TxnAbortedException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = NoSuchLockException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('check_lock_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class unlock_args:
  """
  Thrift struct carrying the arguments of the unlock call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (UnlockRequest, UnlockRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = UnlockRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('unlock_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class unlock_result:
  """
  Thrift struct carrying the result of the unlock call.

  Attributes:
   - o1
   - o2
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'o1', (NoSuchLockException, NoSuchLockException.thrift_spec), None, ),  # 1
    (2, TType.STRUCT, 'o2', (TxnOpenException, TxnOpenException.thrift_spec), None, ),  # 2
  )

  def __init__(self, o1=None, o2=None,):
    self.o1 = o1
    self.o2 = o2

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchLockException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = TxnOpenException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('unlock_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class show_locks_args:
  """
  Thrift struct carrying the arguments of the show_locks call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (ShowLocksRequest, ShowLocksRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = ShowLocksRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('show_locks_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class show_locks_result:
  """
  Thrift struct carrying the return value of the show_locks call.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (ShowLocksResponse, ShowLocksResponse.thrift_spec), None, ),  # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ShowLocksResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('show_locks_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class heartbeat_args:
  """
  Thrift struct carrying the arguments of the heartbeat call.

  Attributes:
   - ids
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'ids', (HeartbeatRequest, HeartbeatRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, ids=None,):
    self.ids = ids

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.ids = HeartbeatRequest()
          self.ids.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('heartbeat_args')
    if self.ids is not None:
      oprot.writeFieldBegin('ids', TType.STRUCT, 1)
      self.ids.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class heartbeat_result:
  """
  Thrift struct carrying the result of the heartbeat call.

  Attributes:
   - o1
   - o2
   - o3
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'o1', (NoSuchLockException, NoSuchLockException.thrift_spec), None, ),  # 1
    (2, TType.STRUCT, 'o2', (NoSuchTxnException, NoSuchTxnException.thrift_spec), None, ),  # 2
    (3, TType.STRUCT, 'o3', (TxnAbortedException, TxnAbortedException.thrift_spec), None, ),  # 3
  )

  def __init__(self, o1=None, o2=None, o3=None,):
    self.o1 = o1
    self.o2 = o2
    self.o3 = o3

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.o1 = NoSuchLockException()
          self.o1.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.o2 = NoSuchTxnException()
          self.o2.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.o3 = TxnAbortedException()
          self.o3.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('heartbeat_result')
    if self.o1 is not None:
      oprot.writeFieldBegin('o1', TType.STRUCT, 1)
      self.o1.write(oprot)
      oprot.writeFieldEnd()
    if self.o2 is not None:
      oprot.writeFieldBegin('o2', TType.STRUCT, 2)
      self.o2.write(oprot)
      oprot.writeFieldEnd()
    if self.o3 is not None:
      oprot.writeFieldBegin('o3', TType.STRUCT, 3)
      self.o3.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class heartbeat_txn_range_args:
  """
  Thrift struct carrying the arguments of the heartbeat_txn_range call.

  Attributes:
   - txns
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'txns', (HeartbeatTxnRangeRequest, HeartbeatTxnRangeRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, txns=None,):
    self.txns = txns

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.txns = HeartbeatTxnRangeRequest()
          self.txns.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('heartbeat_txn_range_args')
    if self.txns is not None:
      oprot.writeFieldBegin('txns', TType.STRUCT, 1)
      self.txns.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class heartbeat_txn_range_result:
  """
  Thrift struct carrying the return value of the heartbeat_txn_range call.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (HeartbeatTxnRangeResponse, HeartbeatTxnRangeResponse.thrift_spec), None, ),  # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = HeartbeatTxnRangeResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('heartbeat_txn_range_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class compact_args:
  """
  Thrift struct carrying the arguments of the compact call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (CompactionRequest, CompactionRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = CompactionRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('compact_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class compact_result:
  """Thrift struct for the compact call's result; it carries no fields."""

  thrift_spec = (
  )

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        # No declared fields: everything on the wire is skipped.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('compact_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class show_compact_args:
  """
  Thrift struct carrying the arguments of the show_compact call.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'rqst', (ShowCompactRequest, ShowCompactRequest.thrift_spec), None, ),  # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    """Deserialize this struct from the input protocol *iprot*."""
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = ShowCompactRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to the output protocol *oprot*."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('show_compact_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # dict.items() (not the Python-2-only iteritems()) so the repr
    # works under both Python 2 and Python 3.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class show_compact_result:
  """Thrift result struct for the show_compact() RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (ShowCompactResponse, ShowCompactResponse.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = ShowCompactResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('show_compact_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems(): works on 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_next_notification_args:
  """Thrift argument struct for the get_next_notification() RPC.

  Attributes:
   - rqst
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'rqst', (NotificationEventRequest, NotificationEventRequest.thrift_spec), None, ), # 1
  )

  def __init__(self, rqst=None,):
    self.rqst = rqst

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.rqst = NotificationEventRequest()
          self.rqst.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_next_notification_args')
    if self.rqst is not None:
      oprot.writeFieldBegin('rqst', TType.STRUCT, 1)
      self.rqst.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems(): works on 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_next_notification_result:
  """Thrift result struct for the get_next_notification() RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (NotificationEventResponse, NotificationEventResponse.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = NotificationEventResponse()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_next_notification_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems(): works on 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_current_notificationEventId_args:
  """Thrift argument struct for get_current_notificationEventId() (no fields)."""

  thrift_spec = (
  )

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        # No declared fields: skip anything a newer peer might send.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_current_notificationEventId_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems(): works on 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_current_notificationEventId_result:
  """Thrift result struct for the get_current_notificationEventId() RPC.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (CurrentNotificationEventId, CurrentNotificationEventId.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = CurrentNotificationEventId()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_current_notificationEventId_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # items() instead of the Python-2-only iteritems(): works on 2 and 3.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| apache-2.0 |
stianvi/ansible-modules-core | cloud/amazon/ec2_metric_alarm.py | 53 | 10776 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
module: ec2_metric_alarm
short_description: "Create/update or delete AWS Cloudwatch 'metric alarms'"
description:
- Can create or delete AWS metric alarms.
- Metrics you wish to alarm on must already exist.
version_added: "1.6"
author: "Zacharie Eakin (@zeekin)"
options:
state:
description:
- register or deregister the alarm
required: true
choices: ['present', 'absent']
name:
description:
- Unique name for the alarm
required: true
metric:
description:
- Name of the monitored metric (e.g. CPUUtilization)
- Metric must already exist
required: false
namespace:
description:
- Name of the appropriate namespace ('AWS/EC2', 'System/Linux', etc.), which determines the category it will appear under in cloudwatch
required: false
statistic:
description:
- Operation applied to the metric
- Works in conjunction with period and evaluation_periods to determine the comparison value
required: false
options: ['SampleCount','Average','Sum','Minimum','Maximum']
comparison:
description:
- Determines how the threshold value is compared
required: false
options: ['<=','<','>','>=']
threshold:
description:
- Sets the min/max bound for triggering the alarm
required: false
period:
description:
- The time (in seconds) between metric evaluations
required: false
evaluation_periods:
description:
- The number of times in which the metric is evaluated before final calculation
required: false
unit:
description:
- The threshold's unit of measurement
required: false
options: ['Seconds','Microseconds','Milliseconds','Bytes','Kilobytes','Megabytes','Gigabytes','Terabytes','Bits','Kilobits','Megabits','Gigabits','Terabits','Percent','Count','Bytes/Second','Kilobytes/Second','Megabytes/Second','Gigabytes/Second','Terabytes/Second','Bits/Second','Kilobits/Second','Megabits/Second','Gigabits/Second','Terabits/Second','Count/Second','None']
description:
description:
- A longer description of the alarm
required: false
dimensions:
description:
- Describes to what the alarm is applied
required: false
alarm_actions:
description:
- A list of the names action(s) taken when the alarm is in the 'alarm' status
required: false
insufficient_data_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'insufficient_data' status
required: false
ok_actions:
description:
- A list of the names of action(s) to take when the alarm is in the 'ok' status
required: false
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = '''
- name: create alarm
ec2_metric_alarm:
state: present
region: ap-southeast-2
name: "cpu-low"
metric: "CPUUtilization"
namespace: "AWS/EC2"
statistic: Average
comparison: "<="
threshold: 5.0
period: 300
evaluation_periods: 3
unit: "Percent"
description: "This will alarm when a bamboo slave's cpu usage average is lower than 5% for 15 minutes "
dimensions: {'InstanceId':'i-XXX'}
alarm_actions: ["action1","action2"]
'''
# boto is an optional dependency: record availability so main() can emit a
# clean fail_json message instead of crashing with an ImportError traceback.
try:
    import boto.ec2.cloudwatch
    from boto.ec2.cloudwatch import CloudWatchConnection, MetricAlarm
    from boto.exception import BotoServerError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def create_metric_alarm(connection, module):
    """Create a CloudWatch metric alarm, or update an existing one in place.

    Terminates the module run via module.exit_json() with the resulting
    alarm's attributes, or module.fail_json() on any BotoServerError.
    """
    name = module.params.get('name')
    metric = module.params.get('metric')
    namespace = module.params.get('namespace')
    statistic = module.params.get('statistic')
    comparison = module.params.get('comparison')
    threshold = module.params.get('threshold')
    period = module.params.get('period')
    evaluation_periods = module.params.get('evaluation_periods')
    unit = module.params.get('unit')
    description = module.params.get('description')
    dimensions = module.params.get('dimensions')
    alarm_actions = module.params.get('alarm_actions')
    insufficient_data_actions = module.params.get('insufficient_data_actions')
    ok_actions = module.params.get('ok_actions')

    alarms = connection.describe_alarms(alarm_names=[name])

    if not alarms:
        # No alarm with this name yet: create it from the module parameters.
        alm = MetricAlarm(
            name=name,
            metric=metric,
            namespace=namespace,
            statistic=statistic,
            comparison=comparison,
            threshold=threshold,
            period=period,
            evaluation_periods=evaluation_periods,
            unit=unit,
            description=description,
            dimensions=dimensions,
            alarm_actions=alarm_actions,
            insufficient_data_actions=insufficient_data_actions,
            ok_actions=ok_actions
        )
        try:
            connection.create_alarm(alm)
            changed = True
            # Re-fetch so the exit payload reflects server-side state.
            alarms = connection.describe_alarms(alarm_names=[name])
        except BotoServerError as e:
            module.fail_json(msg=str(e))
    else:
        # Alarm exists: diff each simple attribute and update only on change.
        alarm = alarms[0]
        changed = False

        for attr in ('comparison', 'metric', 'namespace', 'statistic', 'threshold', 'period', 'evaluation_periods', 'unit', 'description'):
            if getattr(alarm, attr) != module.params.get(attr):
                changed = True
                setattr(alarm, attr, module.params.get(attr))
        # This is to deal with a current bug where you cannot assign '<=>' to
        # the comparator when modifying an existing alarm: translate the
        # symbolic operator to its long CloudWatch name before re-submitting.
        comparison = alarm.comparison
        comparisons = {'<=': 'LessThanOrEqualToThreshold', '<': 'LessThanThreshold', '>=': 'GreaterThanOrEqualToThreshold', '>': 'GreaterThanThreshold'}
        alarm.comparison = comparisons[comparison]

        dim1 = module.params.get('dimensions')
        dim2 = alarm.dimensions

        # CloudWatch stores dimension values as lists; normalize scalars
        # before comparing.
        for keys in dim1:
            if not isinstance(dim1[keys], list):
                dim1[keys] = [dim1[keys]]
            if keys not in dim2 or dim1[keys] != dim2[keys]:
                changed = True
                setattr(alarm, 'dimensions', dim1)

        for attr in ('alarm_actions', 'insufficient_data_actions', 'ok_actions'):
            action = module.params.get(attr) or []
            if getattr(alarm, attr) != action:
                changed = True
                # Bug fix: assign the normalized list (``action``), not the
                # raw parameter — the raw value may be None, which the old
                # code wrote onto the alarm even though it compared against [].
                setattr(alarm, attr, action)

        try:
            if changed:
                connection.create_alarm(alarm)
        except BotoServerError as e:
            module.fail_json(msg=str(e))
    result = alarms[0]
    module.exit_json(changed=changed, name=result.name,
                     actions_enabled=result.actions_enabled,
                     alarm_actions=result.alarm_actions,
                     alarm_arn=result.alarm_arn,
                     comparison=result.comparison,
                     description=result.description,
                     dimensions=result.dimensions,
                     evaluation_periods=result.evaluation_periods,
                     insufficient_data_actions=result.insufficient_data_actions,
                     last_updated=result.last_updated,
                     metric=result.metric,
                     namespace=result.namespace,
                     ok_actions=result.ok_actions,
                     period=result.period,
                     state_reason=result.state_reason,
                     state_value=result.state_value,
                     statistic=result.statistic,
                     threshold=result.threshold,
                     unit=result.unit)
def delete_metric_alarm(connection, module):
    """Delete the named CloudWatch alarm if it exists and report changed state."""
    alarm_name = module.params.get('name')
    existing = connection.describe_alarms(alarm_names=[alarm_name])
    if not existing:
        # Nothing to do: the alarm is already gone.
        module.exit_json(changed=False)
    else:
        try:
            connection.delete_alarms([alarm_name])
            module.exit_json(changed=True)
        except BotoServerError as e:
            module.fail_json(msg=str(e))
def main():
    """Module entry point: parse arguments, connect to CloudWatch, dispatch."""
    # Start from the shared EC2 argument spec (region, credentials, etc.)
    # and add the alarm-specific parameters.
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True, type='str'),
            metric=dict(type='str'),
            namespace=dict(type='str'),
            statistic=dict(type='str', choices=['SampleCount', 'Average', 'Sum', 'Minimum', 'Maximum']),
            comparison=dict(type='str', choices=['<=', '<', '>', '>=']),
            threshold=dict(type='float'),
            period=dict(type='int'),
            unit=dict(type='str', choices=['Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes', 'Megabytes', 'Gigabytes', 'Terabytes', 'Bits', 'Kilobits', 'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count', 'Bytes/Second', 'Kilobytes/Second', 'Megabytes/Second', 'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second', 'Kilobits/Second', 'Megabits/Second', 'Gigabits/Second', 'Terabits/Second', 'Count/Second', 'None']),
            evaluation_periods=dict(type='int'),
            description=dict(type='str'),
            dimensions=dict(type='dict', default={}),
            alarm_actions=dict(type='list'),
            insufficient_data_actions=dict(type='list'),
            ok_actions=dict(type='list'),
            state=dict(default='present', choices=['present', 'absent']),
        )
    )

    module = AnsibleModule(argument_spec=argument_spec)

    # Fail politely if the optional boto dependency is missing.
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    state = module.params.get('state')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module)

    if region:
        try:
            connection = connect_to_aws(boto.ec2.cloudwatch, region, **aws_connect_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))
    else:
        module.fail_json(msg="region must be specified")

    # Dispatch to the create/update or delete path based on desired state.
    if state == 'present':
        create_metric_alarm(connection, module)
    elif state == 'absent':
        delete_metric_alarm(connection, module)
# Legacy Ansible module boilerplate: these wildcard imports must stay at the
# bottom of the file so the module_utils snippets are inlined correctly by
# the Ansible 1.x module loader.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
resmo/ansible | test/units/module_utils/common/text/converters/test_container_to_text.py | 71 | 2664 | # -*- coding: utf-8 -*-
# Copyright 2019, Andrew Klychkov @Andersson007 <aaklychkov@mail.ru>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.text.converters import container_to_text
DEFAULT_ENCODING = 'utf-8'
DEFAULT_ERR_HANDLER = 'surrogate_or_strict'
@pytest.mark.parametrize(
    'test_input,expected',
    [
        ({1: 1}, {1: 1}),
        ([1, 2], [1, 2]),
        ((1, 2), (1, 2)),
        (1, 1),
        (1.1, 1.1),
        (b'str', u'str'),
        (u'str', u'str'),
        ([b'str'], [u'str']),
        ((b'str'), (u'str')),
        ({b'str': b'str'}, {u'str': u'str'}),
    ]
)
@pytest.mark.parametrize('encoding', ['utf-8', 'latin1', 'shift-jis', 'big5', 'koi8_r', ])
@pytest.mark.parametrize('errors', ['strict', 'surrogate_or_strict', 'surrogate_then_replace', ])
def test_container_to_text_different_types(test_input, expected, encoding, errors):
    """Test for passing objects to container_to_text()."""
    # Stacked parametrize decorators run the full cross-product: every
    # input/expected pair under every encoding and every error handler.
    assert container_to_text(test_input, encoding=encoding, errors=errors) == expected
@pytest.mark.parametrize(
    'test_input,expected',
    [
        ({1: 1}, {1: 1}),
        ([1, 2], [1, 2]),
        ((1, 2), (1, 2)),
        (1, 1),
        (1.1, 1.1),
        (True, True),
        (None, None),
        (u'str', u'str'),
        (u'くらとみ'.encode(DEFAULT_ENCODING), u'くらとみ'),
        (u'café'.encode(DEFAULT_ENCODING), u'café'),
        (u'str'.encode(DEFAULT_ENCODING), u'str'),
        ([u'str'.encode(DEFAULT_ENCODING)], [u'str']),
        ((u'str'.encode(DEFAULT_ENCODING)), (u'str')),
        ({b'str': b'str'}, {u'str': u'str'}),
    ]
)
def test_container_to_text_default_encoding_and_err(test_input, expected):
    """
    Test for passing objects to container_to_text(). Default encoding and errors
    """
    # Non-str scalars (bool, None, numbers) must pass through unchanged;
    # bytes values — including non-ASCII text — are decoded to text.
    assert container_to_text(test_input, encoding=DEFAULT_ENCODING,
                             errors=DEFAULT_ERR_HANDLER) == expected
@pytest.mark.parametrize(
    'test_input,encoding,expected',
    [
        (u'й'.encode('utf-8'), 'latin1', u'й'),
        (u'café'.encode('utf-8'), 'shift_jis', u'cafテゥ'),
    ]
)
@pytest.mark.parametrize('errors', ['strict', 'surrogate_or_strict', 'surrogate_then_replace', ])
def test_container_to_text_incomp_encod_chars(test_input, encoding, errors, expected):
    """
    Test for passing incompatible characters and encodings container_to_text().
    """
    # Decoding UTF-8 bytes with the wrong codec yields mojibake; these cases
    # pin the exact (garbled) output rather than raising.
    assert container_to_text(test_input, encoding=encoding, errors=errors) == expected
CyanogenMod/android_kernel_lge_g3 | tools/perf/scripts/python/net_dropmonitor.py | 4235 | 1554 | # Monitor the system for dropped packets and proudce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# drop location (as a string address) -> number of skbs dropped there
drop_log = {}
# address-sorted list of {'loc': address, 'name': symbol} from /proc/kallsyms
kallsyms = []
def get_kallsyms_table():
	# Parse /proc/kallsyms into the global `kallsyms` list and sort it by
	# address so symbol lookup can assume ordering.
	global kallsyms
	try:
		f = open("/proc/kallsyms", "r")
		# First pass only counts lines so the second pass can show a
		# "j/linecount" progress indicator.
		linecount = 0
		for line in f:
			linecount = linecount+1
		f.seek(0)
	except:
		# /proc/kallsyms unreadable (permissions / non-Linux): leave the
		# table empty and let callers fall back to raw addresses.
		return

	j = 0
	for line in f:
		loc = int(line.split()[0], 16)
		name = line.split()[2]
		j = j +1
		if ((j % 100) == 0):
			# Trailing comma (Python 2 print) keeps the progress meter
			# rewriting a single line via the leading "\r".
			print "\r" + str(j) + "/" + str(linecount),
		kallsyms.append({ 'loc': loc, 'name' : name})

	print "\r" + str(j) + "/" + str(linecount)
	# NOTE(review): sorting a list of dicts relies on Python 2 dict
	# comparison semantics; effectively orders by 'loc' — confirm before
	# porting to Python 3.
	kallsyms.sort()
	return
def get_sym(sloc):
	"""Resolve an address to (symbol_name, offset_into_symbol).

	kallsyms is sorted by address, so binary-search for the *last* entry
	whose start address is <= loc.  (The previous linear scan returned the
	first symbol at or above loc, which attributed drops to the wrong
	function and computed the offset from the wrong base.)
	Returns (None, 0) when the address precedes every known symbol.
	"""
	loc = int(sloc)
	lo = 0
	hi = len(kallsyms) - 1
	best = -1
	while lo <= hi:
		mid = (lo + hi) // 2
		if kallsyms[mid]['loc'] <= loc:
			best = mid
			lo = mid + 1
		else:
			hi = mid - 1
	if best >= 0:
		entry = kallsyms[best]
		return (entry['name'], loc - entry['loc'])
	return (None, 0)
def print_drop_table():
	# Emit one row per drop location: resolved symbol, offset, hit count.
	print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
	for i in drop_log.keys():
		(sym, off) = get_sym(i)
		if sym == None:
			# Unresolvable address: fall back to printing the raw value.
			sym = i
		print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
	# perf calls this once before delivering the first event.
	print "Starting trace (Ctrl-C to dump results)"

def trace_end():
	# perf calls this when tracing stops: resolve symbols, then report.
	print "Gathering kallsyms data"
	get_kallsyms_table()
	print_drop_table()
# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
		   skbaddr, protocol, location):
	"""Count one dropped skb against its kernel drop location."""
	slocation = str(location)
	# dict.get() replaces the previous bare try/except, which silently
	# swallowed *any* exception rather than just the expected KeyError.
	drop_log[slocation] = drop_log.get(slocation, 0) + 1
| gpl-2.0 |
shumashv1/android_kernel_hp_tenderloin | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    # Arbitrarily nested, auto-vivifying dictionary (Perl-style autoviv).
    return defaultdict(autodict)

flag_fields = autodict()      # event -> field -> {'delim', 'values': {bit: name}}
symbolic_fields = autodict()  # event -> field -> {'values': {value: name}}
def define_flag_field(event_name, field_name, delim):
    # Register the delimiter used when rendering a bitmask field's flags.
    flag_fields[event_name][field_name]['delim'] = delim

def define_flag_value(event_name, field_name, value, field_str):
    # Map one bit of a flag field to its human-readable name.
    flag_fields[event_name][field_name]['values'][value] = field_str

def define_symbolic_field(event_name, field_name):
    # nothing to do, really
    pass

def define_symbolic_value(event_name, field_name, value, field_str):
    # Map one enum value of a symbolic field to its human-readable name.
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
# Bit values of the common_flags event header field and their ftrace names.
trace_flags = { 0x00: "NONE", \
		0x01: "IRQS_OFF", \
		0x02: "IRQS_NOSUPPORT", \
		0x04: "NEED_RESCHED", \
		0x08: "HARDIRQ", \
		0x10: "SOFTIRQ" }
def trace_flag_str(value):
    # Render a common_flags value as " | "-joined flag names.
    string = ""
    print_delim = 0

    keys = trace_flags.keys()

    for idx in keys:
	# NOTE(review): if the 0x00 key is visited after all set bits have
	# been cleared, "NONE" is appended without a delimiter (e.g.
	# "IRQS_OFFNONE") — looks unintended, preserved as-is.
	if not value and not idx:
	    string += "NONE"
	    break

	if idx and (value & idx) == idx:
	    if print_delim:
		string += " | ";
	    string += trace_flags[idx]
	    print_delim = 1
	    # Clear the bit so a fully-decoded value reaches zero.
	    value &= ~idx

    return string
def taskState(state):
    """Translate a numeric scheduler task state into its letter code."""
    state_codes = {
	0 : "R",
	1 : "S",
	2 : "D",
	64: "DEAD"
    }
    # Any state not in the table is reported as "Unknown".
    return state_codes.get(state, "Unknown")
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
viggates/nova | plugins/xenserver/xenapi/etc/xapi.d/plugins/pluginlib_nova.py | 16 | 4778 | # Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Helper functions for the Nova xapi plugins. In time, this will merge
# with the pluginlib.py shipped with xapi, but for now, that file is not
# very stable, so it's easiest just to have a copy of all the functions
# that we need.
#
import gettext
import logging
import logging.handlers
import time
import XenAPI
translations = gettext.translation('nova', fallback=True)
_ = translations.ugettext
# Logging setup
def configure_logging(name):
    """Route root-logger output to syslog, tagging each line with *name*."""
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    handler = logging.handlers.SysLogHandler('/dev/log')
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(
        logging.Formatter('%s: %%(levelname)-8s %%(message)s' % name))
    root.addHandler(handler)
# Exceptions
class PluginError(Exception):
    """Base Exception class for all plugin errors."""
    def __init__(self, *args):
        super(PluginError, self).__init__(*args)
class ArgumentError(PluginError):
    """Raised when required arguments are missing, argument values are invalid,
    or incompatible arguments are given.
    """
    def __init__(self, *args):
        super(ArgumentError, self).__init__(*args)
# Argument validation
def exists(args, key):
    """Validates that a freeform string argument to a RPC method call is given.
    Returns the string, raising ArgumentError when *key* is absent.
    """
    try:
        return args[key]
    except KeyError:
        raise ArgumentError(_('Argument %s is required.') % key)
def optional(args, key):
    """Return args[key] if *key* is present, otherwise None.

    Fixes the classic ``cond and a or b`` idiom bug: the old expression
    returned None whenever the stored value was itself falsy (0, '', [],
    False) even though the key existed, contradicting this docstring.
    """
    return args.get(key)
def _get_domain_0(session):
    # Return the XenAPI VM ref of dom0 (the control domain) on this host.
    this_host_ref = session.xenapi.session.get_this_host(session.handle)
    expr = 'field "is_control_domain" = "true" and field "resident_on" = "%s"'
    expr = expr % this_host_ref
    # .keys()[0] assumes exactly one control domain resident on the host.
    return session.xenapi.VM.get_all_records_where(expr).keys()[0]
def with_vdi_in_dom0(session, vdi, read_only, f):
    """Temporarily plug *vdi* into dom0 and call f(device_name).

    Creates and plugs a VBD attaching the VDI to the control domain, invokes
    *f* with the resulting block device name, and always unplugs/destroys the
    VBD afterwards (even if *f* raises).
    """
    dom0 = _get_domain_0(session)
    vbd_rec = {}
    vbd_rec['VM'] = dom0
    vbd_rec['VDI'] = vdi
    vbd_rec['userdevice'] = 'autodetect'
    vbd_rec['bootable'] = False
    # Mode follows the caller's read_only flag.
    vbd_rec['mode'] = read_only and 'RO' or 'RW'
    vbd_rec['type'] = 'disk'
    vbd_rec['unpluggable'] = True
    vbd_rec['empty'] = False
    vbd_rec['other_config'] = {}
    vbd_rec['qos_algorithm_type'] = ''
    vbd_rec['qos_algorithm_params'] = {}
    vbd_rec['qos_supported_algorithms'] = []
    logging.debug(_('Creating VBD for VDI %s ... '), vdi)
    vbd = session.xenapi.VBD.create(vbd_rec)
    logging.debug(_('Creating VBD for VDI %s done.'), vdi)
    try:
        logging.debug(_('Plugging VBD %s ... '), vbd)
        session.xenapi.VBD.plug(vbd)
        logging.debug(_('Plugging VBD %s done.'), vbd)
        return f(session.xenapi.VBD.get_device(vbd))
    finally:
        logging.debug(_('Destroying VBD for VDI %s ... '), vdi)
        # Unplug may need retries (see _vbd_unplug_with_retry); destroy
        # failures are deliberately ignored so cleanup is best-effort.
        _vbd_unplug_with_retry(session, vbd)
        try:
            session.xenapi.VBD.destroy(vbd)
        except XenAPI.Failure, e:  # noqa
            logging.error(_('Ignoring XenAPI.Failure %s'), e)
        logging.debug(_('Destroying VBD for VDI %s done.'), vdi)
def _vbd_unplug_with_retry(session, vbd):
    """Call VBD.unplug on the given VBD, with a retry if we get
    DEVICE_DETACH_REJECTED.  For reasons which I don't understand, we're
    seeing the device still in use, even when all processes using the device
    should be dead.
    """
    while True:
        try:
            session.xenapi.VBD.unplug(vbd)
            logging.debug(_('VBD.unplug successful first time.'))
            return
        except XenAPI.Failure, e:  # noqa
            if (len(e.details) > 0 and
                e.details[0] == 'DEVICE_DETACH_REJECTED'):
                # Device still busy: wait a second and retry indefinitely.
                logging.debug(_('VBD.unplug rejected: retrying...'))
                time.sleep(1)
            elif (len(e.details) > 0 and
                  e.details[0] == 'DEVICE_ALREADY_DETACHED'):
                # A previous attempt evidently succeeded; we are done.
                logging.debug(_('VBD.unplug successful eventually.'))
                return
            else:
                # Any other failure is logged and swallowed (best-effort).
                logging.error(_('Ignoring XenAPI.Failure in VBD.unplug: %s'),
                              e)
                return
| apache-2.0 |
cbeloni/quokka | quokka/utils/populate.py | 1 | 9597 | # coding: utf-8
import logging
import json
import uuid
from quokka.core.models import Channel, ChannelType, SubContentPurpose, \
Config, CustomValue, License
from quokka.modules.accounts.models import User, Role
from quokka.modules.posts.models import Post
logger = logging.getLogger()
class Populate(object):
    def __init__(self, db, *args, **kwargs):
        # db: database handle (stored for callers; not used directly here).
        # kwargs may carry 'filepath' to select the fixtures JSON file.
        self.db = db
        self.args = args
        self.kwargs = kwargs
        # Caches keyed by natural identifier so each object is built once.
        self.roles = {}
        self.users = {}
        self.channels = {}
        self.channel_types = {}
        self.purposes = {}
        self.custom_values = {}
        self.load_fixtures()
    def __call__(self, *args, **kwargs):
        """Populate the database from the loaded fixtures, in dependency order."""
        self.load_existing_users()
        self.create_users()
        self.create_configs()
        # Channel types must exist before the channels referencing them.
        self.create_channel_types()
        self.create_base_channels()
        self.create_channels()
        self.create_purposes()
        self.create_posts()
def generate_random_password(self):
return uuid.uuid4().hex
    def create_initial_superuser(self):
        """Create the default admin user and return (user_data, user_obj)."""
        password = self.generate_random_password()
        user_data = {
            "name": "Quokka Admin",
            "email": "admin@quokkaproject.org",
            "gravatar_email": "rochacbruno+quokka@gmail.com",
            # Only the first 6 characters of the random hex are used as the
            # actual password.
            "password": password[:6],
            "roles": ["admin"],
            "bio": "Quokka Example Admin",
            "tagline": "Quokka is the best CMS!",
            "links": [
                {
                    "title": "facebook",
                    "link": "http://facebook.com/quokkaproject",
                    "icon": "facebook",
                    "css_class": "facebook",
                    "order": 0
                },
                {
                    "title": "github",
                    "link": "http://github.com/quokkaproject",
                    "icon": "github",
                    "css_class": "github",
                    "order": 0
                },
                {
                    "title": "twitter",
                    "link": "http://twitter.com/quokkaproject",
                    "icon": "twitter",
                    "css_class": "twitter",
                    "order": 0
                }
            ]
        }
        user_obj = self.create_user(user_data)
        return user_data, user_obj
def load_fixtures(self):
filepath = self.kwargs.get('filepath',
'./etc/fixtures/initial_data.json')
_file = open(filepath)
self.json_data = json.load(_file)
    def role(self, name):
        """Return the Role called *name*, creating and caching it on demand."""
        if name not in self.roles:
            role, created = Role.objects.get_or_create(name=name)
            self.roles[name] = role
            if created:
                logger.info("Created role: %s", name)
        return self.roles.get(name)
def load_existing_users(self):
users = User.objects.all()
for user in users:
self.users[user.name] = user
    def create_user(self, data):
        """Create and cache a User from *data*, unless the name already exists.

        Returns the created User, or None when a user with that name is
        already cached.
        """
        name = data.get('name')
        if name not in self.users:
            pwd = data.get("password")
            # Resolve role names to Role objects (created on demand).
            data['roles'] = [self.role(role) for role in data.get('roles')]
            user = User.createuser(**data)
            self.users[name] = user
            # NOTE(review): this logs the plaintext password — acceptable for
            # sample-data seeding only.
            logger.info("Created: User: mail:%s pwd:%s", user.email, pwd)
            return user
        else:
            logger.info("Exist: User: mail: %s", data.get('email'))
def create_users(self, data=None):
self.users_data = data or self.json_data.get('users')
for data in self.users_data:
self.create_user(data)
def create_config(self, data):
try:
return Config.objects.get(group=data.get('group'))
except:
return Config.objects.create(**data)
def custom_value(self, **data):
if data.get('name') in self.custom_values:
return self.custom_values[data.get('name')]
value = CustomValue(**data)
self.custom_values[value.name] = value
return value
    def create_configs(self):
        """Create Config groups from fixtures, materializing their custom values."""
        self.configs_data = self.json_data.get('configs')
        for config in self.configs_data:
            # Replace raw value dicts with (cached) CustomValue instances.
            config['values'] = [self.custom_value(**args)
                                for args in config.get('values')]
            self.create_config(config)
def create_channel(self, data):
    """Get-or-create the Channel described by *data*, recursing into childs.

    Child channel dicts found under the 'childs' key are created with their
    ``parent`` set to this channel.  The channel is cached in
    ``self.channels`` keyed by slug and returned.
    """
    # pop() with a default replaces the original if/else dance.
    childs = data.pop('childs', [])
    data['created_by'] = data['last_updated_by'] = self.users.get('admin')
    _type = data.get('channel_type')
    data['channel_type'] = self.channel_types.get(_type)
    try:
        channel = Channel.objects.get(slug=data.get('slug'))
        created = False
    except Exception:
        # Narrowed from a bare except, which also caught system-exiting
        # exceptions such as KeyboardInterrupt.
        channel, created = Channel.objects.get_or_create(**data)
    if created:
        logger.info("Created channel: %s", channel.title)
    else:
        logger.info("Channel get: %s", channel.title)
    for child in childs:
        child['parent'] = channel
        self.create_channel(child)
    if channel.slug not in self.channels:
        self.channels[channel.slug] = channel
    return channel
def create_channel_type(self, data):
    """Get-or-create the ChannelType for ``data['identifier']``.

    The result is cached in ``self.channel_types`` keyed by identifier
    and returned.
    """
    try:
        channel_type = ChannelType.objects.get(
            identifier=data.get('identifier'))
        created = False
    except Exception:
        # Narrowed from a bare except (which also hid KeyboardInterrupt).
        channel_type, created = ChannelType.objects.get_or_create(
            **data
        )
    if created:
        logger.info("Created channel_type: %s", channel_type.title)
    else:
        logger.info("ChannelType get: %s", channel_type.title)
    if channel_type.identifier not in self.channel_types:
        self.channel_types[channel_type.identifier] = channel_type
    return channel_type
def create_base_channels(self):
    """Create every channel listed under the 'base_channels' fixture key."""
    self.channel_data = self.json_data.get('base_channels')
    for entry in self.channel_data:
        self.create_channel(entry)
def create_channels(self):
    """Create every channel listed under the 'channels' fixture key."""
    self.channel_data = self.json_data.get('channels')
    for record in self.channel_data:
        self.create_channel(record)
def create_channel_types(self):
    """Create every ChannelType listed in the 'channel_types' fixture."""
    self.channel_type_data = self.json_data.get('channel_types')
    for record in self.channel_type_data:
        self.create_channel_type(record)
def create_purpose(self, data):
    """Return the SubContentPurpose for *data*, creating it when unseen.

    Results are cached in ``self.purposes`` keyed by identifier.
    """
    identifier = data.get('identifier')
    if identifier in self.purposes:
        return self.purposes[identifier]
    purpose, _created = SubContentPurpose.objects.get_or_create(
        title=data.get('title'),
        identifier=identifier,
        module=data.get('module')
    )
    self.purposes[purpose.identifier] = purpose
    return purpose
def create_purposes(self):
    """Create every SubContentPurpose listed in the 'purposes' fixture."""
    self.purpose_data = self.json_data.get('purposes')
    for entry in self.purpose_data:
        self.create_purpose(entry)
def create_initial_post(self, user_data=None, user_obj=None):
    """Create the welcome post that points new admins at /admin.

    *user_data* supplies the credentials interpolated into the text;
    *user_obj* (or, failing that, the first persisted User) becomes the
    author.  Returns the created post.
    """
    summary = (
        "Use default credentials to access "
        "/admin \r\n"
        "user: {user[email]} \r\n"
        "pass: {user[password]} \r\n"
    ).format(user=user_data)
    body = (
        "## You can try Quokka ADMIN\r\n\r\n"
        "Create some posts\r\n\r\n"
        "> Use default credentials to access "
        "[/admin](/admin) \r\n\r\n"
        "- user: {user[email]}\r\n"
        "- password: {user[password]}\r\n"
        "> ATTENTION! Copy the credentials and delete this post"
    ).format(user=user_data)
    post_data = {
        "title": "Try Quokka CMS! write a post.",
        "summary": summary,
        "slug": "try-quokka-cms",
        "tags": ["quokka"],
        "body": body,
        "license": {
            "title": "Creative Commons",
            "link": "http://creativecommons.com",
            "identifier": "creative_commons_by_nc_nd"
        },
        "content_format": "markdown",
        "channel": self.channels.get("home"),
        # User.objects is only consulted when no author was passed in.
        "created_by": user_obj or User.objects.first(),
    }
    return self.create_post(post_data)
def create_post(self, data):
    """Get-or-create the Post with ``data['slug']``.

    Fills in author/publication defaults and coerces a dict license into
    a License instance before creating.  Returns the post.
    """
    if not data.get('created_by'):
        data['created_by'] = self.users.get('admin')
    data['last_updated_by'] = data['created_by']
    data['published'] = True
    if 'license' in data and not isinstance(data['license'], License):
        data['license'] = License(**data['license'])
    try:
        post = Post.objects.get(slug=data.get('slug'))
        logger.info("Post get: %s", post.title)
    except Exception:
        # Narrowed from a bare except, which also caught KeyboardInterrupt.
        post = Post.objects.create(**data)
        logger.info("Post created: %s", post.title)
    # post.created_by = self.users.get('admin')
    # post.save()
    return post
def create_posts(self):
    """Create every post in the 'posts' fixture, resolving channel slugs.

    If creating a post fails (e.g. because its channel does not exist
    yet), the channels are (re)created and the post is retried once.
    """
    self.post_data = self.json_data.get('posts')
    for data in self.post_data:
        _channel = data.get('channel')
        data['channel'] = self.channels.get(_channel)
        related_channels = data.get('related_channels', [])
        data['related_channels'] = [
            self.channels.get(_related)
            for _related in related_channels
        ]
        try:
            self.create_post(data)
        except Exception:
            # Narrowed from a bare except; deliberate best-effort retry.
            self.create_channels()
            self.create_post(data)
| mit |
dtrip/weevely3 | utils/http.py | 2 | 1239 | from core.weexceptions import FatalException
from core import messages
from core import config
import random
import string
import utils
import urllib2
import os
agents_list_path = 'utils/_http/user-agents.txt'
def load_all_agents():
    """Return the list of bundled User-Agent strings, one per file line.

    Raises FatalException when the agents file cannot be opened.
    """
    try:
        agents_file = open(
            os.path.join(config.weevely_path,
                         agents_list_path)
        )
    except Exception as e:
        raise FatalException(
            messages.generic.error_loading_file_s_s %
            (agents_list_path, str(e)))
    # Close the handle instead of leaking it as the original did.
    try:
        return agents_file.read().split('\n')
    finally:
        agents_file.close()
def add_random_url_param(url):
    """Return *url* with a random query parameter appended (cache-busting)."""
    param_name = utils.strings.randstr(
        n = 4,
        fixed = False,
        charset = string.letters
    )
    param_value = utils.strings.randstr(
        n = 10,
        fixed = False
    )
    # Pick the right separator depending on whether a query string exists.
    separator = '&' if '?' in url else '?'
    return url + separator + '%s=%s' % (param_name, param_value)
def request(url, headers = []):
    """Fetch *url* and return the response body.

    A random User-Agent header is added when the caller did not supply
    one.  BUG FIX: the original replaced the whole *headers* list in that
    case, silently discarding any caller-provided headers; they are now
    preserved.  The list is copied rather than mutated so the shared
    default argument stays clean.
    """
    if not next((x for x in headers if x[0] == 'User-Agent'), False):
        headers = headers + [ ('User-Agent', random.choice(load_all_agents())) ]
    opener = urllib2.build_opener()
    opener.addheaders = headers
    return opener.open(url).read()
bkahlert/seqan-research | raw/workshop13/workshop2013-data-20130926/trunk/util/py_lib/seqan/dox/pure.py | 2 | 4798 | #!/usr/bin/python
"""Implementation of the SeqAn Doxygen dialect.
"""
import argparse
import logging
import os
import re
import sys
import file_mgr
import lexer
import dox_tokens
import dox_parser
import proc_doc
import raw_doc
import write_html
import migration
# The expected HTML tags, useful for differentiating between F<T>::Type and real tags.
EXPECTED_TAGS = ['a', 'ul', 'ol', 'li', 'dl', 'dt', 'dd', 'em', 'i', 'b',
'strong', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'h7', 'tt',
'table', 'tbody', 'tr', 'th', 'td', 'caption', 'sup', 'img']
class FileNameSource(object):
    """Yields the documentation input files found below a set of paths.

    Plain file paths are yielded directly; directories are walked
    recursively, keeping only files with a known extension and skipping
    hidden files.
    """

    def __init__(self, paths):
        self.paths = paths
        # Only these extensions are considered documentation input.
        self.extensions = ['.h', '.cpp', '.dox']
        # File name prefixes to skip while walking.
        self.ignore = ['.svn']

    def generate(self):
        for path in self.paths:
            if os.path.isfile(path):
                yield os.path.abspath(path)
            # os.walk() on a plain file yields nothing, so this is a no-op
            # for file paths.
            for dirpath, _subdirs, filenames in os.walk(path):
                for name in filenames:
                    if name.startswith('.'):
                        continue
                    if not any(name.endswith(ext) for ext in self.extensions):
                        continue
                    if any(name.startswith(pat) for pat in self.ignore):
                        continue
                    yield os.path.join(dirpath, name)
def doMain(args):
    """Run the documentation build described by *args*.

    Migrates legacy DDDoc input, lexes and parses the doxygen-style
    comments of every input file, processes the merged raw documentation
    and writes the HTML output.  Returns a process exit code (0 on
    success, 1 on parse errors).

    NOTE: Python 2 module (print statements, ``except X, e`` syntax).
    """
    msg_printer = dox_parser.MessagePrinter(args.ignore_warnings_dirs)

    # Parse all legacy files.
    # Imported lazily so the legacy package is only needed when used.
    import seqan.dddoc.core as core
    app = core.App()
    for path in args.legacy_doc_dirs:
        print 'Scanning %s...' % path
        app.loadFiles(path)
    migrated_doc = raw_doc.RawDoc()
    if args.legacy_doc_dirs:
        app.loadingComplete()
        migrated_doc.entries = migration.migrate(app.dddoc_tree)
        print 'migrated_doc.entries', [e.name.text for e in migrated_doc.entries]

    # Parse all normal input files.
    fmgr = file_mgr.FileManager()
    master_doc = raw_doc.RawDoc()
    # Start from the migrated legacy entries.
    master_doc.merge(migrated_doc)
    fns = FileNameSource(args.inputs)
    for filename in fns.generate():
        if args.debug:
            print 'Processing %s' % filename
        the_file = fmgr.loadFile(filename)
        lex = lexer.Lexer(dox_tokens.LEXER_TOKENS, skip_whitespace=False)
        # Feed every doc comment of the file into one lexer instance.
        for comment in the_file.comments:
            # TODO(holtgrew): Also give offset.
            lex.input(comment.text, filename, comment.line + 1, comment.col, comment.offset_col)
        parser = dox_parser.Parser()
        try:
            parser.parse(lex)
        except dox_parser.ParserError, e:
            msg_printer.printParserError(e)
            return 1
        master_doc.merge(parser.documentation)

    # Generate documentation.
    logging.basicConfig(format='%(message)s', level=logging.DEBUG)
    logger = logging.getLogger()
    processor = proc_doc.DocProcessor(logger=logger, include_dirs=args.base_dirs,
                                      expected_tags=args.expected_tags,
                                      msg_printer=msg_printer)
    try:
        doc_proc = processor.run(master_doc)
    except dox_parser.ParserError, e:
        msg_printer.printParserError(e)
        return 1
    html_writer = write_html.HtmlWriter(doc_proc, args)
    html_writer.generateFor()
    msg_printer.printStats()
    return 0
def main():
    """Build the command line interface, parse argv and run doMain()."""
    cli = argparse.ArgumentParser()
    cli.add_argument('--lex-only', dest='lex_only', default=False,
                     action='store_true', help='Lex only.')
    cli.add_argument('--debug', dest='debug', default=False,
                     action='store_true', help='Debug.')
    cli.add_argument('-i', dest='inputs', default=[],
                     action='append', help='Path to input.')
    cli.add_argument('-lx', dest='legacy_demo_dirs', default=[],
                     action='append', help='Path to legacy demos.')
    cli.add_argument('-ldd', dest='legacy_doc_dirs', default=[],
                     action='append', help='Path to legacy doc dirs.')
    cli.add_argument('--image-dir', dest='image_dirs', default=[],
                     action='append', help='Path to image directory.')
    cli.add_argument('-b', '--base-dir', dest='base_dirs', default=['.'],
                     action='append', help='Base directory for @include.')
    cli.add_argument('--expected-tags', default=EXPECTED_TAGS,
                     action='append',
                     help='Expected tags, warn about other tags.')
    cli.add_argument('--ignore-warnings', dest='ignore_warnings_dirs',
                     default=[], action='append',
                     help='Ignore warnings from directory.')
    args = cli.parse_args()
    # An empty -i list is tolerated on purpose (legacy-only runs).
    return doMain(args)


if __name__ == "__main__":
    sys.exit(main())
| mit |
Communities-Communications/cc-odoo | addons/pad/py_etherpad/__init__.py | 505 | 7804 | """Module to talk to EtherpadLite API."""
import json
import urllib
import urllib2
class EtherpadLiteClient:
    """Client to talk to EtherpadLite API.

    Every public method maps 1:1 onto an EtherpadLite HTTP API function;
    all requests go through call(), which appends the API key, posts the
    parameters and decodes the JSON reply.
    """
    # HTTP API version embedded in every request URL.
    API_VERSION = 1  # TODO probably 1.1 sometime soon

    # Result codes the server returns in the 'code' field of a response.
    CODE_OK = 0
    CODE_INVALID_PARAMETERS = 1
    CODE_INTERNAL_ERROR = 2
    CODE_INVALID_FUNCTION = 3
    CODE_INVALID_API_KEY = 4
    # Per-request socket timeout in seconds.
    TIMEOUT = 20

    # Class-level defaults, overridable per instance via __init__().
    apiKey = ""
    baseUrl = "http://localhost:9001/api"

    def __init__(self, apiKey=None, baseUrl=None):
        if apiKey:
            self.apiKey = apiKey

        if baseUrl:
            self.baseUrl = baseUrl

    def call(self, function, arguments=None):
        """POST *arguments* (plus the API key) to *function*; return its data."""
        url = '%s/%d/%s' % (self.baseUrl, self.API_VERSION, function)

        params = arguments or {}
        params.update({'apikey': self.apiKey})
        data = urllib.urlencode(params, True)

        try:
            opener = urllib2.build_opener()
            request = urllib2.Request(url=url, data=data)
            response = opener.open(request, timeout=self.TIMEOUT)
            result = response.read()
            response.close()
        except urllib2.HTTPError:
            # Deliberately re-raised: HTTP errors propagate to the caller.
            raise

        result = json.loads(result)
        if result is None:
            raise ValueError("JSON response could not be decoded")

        return self.handleResult(result)

    def handleResult(self, result):
        """Map the server's result envelope onto a return value or exception."""
        if 'code' not in result:
            raise Exception("API response has no code")
        if 'message' not in result:
            raise Exception("API response has no message")

        if 'data' not in result:
            result['data'] = None

        if result['code'] == self.CODE_OK:
            return result['data']
        elif result['code'] == self.CODE_INVALID_PARAMETERS or result['code'] == self.CODE_INVALID_API_KEY:
            raise ValueError(result['message'])
        elif result['code'] == self.CODE_INTERNAL_ERROR:
            raise Exception(result['message'])
        elif result['code'] == self.CODE_INVALID_FUNCTION:
            raise Exception(result['message'])
        else:
            raise Exception("An unexpected error occurred whilst handling the response")

    # GROUPS
    # Pads can belong to a group. There will always be public pads that do
    # not belong to a group (or we give this group the id 0).

    def createGroup(self):
        """creates a new group"""
        return self.call("createGroup")

    def createGroupIfNotExistsFor(self, groupMapper):
        """this functions helps you to map your application group ids to etherpad lite group ids"""
        return self.call("createGroupIfNotExistsFor", {
            "groupMapper": groupMapper
        })

    def deleteGroup(self, groupID):
        """deletes a group"""
        return self.call("deleteGroup", {
            "groupID": groupID
        })

    def listPads(self, groupID):
        """returns all pads of this group"""
        return self.call("listPads", {
            "groupID": groupID
        })

    def createGroupPad(self, groupID, padName, text=''):
        """creates a new pad in this group"""
        params = {
            "groupID": groupID,
            "padName": padName,
        }
        # The server applies its own default text when none is given.
        if text:
            params['text'] = text
        return self.call("createGroupPad", params)

    # AUTHORS
    # These authors are bound to the attributes the users choose (color and name).

    def createAuthor(self, name=''):
        """creates a new author"""
        params = {}
        if name:
            params['name'] = name
        return self.call("createAuthor", params)

    def createAuthorIfNotExistsFor(self, authorMapper, name=''):
        """this functions helps you to map your application author ids to etherpad lite author ids"""
        params = {
            'authorMapper': authorMapper
        }
        if name:
            params['name'] = name
        return self.call("createAuthorIfNotExistsFor", params)

    # SESSIONS
    # Sessions can be created between a group and a author. This allows
    # an author to access more than one group. The sessionID will be set as
    # a cookie to the client and is valid until a certain date.

    def createSession(self, groupID, authorID, validUntil):
        """creates a new session"""
        return self.call("createSession", {
            "groupID": groupID,
            "authorID": authorID,
            "validUntil": validUntil
        })

    def deleteSession(self, sessionID):
        """deletes a session"""
        return self.call("deleteSession", {
            "sessionID": sessionID
        })

    def getSessionInfo(self, sessionID):
        """returns informations about a session"""
        return self.call("getSessionInfo", {
            "sessionID": sessionID
        })

    def listSessionsOfGroup(self, groupID):
        """returns all sessions of a group"""
        return self.call("listSessionsOfGroup", {
            "groupID": groupID
        })

    def listSessionsOfAuthor(self, authorID):
        """returns all sessions of an author"""
        return self.call("listSessionsOfAuthor", {
            "authorID": authorID
        })

    # PAD CONTENT
    # Pad content can be updated and retrieved through the API.

    def getText(self, padID, rev=None):
        """returns the text of a pad"""
        params = {"padID": padID}
        # A missing rev means "latest revision" on the server side.
        if rev is not None:
            params['rev'] = rev
        return self.call("getText", params)

    # introduced with pull request merge
    def getHtml(self, padID, rev=None):
        """returns the html of a pad"""
        params = {"padID": padID}
        if rev is not None:
            params['rev'] = rev
        return self.call("getHTML", params)

    def setText(self, padID, text):
        """sets the text of a pad"""
        return self.call("setText", {
            "padID": padID,
            "text": text
        })

    def setHtml(self, padID, html):
        """sets the text of a pad from html"""
        return self.call("setHTML", {
            "padID": padID,
            "html": html
        })

    # PAD
    # Group pads are normal pads, but with the name schema
    # GROUPID$PADNAME. A security manager controls access of them and its
    # forbidden for normal pads to include a in the name.

    def createPad(self, padID, text=''):
        """creates a new pad"""
        params = {
            "padID": padID,
        }
        if text:
            params['text'] = text
        return self.call("createPad", params)

    def getRevisionsCount(self, padID):
        """returns the number of revisions of this pad"""
        return self.call("getRevisionsCount", {
            "padID": padID
        })

    def deletePad(self, padID):
        """deletes a pad"""
        return self.call("deletePad", {
            "padID": padID
        })

    def getReadOnlyID(self, padID):
        """returns the read only link of a pad"""
        return self.call("getReadOnlyID", {
            "padID": padID
        })

    def setPublicStatus(self, padID, publicStatus):
        """sets a boolean for the public status of a pad"""
        return self.call("setPublicStatus", {
            "padID": padID,
            "publicStatus": publicStatus
        })

    def getPublicStatus(self, padID):
        """return true of false"""
        return self.call("getPublicStatus", {
            "padID": padID
        })

    def setPassword(self, padID, password):
        """returns ok or a error message"""
        return self.call("setPassword", {
            "padID": padID,
            "password": password
        })

    def isPasswordProtected(self, padID):
        """returns true or false"""
        return self.call("isPasswordProtected", {
            "padID": padID
        })
| agpl-3.0 |
mateor/pdroid | android-2.3.4_r1/tags/1.25/frameworks/ex/common/tools/make-iana-tld-pattern.py | 8 | 3678 | #!/usr/bin/env python
from urllib2 import urlopen
TLD_PREFIX = r"""
/**
* Regular expression to match all IANA top-level domains.
* List accurate as of 2010/02/05. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
* This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR =
"""
TLD_SUFFIX = '";'
URL_PREFIX = r"""
/**
* Regular expression to match all IANA top-level domains for WEB_URL.
* List accurate as of 2010/02/05. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
* This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL =
"(?:"
"""
URL_SUFFIX = ';'
class Bucket:
    """Collects the TLDs sharing one first letter and renders them as a
    regular-expression alternation fragment (e.g. ``(word1|word2|a[bc])``).
    """

    def __init__(self, baseLetter):
        # First letter shared by everything in this bucket.
        self.base = baseLetter
        # TLDs of length != 2, stored in full.
        self.words = []
        # Second letters of two-letter TLDs (the first letter is self.base).
        self.letters = []

    def dump(self, isWebUrl=False, isFirst=False, isLast=False):
        """Render this bucket as one line of the generated Java pattern.

        *isFirst*/*isLast* control the opening/closing of the overall
        alternation; *isWebUrl* selects the non-capturing ``(?:`` form.
        """
        if (len(self.words) == 0) and (len(self.letters) == 0):
            return ''

        self.words.sort()
        self.letters.sort()

        output = ' ';
        if isFirst:
            if isWebUrl:
                output += '+ "'
            else:
                output += '"('
        else:
            output += '+ "|'

        if len(self.words) != 0:
            output += '('
            if isWebUrl:
                output += '?:'
            firstWord = 1
            for word in self.words:
                if firstWord == 0:
                    output += '|'
                firstWord = 0
                for letter in word:
                    if letter == '-':
                        output += '\\\\'  # escape the '-' character.
                    output += letter

        # Separator between the word alternatives and the two-letter class.
        if len(self.words) > 0 and len(self.letters) > 0:
            output += '|'

        if len(self.letters) == 1:
            # Single two-letter TLD: emit it literally (e.g. 'ac').
            output += '%c%c' % (self.base, self.letters[0])
        elif len(self.letters) > 0:
            # Several two-letter TLDs: emit a character class (e.g. 'a[cd]').
            output += '%c[' % self.base
            for letter in self.letters:
                output += letter
            output += ']'

        if len(self.words) != 0:
            output += ')'

        if not isLast:
            output += '"'
        output += '\n'
        return output;

    def add(self, line):
        """File one TLD into this bucket; comments and blanks are ignored."""
        length = len(line)
        if line.startswith('#') or (length == 0):
            return;

        if length == 2:
            # Two-letter TLD: only the second letter needs to be stored.
            self.letters.append(line[1:2])
        else:
            self.words.append(line)
def getBucket(buckets, line):
    """Return the Bucket for *line*'s first letter, creating it on demand."""
    first = line[0]
    if first not in buckets:
        buckets[first] = Bucket(first)
    return buckets[first]
def makePattern(prefix, suffix, buckets, isWebUrl=False):
    """Print the full generated Java pattern for all buckets 'a'..'z'.

    NOTE: Python 2 module (print statement).
    """
    output = prefix

    # 'a' opens the alternation, 'z' closes it; range(ord('b'), ord('z'))
    # deliberately excludes 'z', which is handled separately below.
    output += getBucket(buckets, 'a').dump(isFirst=True, isWebUrl=isWebUrl)

    for letter in range(ord('b'), ord('z')):
        output += getBucket(buckets, chr(letter)).dump(isWebUrl=isWebUrl)

    output += getBucket(buckets, 'z').dump(isLast=True, isWebUrl=isWebUrl)

    if isWebUrl:
        output += '))"'
    else:
        output += ')'

    output += suffix
    print output
if __name__ == "__main__":
    # Download the authoritative IANA TLD list and bucket it by first letter.
    f = urlopen('http://data.iana.org/TLD/tlds-alpha-by-domain.txt')
    domains = f.readlines()
    f.close()

    buckets = {}

    for domain in domains:
        domain = domain.lower()

        # readlines() keeps the trailing newline, so the length check only
        # skips genuinely empty lines; strip() removes the newline on add.
        if len(domain) > 0:
            getBucket(buckets, domain[0]).add(domain.strip())

    # Emit both generated constants: the plain TLD pattern and the WEB_URL one.
    makePattern(TLD_PREFIX, TLD_SUFFIX, buckets, isWebUrl=False)
    makePattern(URL_PREFIX, URL_SUFFIX, buckets, isWebUrl=True)
| gpl-3.0 |
JRock007/boxxy | dist/Boxxy.app/Contents/Resources/lib/python2.7/numpy/core/tests/test_datetime.py | 38 | 85016 | from __future__ import division, absolute_import, print_function
import os, pickle
import numpy
import numpy as np
from numpy.testing import *
from numpy.compat import asbytes
import datetime
# Use pytz to test out various time zones if available
try:
from pytz import timezone as tz
_has_pytz = True
except ImportError:
_has_pytz = False
class TestDateTime(TestCase):
def test_datetime_dtype_creation(self):
    """Datetime/timedelta dtype spellings, endianness, and bad-unit rejection."""
    for unit in ['Y', 'M', 'W', 'D',
                 'h', 'm', 's', 'ms', 'us',
                 'ns', 'ps', 'fs', 'as']:
        dt1 = np.dtype('M8[750%s]'%unit)
        assert_(dt1 == np.dtype('datetime64[750%s]' % unit))
        dt2 = np.dtype('m8[%s]' % unit)
        assert_(dt2 == np.dtype('timedelta64[%s]' % unit))

    # Generic units shouldn't add [] to the end
    assert_equal(str(np.dtype("M8")), "datetime64")

    # Should be possible to specify the endianness
    assert_equal(np.dtype("=M8"), np.dtype("M8"))
    assert_equal(np.dtype("=M8[s]"), np.dtype("M8[s]"))
    assert_(np.dtype(">M8") == np.dtype("M8") or
            np.dtype("<M8") == np.dtype("M8"))
    assert_(np.dtype(">M8[D]") == np.dtype("M8[D]") or
            np.dtype("<M8[D]") == np.dtype("M8[D]"))
    assert_(np.dtype(">M8") != np.dtype("<M8"))

    assert_equal(np.dtype("=m8"), np.dtype("m8"))
    assert_equal(np.dtype("=m8[s]"), np.dtype("m8[s]"))
    assert_(np.dtype(">m8") == np.dtype("m8") or
            np.dtype("<m8") == np.dtype("m8"))
    assert_(np.dtype(">m8[D]") == np.dtype("m8[D]") or
            np.dtype("<m8[D]") == np.dtype("m8[D]"))
    assert_(np.dtype(">m8") != np.dtype("<m8"))

    # Check that the parser rejects bad datetime types
    assert_raises(TypeError, np.dtype, 'M8[badunit]')
    assert_raises(TypeError, np.dtype, 'm8[badunit]')
    assert_raises(TypeError, np.dtype, 'M8[YY]')
    assert_raises(TypeError, np.dtype, 'm8[YY]')
    assert_warns(DeprecationWarning, np.dtype, 'm4')
    assert_warns(DeprecationWarning, np.dtype, 'M7')
    assert_warns(DeprecationWarning, np.dtype, 'm7')
    assert_warns(DeprecationWarning, np.dtype, 'M16')
    assert_warns(DeprecationWarning, np.dtype, 'm16')
def test_datetime_casting_rules(self):
    """np.can_cast() rules between datetime64, timedelta64 and other types."""
    # Cannot cast safely/same_kind between timedelta and datetime
    assert_(not np.can_cast('m8', 'M8', casting='same_kind'))
    assert_(not np.can_cast('M8', 'm8', casting='same_kind'))
    assert_(not np.can_cast('m8', 'M8', casting='safe'))
    assert_(not np.can_cast('M8', 'm8', casting='safe'))

    # Can cast safely/same_kind from integer to timedelta
    assert_(np.can_cast('i8', 'm8', casting='same_kind'))
    assert_(np.can_cast('i8', 'm8', casting='safe'))

    # Cannot cast safely/same_kind from float to timedelta
    assert_(not np.can_cast('f4', 'm8', casting='same_kind'))
    assert_(not np.can_cast('f4', 'm8', casting='safe'))

    # Cannot cast safely/same_kind from integer to datetime
    assert_(not np.can_cast('i8', 'M8', casting='same_kind'))
    assert_(not np.can_cast('i8', 'M8', casting='safe'))

    # Cannot cast safely/same_kind from bool to datetime
    assert_(not np.can_cast('b1', 'M8', casting='same_kind'))
    assert_(not np.can_cast('b1', 'M8', casting='safe'))

    # Can cast safely/same_kind from bool to timedelta
    assert_(np.can_cast('b1', 'm8', casting='same_kind'))
    assert_(np.can_cast('b1', 'm8', casting='safe'))

    # Can cast datetime safely from months/years to days
    assert_(np.can_cast('M8[M]', 'M8[D]', casting='safe'))
    assert_(np.can_cast('M8[Y]', 'M8[D]', casting='safe'))
    # Cannot cast timedelta safely from months/years to days
    assert_(not np.can_cast('m8[M]', 'm8[D]', casting='safe'))
    assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='safe'))
    # Can cast datetime same_kind from months/years to days
    assert_(np.can_cast('M8[M]', 'M8[D]', casting='same_kind'))
    assert_(np.can_cast('M8[Y]', 'M8[D]', casting='same_kind'))
    # Can't cast timedelta same_kind from months/years to days
    assert_(not np.can_cast('m8[M]', 'm8[D]', casting='same_kind'))
    assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='same_kind'))
    # Can't cast datetime same_kind across the date/time boundary
    assert_(not np.can_cast('M8[D]', 'M8[h]', casting='same_kind'))
    assert_(not np.can_cast('M8[h]', 'M8[D]', casting='same_kind'))
    # Can cast timedelta same_kind across the date/time boundary
    assert_(np.can_cast('m8[D]', 'm8[h]', casting='same_kind'))
    assert_(np.can_cast('m8[h]', 'm8[D]', casting='same_kind'))

    # Cannot cast safely if the integer multiplier doesn't divide
    assert_(not np.can_cast('M8[7h]', 'M8[3h]', casting='safe'))
    assert_(not np.can_cast('M8[3h]', 'M8[6h]', casting='safe'))
    # But can cast same_kind
    assert_(np.can_cast('M8[7h]', 'M8[3h]', casting='same_kind'))
    # Can cast safely if the integer multiplier does divide
    assert_(np.can_cast('M8[6h]', 'M8[3h]', casting='safe'))
def test_datetime_scalar_construction(self):
    """np.datetime64 scalar construction from strings, ints and date objects.

    NOTE(review): uses 'Z'-suffixed (timezone) strings, which newer NumPy
    releases deprecate/reject — this test targets the NumPy version vendored
    with this tree.
    """
    # Construct with different units
    assert_equal(np.datetime64('1950-03-12', 'D'),
                 np.datetime64('1950-03-12'))
    assert_equal(np.datetime64('1950-03-12T13Z', 's'),
                 np.datetime64('1950-03-12T13Z', 'm'))

    # Default construction means NaT
    assert_equal(np.datetime64(), np.datetime64('NaT'))

    # Some basic strings and repr
    assert_equal(str(np.datetime64('NaT')), 'NaT')
    assert_equal(repr(np.datetime64('NaT')),
                 "numpy.datetime64('NaT')")
    assert_equal(str(np.datetime64('2011-02')), '2011-02')
    assert_equal(repr(np.datetime64('2011-02')),
                 "numpy.datetime64('2011-02')")

    # None gets constructed as NaT
    assert_equal(np.datetime64(None), np.datetime64('NaT'))

    # Default construction of NaT is in generic units
    assert_equal(np.datetime64().dtype, np.dtype('M8'))
    assert_equal(np.datetime64('NaT').dtype, np.dtype('M8'))

    # Construction from integers requires a specified unit
    assert_raises(ValueError, np.datetime64, 17)

    # When constructing from a scalar or zero-dimensional array,
    # it either keeps the units or you can override them.
    a = np.datetime64('2000-03-18T16Z', 'h')
    b = np.array('2000-03-18T16Z', dtype='M8[h]')

    assert_equal(a.dtype, np.dtype('M8[h]'))
    assert_equal(b.dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(a), a)
    assert_equal(np.datetime64(a).dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(b), a)
    assert_equal(np.datetime64(b).dtype, np.dtype('M8[h]'))

    assert_equal(np.datetime64(a, 's'), a)
    assert_equal(np.datetime64(a, 's').dtype, np.dtype('M8[s]'))

    assert_equal(np.datetime64(b, 's'), a)
    assert_equal(np.datetime64(b, 's').dtype, np.dtype('M8[s]'))

    # Construction from datetime.date
    assert_equal(np.datetime64('1945-03-25'),
                 np.datetime64(datetime.date(1945, 3, 25)))
    assert_equal(np.datetime64('2045-03-25', 'D'),
                 np.datetime64(datetime.date(2045, 3, 25), 'D'))

    # Construction from datetime.datetime
    assert_equal(np.datetime64('1980-01-25T14:36:22.5Z'),
                 np.datetime64(datetime.datetime(1980, 1, 25,
                                                 14, 36, 22, 500000)))

    # Construction with time units from a date raises
    assert_raises(TypeError, np.datetime64, '1920-03-13', 'h')
    assert_raises(TypeError, np.datetime64, '1920-03', 'm')
    assert_raises(TypeError, np.datetime64, '1920', 's')
    assert_raises(TypeError, np.datetime64, datetime.date(2045, 3, 25), 'ms')

    # Construction with date units from a datetime raises
    assert_raises(TypeError, np.datetime64, '1920-03-13T18Z', 'D')
    assert_raises(TypeError, np.datetime64, '1920-03-13T18:33Z', 'W')
    assert_raises(TypeError, np.datetime64, '1920-03-13T18:33:12Z', 'M')
    assert_raises(TypeError, np.datetime64, '1920-03-13T18:33:12.5Z', 'Y')

    assert_raises(TypeError, np.datetime64,
                  datetime.datetime(1920, 4, 14, 13, 20), 'D')
def test_datetime_array_find_type(self):
    """Dtype deduction when building arrays from datetime-like scalars."""
    dt = np.datetime64('1970-01-01', 'M')
    arr = np.array([dt])
    assert_equal(arr.dtype, np.dtype('M8[M]'))

    # at the moment, we don't automatically convert these to datetime64

    dt = datetime.date(1970, 1, 1)
    arr = np.array([dt])
    assert_equal(arr.dtype, np.dtype('O'))

    dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
    arr = np.array([dt])
    assert_equal(arr.dtype, np.dtype('O'))

    # find "supertype" for non-dates and dates

    b = np.bool_(True)
    dt = np.datetime64('1970-01-01', 'M')
    arr = np.array([b, dt])
    assert_equal(arr.dtype, np.dtype('O'))

    dt = datetime.date(1970, 1, 1)
    arr = np.array([b, dt])
    assert_equal(arr.dtype, np.dtype('O'))

    dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
    arr = np.array([b, dt])
    assert_equal(arr.dtype, np.dtype('O'))
def test_timedelta_scalar_construction(self):
    """np.timedelta64 scalar construction from ints, strings and timedeltas."""
    # Construct with different units
    assert_equal(np.timedelta64(7, 'D'),
                 np.timedelta64(1, 'W'))
    assert_equal(np.timedelta64(120, 's'),
                 np.timedelta64(2, 'm'))

    # Default construction means 0
    assert_equal(np.timedelta64(), np.timedelta64(0))

    # None gets constructed as NaT
    assert_equal(np.timedelta64(None), np.timedelta64('NaT'))

    # Some basic strings and repr
    assert_equal(str(np.timedelta64('NaT')), 'NaT')
    assert_equal(repr(np.timedelta64('NaT')),
                 "numpy.timedelta64('NaT')")
    assert_equal(str(np.timedelta64(3, 's')), '3 seconds')
    assert_equal(repr(np.timedelta64(-3, 's')),
                 "numpy.timedelta64(-3,'s')")
    assert_equal(repr(np.timedelta64(12)),
                 "numpy.timedelta64(12)")

    # Construction from an integer produces generic units
    assert_equal(np.timedelta64(12).dtype, np.dtype('m8'))

    # When constructing from a scalar or zero-dimensional array,
    # it either keeps the units or you can override them.
    a = np.timedelta64(2, 'h')
    b = np.array(2, dtype='m8[h]')

    assert_equal(a.dtype, np.dtype('m8[h]'))
    assert_equal(b.dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(a), a)
    assert_equal(np.timedelta64(a).dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(b), a)
    assert_equal(np.timedelta64(b).dtype, np.dtype('m8[h]'))

    assert_equal(np.timedelta64(a, 's'), a)
    assert_equal(np.timedelta64(a, 's').dtype, np.dtype('m8[s]'))

    assert_equal(np.timedelta64(b, 's'), a)
    assert_equal(np.timedelta64(b, 's').dtype, np.dtype('m8[s]'))

    # Construction from datetime.timedelta
    assert_equal(np.timedelta64(5, 'D'),
                 np.timedelta64(datetime.timedelta(days=5)))
    assert_equal(np.timedelta64(102347621, 's'),
                 np.timedelta64(datetime.timedelta(seconds=102347621)))
    assert_equal(np.timedelta64(-10234760000, 'us'),
                 np.timedelta64(datetime.timedelta(
                     microseconds=-10234760000)))
    assert_equal(np.timedelta64(10234760000, 'us'),
                 np.timedelta64(datetime.timedelta(
                     microseconds=10234760000)))
    assert_equal(np.timedelta64(1023476, 'ms'),
                 np.timedelta64(datetime.timedelta(milliseconds=1023476)))
    assert_equal(np.timedelta64(10, 'm'),
                 np.timedelta64(datetime.timedelta(minutes=10)))
    assert_equal(np.timedelta64(281, 'h'),
                 np.timedelta64(datetime.timedelta(hours=281)))
    assert_equal(np.timedelta64(28, 'W'),
                 np.timedelta64(datetime.timedelta(weeks=28)))

    # Cannot construct across nonlinear time unit boundaries
    a = np.timedelta64(3, 's')
    assert_raises(TypeError, np.timedelta64, a, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'Y')
    a = np.timedelta64(6, 'M')
    assert_raises(TypeError, np.timedelta64, a, 'D')
    assert_raises(TypeError, np.timedelta64, a, 'h')
    a = np.timedelta64(1, 'Y')
    assert_raises(TypeError, np.timedelta64, a, 'D')
    assert_raises(TypeError, np.timedelta64, a, 'm')
def test_timedelta_scalar_construction_units(self):
    """Unit auto-detection from ISO strings, date objects and special values.

    The detected unit depends on the precision of the input (fractional
    second digits select ms/us/ns/ps/fs/as).
    """
    # String construction detecting units
    assert_equal(np.datetime64('2010').dtype,
                 np.dtype('M8[Y]'))
    assert_equal(np.datetime64('2010-03').dtype,
                 np.dtype('M8[M]'))
    assert_equal(np.datetime64('2010-03-12').dtype,
                 np.dtype('M8[D]'))
    assert_equal(np.datetime64('2010-03-12T17').dtype,
                 np.dtype('M8[h]'))
    assert_equal(np.datetime64('2010-03-12T17:15Z').dtype,
                 np.dtype('M8[m]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08Z').dtype,
                 np.dtype('M8[s]'))

    assert_equal(np.datetime64('2010-03-12T17:15:08.1Z').dtype,
                 np.dtype('M8[ms]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.12Z').dtype,
                 np.dtype('M8[ms]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.123Z').dtype,
                 np.dtype('M8[ms]'))

    assert_equal(np.datetime64('2010-03-12T17:15:08.1234Z').dtype,
                 np.dtype('M8[us]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.12345Z').dtype,
                 np.dtype('M8[us]'))
    assert_equal(np.datetime64('2010-03-12T17:15:08.123456Z').dtype,
                 np.dtype('M8[us]'))

    assert_equal(np.datetime64('1970-01-01T00:00:02.1234567Z').dtype,
                 np.dtype('M8[ns]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.12345678Z').dtype,
                 np.dtype('M8[ns]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.123456789Z').dtype,
                 np.dtype('M8[ns]'))

    assert_equal(np.datetime64('1970-01-01T00:00:02.1234567890Z').dtype,
                 np.dtype('M8[ps]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.12345678901Z').dtype,
                 np.dtype('M8[ps]'))
    assert_equal(np.datetime64('1970-01-01T00:00:02.123456789012Z').dtype,
                 np.dtype('M8[ps]'))

    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.1234567890123Z').dtype,
            np.dtype('M8[fs]'))
    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.12345678901234Z').dtype,
            np.dtype('M8[fs]'))
    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.123456789012345Z').dtype,
            np.dtype('M8[fs]'))

    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.1234567890123456Z').dtype,
            np.dtype('M8[as]'))
    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.12345678901234567Z').dtype,
            np.dtype('M8[as]'))
    assert_equal(np.datetime64(
            '1970-01-01T00:00:02.123456789012345678Z').dtype,
            np.dtype('M8[as]'))

    # Python date object
    assert_equal(np.datetime64(datetime.date(2010, 4, 16)).dtype,
                 np.dtype('M8[D]'))

    # Python datetime object
    assert_equal(np.datetime64(
            datetime.datetime(2010, 4, 16, 13, 45, 18)).dtype,
            np.dtype('M8[us]'))

    # 'today' special value
    assert_equal(np.datetime64('today').dtype,
                 np.dtype('M8[D]'))
    assert_raises(TypeError, np.datetime64, 'today', 'h')
    assert_raises(TypeError, np.datetime64, 'today', 's')
    assert_raises(TypeError, np.datetime64, 'today', 'as')

    # 'now' special value
    assert_equal(np.datetime64('now').dtype,
                 np.dtype('M8[s]'))
    assert_raises(TypeError, np.datetime64, 'now', 'Y')
    assert_raises(TypeError, np.datetime64, 'now', 'M')
    assert_raises(TypeError, np.datetime64, 'now', 'D')
def test_datetime_nat_casting(self):
a = np.array('NaT', dtype='M8[D]')
b = np.datetime64('NaT', '[D]')
# Arrays
assert_equal(a.astype('M8[s]'), np.array('NaT', dtype='M8[s]'))
assert_equal(a.astype('M8[ms]'), np.array('NaT', dtype='M8[ms]'))
assert_equal(a.astype('M8[M]'), np.array('NaT', dtype='M8[M]'))
assert_equal(a.astype('M8[Y]'), np.array('NaT', dtype='M8[Y]'))
assert_equal(a.astype('M8[W]'), np.array('NaT', dtype='M8[W]'))
# Scalars -> Scalars
assert_equal(np.datetime64(b, '[s]'), np.datetime64('NaT', '[s]'))
assert_equal(np.datetime64(b, '[ms]'), np.datetime64('NaT', '[ms]'))
assert_equal(np.datetime64(b, '[M]'), np.datetime64('NaT', '[M]'))
assert_equal(np.datetime64(b, '[Y]'), np.datetime64('NaT', '[Y]'))
assert_equal(np.datetime64(b, '[W]'), np.datetime64('NaT', '[W]'))
# Arrays -> Scalars
assert_equal(np.datetime64(a, '[s]'), np.datetime64('NaT', '[s]'))
assert_equal(np.datetime64(a, '[ms]'), np.datetime64('NaT', '[ms]'))
assert_equal(np.datetime64(a, '[M]'), np.datetime64('NaT', '[M]'))
assert_equal(np.datetime64(a, '[Y]'), np.datetime64('NaT', '[Y]'))
assert_equal(np.datetime64(a, '[W]'), np.datetime64('NaT', '[W]'))
def test_days_creation(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3 - 365)
assert_equal(np.array('1600', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3)
assert_equal(np.array('1601', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)/4 + 3 + 366)
assert_equal(np.array('1900', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4)
assert_equal(np.array('1901', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4 + 365)
assert_equal(np.array('1967', dtype='M8[D]').astype('i8'), -3*365 - 1)
assert_equal(np.array('1968', dtype='M8[D]').astype('i8'), -2*365 - 1)
assert_equal(np.array('1969', dtype='M8[D]').astype('i8'), -1*365)
assert_equal(np.array('1970', dtype='M8[D]').astype('i8'), 0*365)
assert_equal(np.array('1971', dtype='M8[D]').astype('i8'), 1*365)
assert_equal(np.array('1972', dtype='M8[D]').astype('i8'), 2*365)
assert_equal(np.array('1973', dtype='M8[D]').astype('i8'), 3*365 + 1)
assert_equal(np.array('1974', dtype='M8[D]').astype('i8'), 4*365 + 1)
assert_equal(np.array('2000', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4)
assert_equal(np.array('2001', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366)
assert_equal(np.array('2400', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3)
assert_equal(np.array('2401', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3 + 366)
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 28)
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 29)
assert_equal(np.array('2000-02-29', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 28)
assert_equal(np.array('2000-03-01', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 29)
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366 + 31 + 28 + 21)
def test_days_to_pydate(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('O'),
datetime.date(1599, 1, 1))
assert_equal(np.array('1600', dtype='M8[D]').astype('O'),
datetime.date(1600, 1, 1))
assert_equal(np.array('1601', dtype='M8[D]').astype('O'),
datetime.date(1601, 1, 1))
assert_equal(np.array('1900', dtype='M8[D]').astype('O'),
datetime.date(1900, 1, 1))
assert_equal(np.array('1901', dtype='M8[D]').astype('O'),
datetime.date(1901, 1, 1))
assert_equal(np.array('2000', dtype='M8[D]').astype('O'),
datetime.date(2000, 1, 1))
assert_equal(np.array('2001', dtype='M8[D]').astype('O'),
datetime.date(2001, 1, 1))
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('O'),
datetime.date(1600, 2, 29))
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('O'),
datetime.date(1600, 3, 1))
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('O'),
datetime.date(2001, 3, 22))
def test_dtype_comparison(self):
assert_(not (np.dtype('M8[us]') == np.dtype('M8[ms]')))
assert_(np.dtype('M8[us]') != np.dtype('M8[ms]'))
assert_(np.dtype('M8[2D]') != np.dtype('M8[D]'))
assert_(np.dtype('M8[D]') != np.dtype('M8[2D]'))
def test_pydatetime_creation(self):
a = np.array(['1960-03-12', datetime.date(1960, 3, 12)], dtype='M8[D]')
assert_equal(a[0], a[1])
a = np.array(['1999-12-31', datetime.date(1999, 12, 31)], dtype='M8[D]')
assert_equal(a[0], a[1])
a = np.array(['2000-01-01', datetime.date(2000, 1, 1)], dtype='M8[D]')
assert_equal(a[0], a[1])
# Will fail if the date changes during the exact right moment
a = np.array(['today', datetime.date.today()], dtype='M8[D]')
assert_equal(a[0], a[1])
# datetime.datetime.now() returns local time, not UTC
#a = np.array(['now', datetime.datetime.now()], dtype='M8[s]')
#assert_equal(a[0], a[1])
# A datetime.date will raise if you try to give it time units
assert_raises(TypeError, np.array, datetime.date(1960, 3, 12),
dtype='M8[s]')
    def test_datetime_string_conversion(self):
        """Round-trip between datetime64 and bytes/unicode string arrays.

        Covers astype in both directions, plus assignment into an
        existing array (``dt_b[...] = str_a``), first for 'S' (bytes)
        and then for 'U' (unicode) dtypes.
        """
        a = ['2011-03-16', '1920-01-01', '2013-05-19']
        str_a = np.array(a, dtype='S')
        dt_a = np.array(a, dtype='M')
        str_b = np.empty_like(str_a)
        dt_b = np.empty_like(dt_a)
        # String to datetime
        assert_equal(dt_a, str_a.astype('M'))
        # The unit must be detected from the string contents as well.
        assert_equal(dt_a.dtype, str_a.astype('M').dtype)
        dt_b[...] = str_a
        assert_equal(dt_a, dt_b)
        # Datetime to string
        assert_equal(str_a, dt_a.astype('S0'))
        str_b[...] = dt_a
        assert_equal(str_a, str_b)
        # Convert the 'S' to 'U'
        str_a = str_a.astype('U')
        str_b = str_b.astype('U')
        # Unicode to datetime
        assert_equal(dt_a, str_a.astype('M'))
        assert_equal(dt_a.dtype, str_a.astype('M').dtype)
        dt_b[...] = str_a
        assert_equal(dt_a, dt_b)
        # Datetime to unicode
        assert_equal(str_a, dt_a.astype('U'))
        str_b[...] = dt_a
        assert_equal(str_a, str_b)
    def test_datetime_array_str(self):
        """str() and np.array2string formatting of datetime64 arrays."""
        a = np.array(['2011-03-16', '1920-01-01', '2013-05-19'], dtype='M')
        assert_equal(str(a), "['2011-03-16' '1920-01-01' '2013-05-19']")
        # Custom per-type formatter hook of array2string.
        a = np.array(['2011-03-16T13:55Z', '1920-01-01T03:12Z'], dtype='M')
        assert_equal(np.array2string(a, separator=', ',
                     formatter={'datetime': lambda x :
                     "'%s'" % np.datetime_as_string(x, timezone='UTC')}),
                     "['2011-03-16T13:55Z', '1920-01-01T03:12Z']")
        # Check that one NaT doesn't corrupt subsequent entries
        a = np.array(['2010', 'NaT', '2030']).astype('M')
        assert_equal(str(a), "['2010' 'NaT' '2030']")
    def test_pickle(self):
        """Pickle round-trips for datetime dtypes, incl. legacy payloads."""
        # Check that pickle roundtripping works
        dt = np.dtype('M8[7D]')
        assert_equal(pickle.loads(pickle.dumps(dt)), dt)
        dt = np.dtype('M8[W]')
        assert_equal(pickle.loads(pickle.dumps(dt)), dt)
        # Check that loading pickles from 1.6 works
        # NOTE(review): the payloads below appear to be protocol-0 pickle
        # text captured from NumPy 1.6 -- they must stay byte-exact;
        # asbytes encodes them for pickle.loads.
        pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              "(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'D'\np6\n" + \
              "I7\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(asbytes(pkl)), np.dtype('<M8[7D]'))
        pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              "(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'W'\np6\n" + \
              "I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(asbytes(pkl)), np.dtype('<M8[W]'))
        pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
              "(I4\nS'>'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'us'\np6\n" + \
              "I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
        assert_equal(pickle.loads(asbytes(pkl)), np.dtype('>M8[us]'))
    def test_setstate(self):
        "Verify that datetime dtype __setstate__ can handle bad arguments"
        dt = np.dtype('>M8[us]')
        # A malformed final state element must raise ValueError and leave
        # the dtype's reduce state unchanged.
        assert_raises(ValueError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, 1))
        assert (dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
        # A wrongly-typed metadata tuple must raise TypeError, again
        # without corrupting the dtype.
        assert_raises(TypeError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, ({}, 'xxx')))
        assert (dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
    def test_dtype_promotion(self):
        """promote_types on datetime/timedelta pairs computes a unit GCD."""
        # datetime <op> datetime computes the metadata gcd
        # timedelta <op> timedelta computes the metadata gcd
        for mM in ['m', 'M']:
            assert_equal(
                np.promote_types(np.dtype(mM+'8[2Y]'), np.dtype(mM+'8[2Y]')),
                np.dtype(mM+'8[2Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[12Y]'), np.dtype(mM+'8[15Y]')),
                np.dtype(mM+'8[3Y]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[62M]'), np.dtype(mM+'8[24M]')),
                np.dtype(mM+'8[2M]'))
            # The GCD may cross unit boundaries (weeks/days/seconds).
            assert_equal(
                np.promote_types(np.dtype(mM+'8[1W]'), np.dtype(mM+'8[2D]')),
                np.dtype(mM+'8[1D]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[W]'), np.dtype(mM+'8[13s]')),
                np.dtype(mM+'8[s]'))
            assert_equal(
                np.promote_types(np.dtype(mM+'8[13W]'), np.dtype(mM+'8[49s]')),
                np.dtype(mM+'8[7s]'))
        # timedelta <op> timedelta raises when there is no reasonable gcd
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[Y]'), np.dtype('m8[D]'))
        assert_raises(TypeError, np.promote_types,
                      np.dtype('m8[M]'), np.dtype('m8[W]'))
        # timedelta <op> timedelta may overflow with big unit ranges
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[W]'), np.dtype('m8[fs]'))
        assert_raises(OverflowError, np.promote_types,
                      np.dtype('m8[s]'), np.dtype('m8[as]'))
def test_cast_overflow(self):
# gh-4486
def cast():
numpy.datetime64("1971-01-01 00:00:00.000000000000000").astype("<M8[D]")
assert_raises(OverflowError, cast)
def cast2():
numpy.datetime64("2014").astype("<M8[fs]")
assert_raises(OverflowError, cast2)
    def test_pyobject_roundtrip(self):
        """Datetimes of every unit survive a cast to object dtype and back."""
        # All datetime types should be able to roundtrip through object
        a = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0,
                      -1020040340, -2942398, -1, 0, 1, 234523453, 1199164176],
                     dtype=np.int64)
        # With date units
        for unit in ['M8[D]', 'M8[W]', 'M8[M]', 'M8[Y]']:
            b = a.copy().view(dtype=unit)
            # Overwrite the leading slots with boundary dates: negative
            # years, year zero, the epoch neighborhood, 4/5-digit years,
            # and NaT; the remaining slots keep arbitrary i8 values.
            b[0] = '-0001-01-01'
            b[1] = '-0001-12-31'
            b[2] = '0000-01-01'
            b[3] = '0001-01-01'
            b[4] = '1969-12-31'
            b[5] = '1970-01-01'
            b[6] = '9999-12-31'
            b[7] = '10000-01-01'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
        # With time units
        for unit in ['M8[as]', 'M8[16fs]', 'M8[ps]', 'M8[us]',
                     'M8[300as]', 'M8[20us]']:
            b = a.copy().view(dtype=unit)
            b[0] = '-0001-01-01T00Z'
            b[1] = '-0001-12-31T00Z'
            b[2] = '0000-01-01T00Z'
            b[3] = '0001-01-01T00Z'
            b[4] = '1969-12-31T23:59:59.999999Z'
            b[5] = '1970-01-01T00Z'
            b[6] = '9999-12-31T23:59:59.999999Z'
            b[7] = '10000-01-01T00Z'
            b[8] = 'NaT'
            assert_equal(b.astype(object).astype(unit), b,
                         "Error roundtripping unit %s" % unit)
    def test_month_truncation(self):
        """Casting to 'M8[M]' truncates to the start of the month."""
        # Make sure that months are truncating correctly
        assert_equal(np.array('1945-03-01', dtype='M8[M]'),
                     np.array('1945-03-31', dtype='M8[M]'))
        # The 1969 cases cover truncation of pre-epoch (negative) values.
        assert_equal(np.array('1969-11-01', dtype='M8[M]'),
             np.array('1969-11-30T23:59:59.99999Z', dtype='M').astype('M8[M]'))
        assert_equal(np.array('1969-12-01', dtype='M8[M]'),
             np.array('1969-12-31T23:59:59.99999Z', dtype='M').astype('M8[M]'))
        assert_equal(np.array('1970-01-01', dtype='M8[M]'),
             np.array('1970-01-31T23:59:59.99999Z', dtype='M').astype('M8[M]'))
        # February 29 in the leap year 1980.
        assert_equal(np.array('1980-02-01', dtype='M8[M]'),
             np.array('1980-02-29T23:59:59.99999Z', dtype='M').astype('M8[M]'))
    def test_different_unit_comparison(self):
        """Equality across unit pairings, for both arrays and scalars."""
        # Check some years with date units
        for unit1 in ['Y', 'M', 'D']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['Y', 'M', 'D']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945', dtype=dt1),
                             np.array('1945', dtype=dt2))
                assert_equal(np.array('1970', dtype=dt1),
                             np.array('1970', dtype=dt2))
                assert_equal(np.array('9999', dtype=dt1),
                             np.array('9999', dtype=dt2))
                assert_equal(np.array('10000', dtype=dt1),
                             np.array('10000-01-01', dtype=dt2))
                assert_equal(np.datetime64('1945', unit1),
                             np.datetime64('1945', unit2))
                assert_equal(np.datetime64('1970', unit1),
                             np.datetime64('1970', unit2))
                assert_equal(np.datetime64('9999', unit1),
                             np.datetime64('9999', unit2))
                assert_equal(np.datetime64('10000', unit1),
                             np.datetime64('10000-01-01', unit2))
        # Check some datetimes with time units
        # (unit1 includes multiplier units like '6h' and '10ms')
        for unit1 in ['6h', 'h', 'm', 's', '10ms', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_equal(np.array('1945-03-12T18Z', dtype=dt1),
                             np.array('1945-03-12T18Z', dtype=dt2))
                assert_equal(np.array('1970-03-12T18Z', dtype=dt1),
                             np.array('1970-03-12T18Z', dtype=dt2))
                assert_equal(np.array('9999-03-12T18Z', dtype=dt1),
                             np.array('9999-03-12T18Z', dtype=dt2))
                assert_equal(np.array('10000-01-01T00Z', dtype=dt1),
                             np.array('10000-01-01T00Z', dtype=dt2))
                assert_equal(np.datetime64('1945-03-12T18Z', unit1),
                             np.datetime64('1945-03-12T18Z', unit2))
                assert_equal(np.datetime64('1970-03-12T18Z', unit1),
                             np.datetime64('1970-03-12T18Z', unit2))
                assert_equal(np.datetime64('9999-03-12T18Z', unit1),
                             np.datetime64('9999-03-12T18Z', unit2))
                assert_equal(np.datetime64('10000-01-01T00Z', unit1),
                             np.datetime64('10000-01-01T00Z', unit2))
        # Check some days with units that won't overflow
        for unit1 in ['D', '12h', 'h', 'm', 's', '4s', 'ms', 'us']:
            dt1 = np.dtype('M8[%s]' % unit1)
            for unit2 in ['D', 'h', 'm', 's', 'ms', 'us']:
                dt2 = np.dtype('M8[%s]' % unit2)
                assert_(np.equal(np.array('1932-02-17', dtype='M').astype(dt1),
                     np.array('1932-02-17T00:00:00Z', dtype='M').astype(dt2),
                     casting='unsafe'))
                assert_(np.equal(np.array('10000-04-27', dtype='M').astype(dt1),
                     np.array('10000-04-27T00:00:00Z', dtype='M').astype(dt2),
                     casting='unsafe'))
        # Shouldn't be able to compare datetime and timedelta
        # TODO: Changing to 'same_kind' or 'safe' casting in the ufuncs by
        # default is needed to properly catch this kind of thing...
        a = np.array('2012-12-21', dtype='M8[D]')
        b = np.array(3, dtype='m8[D]')
        #assert_raises(TypeError, np.less, a, b)
        assert_raises(TypeError, np.less, a, b, casting='same_kind')
def test_datetime_like(self):
a = np.array([3], dtype='m8[4D]')
b = np.array(['2012-12-21'], dtype='M8[D]')
assert_equal(np.ones_like(a).dtype, a.dtype)
assert_equal(np.zeros_like(a).dtype, a.dtype)
assert_equal(np.empty_like(a).dtype, a.dtype)
assert_equal(np.ones_like(b).dtype, b.dtype)
assert_equal(np.zeros_like(b).dtype, b.dtype)
assert_equal(np.empty_like(b).dtype, b.dtype)
def test_datetime_unary(self):
for tda, tdb, tdzero, tdone, tdmone in \
[
# One-dimensional arrays
(np.array([3], dtype='m8[D]'),
np.array([-3], dtype='m8[D]'),
np.array([0], dtype='m8[D]'),
np.array([1], dtype='m8[D]'),
np.array([-1], dtype='m8[D]')),
# NumPy scalars
(np.timedelta64(3, '[D]'),
np.timedelta64(-3, '[D]'),
np.timedelta64(0, '[D]'),
np.timedelta64(1, '[D]'),
np.timedelta64(-1, '[D]'))]:
# negative ufunc
assert_equal(-tdb, tda)
assert_equal((-tdb).dtype, tda.dtype)
assert_equal(np.negative(tdb), tda)
assert_equal(np.negative(tdb).dtype, tda.dtype)
# absolute ufunc
assert_equal(np.absolute(tdb), tda)
assert_equal(np.absolute(tdb).dtype, tda.dtype)
# sign ufunc
assert_equal(np.sign(tda), tdone)
assert_equal(np.sign(tdb), tdmone)
assert_equal(np.sign(tdzero), tdzero)
assert_equal(np.sign(tda).dtype, tda.dtype)
# The ufuncs always produce native-endian results
assert_
    def test_datetime_add(self):
        """Addition rules between M8, m8, bool, and int operands.

        Runs the same assertions twice: once with one-dimensional arrays
        and once with NumPy scalar objects.
        """
        for dta, dtb, dtc, dtnat, tda, tdb, tdc in \
                [
                 # One-dimensional arrays
                 (np.array(['2012-12-21'], dtype='M8[D]'),
                  np.array(['2012-12-24'], dtype='M8[D]'),
                  np.array(['2012-12-21T11Z'], dtype='M8[h]'),
                  np.array(['NaT'], dtype='M8[D]'),
                  np.array([3], dtype='m8[D]'),
                  np.array([11], dtype='m8[h]'),
                  np.array([3*24 + 11], dtype='m8[h]')),
                 # NumPy scalars
                 (np.datetime64('2012-12-21', '[D]'),
                  np.datetime64('2012-12-24', '[D]'),
                  np.datetime64('2012-12-21T11Z', '[h]'),
                  np.datetime64('NaT', '[D]'),
                  np.timedelta64(3, '[D]'),
                  np.timedelta64(11, '[h]'),
                  np.timedelta64(3*24 + 11, '[h]'))]:
            # m8 + m8
            assert_equal(tda + tdb, tdc)
            assert_equal((tda + tdb).dtype, np.dtype('m8[h]'))
            # m8 + bool
            assert_equal(tdb + True, tdb + 1)
            assert_equal((tdb + True).dtype, np.dtype('m8[h]'))
            # m8 + int
            assert_equal(tdb + 3*24, tdc)
            assert_equal((tdb + 3*24).dtype, np.dtype('m8[h]'))
            # bool + m8
            assert_equal(False + tdb, tdb)
            assert_equal((False + tdb).dtype, np.dtype('m8[h]'))
            # int + m8
            assert_equal(3*24 + tdb, tdc)
            assert_equal((3*24 + tdb).dtype, np.dtype('m8[h]'))
            # M8 + bool  (NaT absorbs any addition)
            assert_equal(dta + True, dta + 1)
            assert_equal(dtnat + True, dtnat)
            assert_equal((dta + True).dtype, np.dtype('M8[D]'))
            # M8 + int
            assert_equal(dta + 3, dtb)
            assert_equal(dtnat + 3, dtnat)
            assert_equal((dta + 3).dtype, np.dtype('M8[D]'))
            # bool + M8
            assert_equal(False + dta, dta)
            assert_equal(False + dtnat, dtnat)
            assert_equal((False + dta).dtype, np.dtype('M8[D]'))
            # int + M8
            assert_equal(3 + dta, dtb)
            assert_equal(3 + dtnat, dtnat)
            assert_equal((3 + dta).dtype, np.dtype('M8[D]'))
            # M8 + m8
            assert_equal(dta + tda, dtb)
            assert_equal(dtnat + tda, dtnat)
            assert_equal((dta + tda).dtype, np.dtype('M8[D]'))
            # m8 + M8
            assert_equal(tda + dta, dtb)
            assert_equal(tda + dtnat, dtnat)
            assert_equal((tda + dta).dtype, np.dtype('M8[D]'))
            # In M8 + m8, the result goes to higher precision
            assert_equal(np.add(dta, tdb, casting='unsafe'), dtc)
            assert_equal(np.add(dta, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            assert_equal(np.add(tdb, dta, casting='unsafe'), dtc)
            assert_equal(np.add(tdb, dta, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 + M8
            assert_raises(TypeError, np.add, dta, dtb)
    def test_datetime_subtract(self):
        """Subtraction rules between M8, m8, bool, and int operands.

        Runs the same assertions twice: once with one-dimensional arrays
        and once with NumPy scalar objects.
        """
        for dta, dtb, dtc, dtd, dte, dtnat, tda, tdb, tdc in \
                [
                 # One-dimensional arrays
                 (np.array(['2012-12-21'], dtype='M8[D]'),
                  np.array(['2012-12-24'], dtype='M8[D]'),
                  np.array(['1940-12-24'], dtype='M8[D]'),
                  np.array(['1940-12-24T00Z'], dtype='M8[h]'),
                  np.array(['1940-12-23T13Z'], dtype='M8[h]'),
                  np.array(['NaT'], dtype='M8[D]'),
                  np.array([3], dtype='m8[D]'),
                  np.array([11], dtype='m8[h]'),
                  np.array([3*24 - 11], dtype='m8[h]')),
                 # NumPy scalars
                 (np.datetime64('2012-12-21', '[D]'),
                  np.datetime64('2012-12-24', '[D]'),
                  np.datetime64('1940-12-24', '[D]'),
                  np.datetime64('1940-12-24T00Z', '[h]'),
                  np.datetime64('1940-12-23T13Z', '[h]'),
                  np.datetime64('NaT', '[D]'),
                  np.timedelta64(3, '[D]'),
                  np.timedelta64(11, '[h]'),
                  np.timedelta64(3*24 - 11, '[h]'))]:
            # m8 - m8
            assert_equal(tda - tdb, tdc)
            assert_equal((tda - tdb).dtype, np.dtype('m8[h]'))
            assert_equal(tdb - tda, -tdc)
            assert_equal((tdb - tda).dtype, np.dtype('m8[h]'))
            # m8 - bool
            assert_equal(tdc - True, tdc - 1)
            assert_equal((tdc - True).dtype, np.dtype('m8[h]'))
            # m8 - int
            assert_equal(tdc - 3*24, -tdb)
            assert_equal((tdc - 3*24).dtype, np.dtype('m8[h]'))
            # bool - m8
            assert_equal(False - tdb, -tdb)
            assert_equal((False - tdb).dtype, np.dtype('m8[h]'))
            # int - m8
            assert_equal(3*24 - tdb, tdc)
            assert_equal((3*24 - tdb).dtype, np.dtype('m8[h]'))
            # M8 - bool  (NaT absorbs any subtraction)
            assert_equal(dtb - True, dtb - 1)
            assert_equal(dtnat - True, dtnat)
            assert_equal((dtb - True).dtype, np.dtype('M8[D]'))
            # M8 - int
            assert_equal(dtb - 3, dta)
            assert_equal(dtnat - 3, dtnat)
            assert_equal((dtb - 3).dtype, np.dtype('M8[D]'))
            # M8 - m8
            assert_equal(dtb - tda, dta)
            assert_equal(dtnat - tda, dtnat)
            assert_equal((dtb - tda).dtype, np.dtype('M8[D]'))
            # In M8 - m8, the result goes to higher precision
            assert_equal(np.subtract(dtc, tdb, casting='unsafe'), dte)
            assert_equal(np.subtract(dtc, tdb, casting='unsafe').dtype,
                         np.dtype('M8[h]'))
            # M8 - M8 with different units goes to the higher precision
            assert_equal(np.subtract(dtc, dtd, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtc, dtd, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe'),
                         np.timedelta64(0, 'h'))
            assert_equal(np.subtract(dtd, dtc, casting='unsafe').dtype,
                         np.dtype('m8[h]'))
            # m8 - M8
            assert_raises(TypeError, np.subtract, tda, dta)
            # bool - M8
            assert_raises(TypeError, np.subtract, False, dta)
            # int - M8
            assert_raises(TypeError, np.subtract, 3, dta)
def test_datetime_multiply(self):
for dta, tda, tdb, tdc in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array([6], dtype='m8[h]'),
np.array([9], dtype='m8[h]'),
np.array([12], dtype='m8[h]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.timedelta64(6, '[h]'),
np.timedelta64(9, '[h]'),
np.timedelta64(12, '[h]'))]:
# m8 * int
assert_equal(tda * 2, tdc)
assert_equal((tda * 2).dtype, np.dtype('m8[h]'))
# int * m8
assert_equal(2 * tda, tdc)
assert_equal((2 * tda).dtype, np.dtype('m8[h]'))
# m8 * float
assert_equal(tda * 1.5, tdb)
assert_equal((tda * 1.5).dtype, np.dtype('m8[h]'))
# float * m8
assert_equal(1.5 * tda, tdb)
assert_equal((1.5 * tda).dtype, np.dtype('m8[h]'))
# m8 * m8
assert_raises(TypeError, np.multiply, tda, tdb)
# m8 * M8
assert_raises(TypeError, np.multiply, dta, tda)
# M8 * m8
assert_raises(TypeError, np.multiply, tda, dta)
# M8 * int
assert_raises(TypeError, np.multiply, dta, 2)
# int * M8
assert_raises(TypeError, np.multiply, 2, dta)
# M8 * float
assert_raises(TypeError, np.multiply, dta, 1.5)
# float * M8
assert_raises(TypeError, np.multiply, 1.5, dta)
    def test_datetime_divide(self):
        """Division rules: m8/number scales, m8/m8 gives float ratios,
        and every combination involving a datetime or a number on the
        left is rejected."""
        for dta, tda, tdb, tdc, tdd in \
                [
                 # One-dimensional arrays
                 (np.array(['2012-12-21'], dtype='M8[D]'),
                  np.array([6], dtype='m8[h]'),
                  np.array([9], dtype='m8[h]'),
                  np.array([12], dtype='m8[h]'),
                  np.array([6], dtype='m8[m]')),
                 # NumPy scalars
                 (np.datetime64('2012-12-21', '[D]'),
                  np.timedelta64(6, '[h]'),
                  np.timedelta64(9, '[h]'),
                  np.timedelta64(12, '[h]'),
                  np.timedelta64(6, '[m]'))]:
            # m8 / int
            assert_equal(tdc / 2, tda)
            assert_equal((tdc / 2).dtype, np.dtype('m8[h]'))
            # m8 / float
            assert_equal(tda / 0.5, tdc)
            assert_equal((tda / 0.5).dtype, np.dtype('m8[h]'))
            # m8 / m8  (dimensionless float ratio; tdd has minute units,
            # so the 6h/6m pair checks cross-unit ratios)
            assert_equal(tda / tdb, 6.0 / 9.0)
            assert_equal(np.divide(tda, tdb), 6.0 / 9.0)
            assert_equal(np.true_divide(tda, tdb), 6.0 / 9.0)
            assert_equal(tdb / tda, 9.0 / 6.0)
            assert_equal((tda / tdb).dtype, np.dtype('f8'))
            assert_equal(tda / tdd, 60.0)
            assert_equal(tdd / tda, 1.0 / 60.0)
            # m8 // m8
            assert_raises(TypeError, np.floor_divide, tda, tdb)
            # int / m8
            assert_raises(TypeError, np.divide, 2, tdb)
            # float / m8
            assert_raises(TypeError, np.divide, 0.5, tdb)
            # m8 / M8
            assert_raises(TypeError, np.divide, dta, tda)
            # M8 / m8
            assert_raises(TypeError, np.divide, tda, dta)
            # M8 / int
            assert_raises(TypeError, np.divide, dta, 2)
            # int / M8
            assert_raises(TypeError, np.divide, 2, dta)
            # M8 / float
            assert_raises(TypeError, np.divide, dta, 1.5)
            # float / M8
            assert_raises(TypeError, np.divide, 1.5, dta)
    def test_datetime_compare(self):
        """Element-wise comparison ufuncs between a scalar and an array."""
        # Test all the comparison operators
        a = np.datetime64('2000-03-12T18:00:00.000000-0600')
        # b[0] equals a; b[1]/b[2] differ by one microsecond either side;
        # b[3]/b[4] are far before/after.
        b = np.array(['2000-03-12T18:00:00.000000-0600',
                      '2000-03-12T17:59:59.999999-0600',
                      '2000-03-12T18:00:00.000001-0600',
                      '1970-01-11T12:00:00.909090-0600',
                      '2016-01-11T12:00:00.909090-0600'],
                     dtype='datetime64[us]')
        assert_equal(np.equal(a, b), [1, 0, 0, 0, 0])
        assert_equal(np.not_equal(a, b), [0, 1, 1, 1, 1])
        assert_equal(np.less(a, b), [0, 0, 1, 0, 1])
        assert_equal(np.less_equal(a, b), [1, 0, 1, 0, 1])
        assert_equal(np.greater(a, b), [0, 1, 0, 1, 0])
        assert_equal(np.greater_equal(a, b), [1, 1, 0, 1, 0])
    def test_datetime_minmax(self):
        """minimum/maximum/fmin/fmax on datetimes and timedeltas."""
        # The metadata of the result should become the GCD
        # of the operand metadata
        a = np.array('1999-03-12T13Z', dtype='M8[2m]')
        b = np.array('1999-03-12T12Z', dtype='M8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('M8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('M8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # Interaction with NaT
        a = np.array('1999-03-12T13Z', dtype='M8[2m]')
        dtnat = np.array('NaT', dtype='M8[h]')
        assert_equal(np.minimum(a, dtnat), a)
        assert_equal(np.minimum(dtnat, a), a)
        assert_equal(np.maximum(a, dtnat), a)
        assert_equal(np.maximum(dtnat, a), a)
        # Also do timedelta
        a = np.array(3, dtype='m8[h]')
        b = np.array(3*3600 - 3, dtype='m8[s]')
        assert_equal(np.minimum(a, b), b)
        assert_equal(np.minimum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmin(a, b), b)
        assert_equal(np.fmin(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.maximum(a, b), a)
        assert_equal(np.maximum(a, b).dtype, np.dtype('m8[s]'))
        assert_equal(np.fmax(a, b), a)
        assert_equal(np.fmax(a, b).dtype, np.dtype('m8[s]'))
        # Viewed as integers, the comparison is opposite because
        # of the units chosen
        assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
        # should raise between datetime and timedelta
        #
        # TODO: Allowing unsafe casting by
        #       default in ufuncs strikes again... :(
        a = np.array(3, dtype='m8[h]')
        b = np.array('1999-03-12T12Z', dtype='M8[s]')
        #assert_raises(TypeError, np.minimum, a, b)
        #assert_raises(TypeError, np.maximum, a, b)
        #assert_raises(TypeError, np.fmin, a, b)
        #assert_raises(TypeError, np.fmax, a, b)
        assert_raises(TypeError, np.minimum, a, b, casting='same_kind')
        assert_raises(TypeError, np.maximum, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmin, a, b, casting='same_kind')
        assert_raises(TypeError, np.fmax, a, b, casting='same_kind')
def test_hours(self):
t = np.ones(3, dtype='M8[s]')
t[0] = 60*60*24 + 60*60*10
assert_(t[0].item().hour == 10 )
def test_divisor_conversion_year(self):
assert_(np.dtype('M8[Y/4]') == np.dtype('M8[3M]'))
assert_(np.dtype('M8[Y/13]') == np.dtype('M8[4W]'))
assert_(np.dtype('M8[3Y/73]') == np.dtype('M8[15D]'))
def test_divisor_conversion_month(self):
assert_(np.dtype('M8[M/2]') == np.dtype('M8[2W]'))
assert_(np.dtype('M8[M/15]') == np.dtype('M8[2D]'))
assert_(np.dtype('M8[3M/40]') == np.dtype('M8[54h]'))
def test_divisor_conversion_week(self):
assert_(np.dtype('m8[W/7]') == np.dtype('m8[D]'))
assert_(np.dtype('m8[3W/14]') == np.dtype('m8[36h]'))
assert_(np.dtype('m8[5W/140]') == np.dtype('m8[360m]'))
def test_divisor_conversion_day(self):
assert_(np.dtype('M8[D/12]') == np.dtype('M8[2h]'))
assert_(np.dtype('M8[D/120]') == np.dtype('M8[12m]'))
assert_(np.dtype('M8[3D/960]') == np.dtype('M8[270s]'))
def test_divisor_conversion_hour(self):
assert_(np.dtype('m8[h/30]') == np.dtype('m8[2m]'))
assert_(np.dtype('m8[3h/300]') == np.dtype('m8[36s]'))
def test_divisor_conversion_minute(self):
assert_(np.dtype('m8[m/30]') == np.dtype('m8[2s]'))
assert_(np.dtype('m8[3m/300]') == np.dtype('m8[600ms]'))
def test_divisor_conversion_second(self):
assert_(np.dtype('m8[s/100]') == np.dtype('m8[10ms]'))
assert_(np.dtype('m8[3s/10000]') == np.dtype('m8[300us]'))
def test_divisor_conversion_fs(self):
assert_(np.dtype('M8[fs/100]') == np.dtype('M8[10as]'))
self.assertRaises(ValueError, lambda : np.dtype('M8[3fs/10000]'))
def test_divisor_conversion_as(self):
self.assertRaises(ValueError, lambda : np.dtype('M8[as/10]'))
    def test_string_parser_variants(self):
        """Accepted variations of the ISO-8601 datetime string format."""
        # Allow space instead of 'T' between date and time
        assert_equal(np.array(['1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # Allow negative years
        assert_equal(np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
                     np.array(['-1980-02-29 01:02:03'], np.dtype('M8[s]')))
        # UTC specifier
        assert_equal(np.array(['-1980-02-29T01:02:03Z'], np.dtype('M8[s]')),
                     np.array(['-1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
        # Time zone offset (both '+-HHMM' and '+-HH:MM' forms, and a
        # bare '+-HH' form)
        assert_equal(np.array(['1980-02-29T02:02:03Z'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 00:32:03-0130'], np.dtype('M8[s]')))
        assert_equal(np.array(['1980-02-28T22:32:03Z'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 00:02:03+01:30'], np.dtype('M8[s]')))
        assert_equal(np.array(['1980-02-29T02:32:03.506Z'], np.dtype('M8[s]')),
                     np.array(['1980-02-29 00:32:03.506-02'], np.dtype('M8[s]')))
        assert_equal(np.datetime64('1977-03-02T12:30-0230'),
                     np.datetime64('1977-03-02T15:00Z'))
    def test_string_parser_error_check(self):
        """Malformed datetime strings must raise ValueError, field by field."""
        # Arbitrary bad string
        assert_raises(ValueError, np.array, ['badvalue'], np.dtype('M8[us]'))
        # Character after year must be '-'
        assert_raises(ValueError, np.array, ['1980X'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-'], np.dtype('M8[us]'))
        # Month must be in range [1,12]
        assert_raises(ValueError, np.array, ['1980-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-13'], np.dtype('M8[us]'))
        # Month must have two digits
        assert_raises(ValueError, np.array, ['1980-1'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-1-02'], np.dtype('M8[us]'))
        # 'Mor' is not a valid month
        assert_raises(ValueError, np.array, ['1980-Mor'], np.dtype('M8[us]'))
        # Cannot have trailing '-'
        assert_raises(ValueError, np.array, ['1980-01-'], np.dtype('M8[us]'))
        # Day must be in range [1,len(month)]
        assert_raises(ValueError, np.array, ['1980-01-0'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-00'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-01-32'], np.dtype('M8[us]'))
        # (1979 is not a leap year; 1980 is, so 02-30 is the first bad day)
        assert_raises(ValueError, np.array, ['1979-02-29'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-30'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-03-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-04-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-05-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-06-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-07-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-08-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-09-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-10-32'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-11-31'], np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-12-32'], np.dtype('M8[us]'))
        # Cannot have trailing characters
        assert_raises(ValueError, np.array, ['1980-02-03%'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 q'],
                      np.dtype('M8[us]'))
        # Hours must be in range [0, 23]
        assert_raises(ValueError, np.array, ['1980-02-03 25'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T25'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 24:01'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03T24:01'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 -1'],
                      np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:'],
                      np.dtype('M8[us]'))
        # Minutes must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:-1'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:60'],
                      np.dtype('M8[us]'))
        # No trailing ':'
        assert_raises(ValueError, np.array, ['1980-02-03 01:60:'],
                      np.dtype('M8[us]'))
        # Seconds must be in range [0, 59]
        assert_raises(ValueError, np.array, ['1980-02-03 01:10:-1'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:60'],
                      np.dtype('M8[us]'))
        # Timezone offset must within a reasonable range
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+0661'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+2500'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-0070'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-3000'],
                      np.dtype('M8[us]'))
        assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-25:00'],
                      np.dtype('M8[us]'))
    def test_creation_overflow(self):
        """Second-resolution epoch offsets scale exactly to ms/us/ns."""
        date = '1980-03-23 20:00:00Z'
        # Integer epoch offset of the date at second resolution.
        timesteps = np.array([date], dtype='datetime64[s]')[0].astype(np.int64)
        for unit in ['ms', 'us', 'ns']:
            # Each step to a finer unit multiplies the i8 value by 1000.
            timesteps *= 1000
            x = np.array([date], dtype='datetime64[%s]' % unit)
            assert_equal(timesteps, x[0].astype(np.int64),
                         err_msg='Datetime conversion error for unit %s' % unit)
        # Final check: the ns-resolution value does not overflow int64.
        assert_equal(x[0].astype(np.int64), 322689600000000000)
def test_datetime_as_string(self):
    """Check np.datetime_as_string for every unit, explicit and 'auto'."""
    # Default string conversion across the date units
    day = '1959-10-13'
    for unit, expected in (('Y', '1959'), ('M', '1959-10'), ('D', '1959-10-13')):
        assert_equal(np.datetime_as_string(np.datetime64(day, unit)), expected)
    # Default conversion across the coarser time units
    stamp = '1959-10-13T12:34:56.789012345678901234Z'
    for unit, expected in (
            ('h', '1959-10-13T12Z'),
            ('m', '1959-10-13T12:34Z'),
            ('s', '1959-10-13T12:34:56Z'),
            ('ms', '1959-10-13T12:34:56.789Z'),
            ('us', '1959-10-13T12:34:56.789012Z')):
        assert_equal(np.datetime_as_string(np.datetime64(stamp, unit)), expected)
    # Sub-microsecond units, just before the epoch
    stamp = '1969-12-31T23:34:56.789012345678901234Z'
    for unit, expected in (
            ('ns', '1969-12-31T23:34:56.789012345Z'),
            ('ps', '1969-12-31T23:34:56.789012345678Z'),
            ('fs', '1969-12-31T23:34:56.789012345678901Z')):
        assert_equal(np.datetime_as_string(np.datetime64(stamp, unit)), expected)
    stamp = '1969-12-31T23:59:57.789012345678901234Z'
    assert_equal(np.datetime_as_string(np.datetime64(stamp, 'as')), stamp)
    # Sub-microsecond units, just after the epoch
    stamp = '1970-01-01T00:34:56.789012345678901234Z'
    for unit, expected in (
            ('ns', '1970-01-01T00:34:56.789012345Z'),
            ('ps', '1970-01-01T00:34:56.789012345678Z'),
            ('fs', '1970-01-01T00:34:56.789012345678901Z')):
        assert_equal(np.datetime_as_string(np.datetime64(stamp, unit)), expected)
    stamp = '1970-01-01T00:00:05.789012345678901234Z'
    assert_equal(np.datetime_as_string(np.datetime64(stamp, 'as')), stamp)
    # Explicit unit= parameter; units coarser than the stored one
    # require casting='unsafe'
    value = np.datetime64('2032-07-18T12:23:34.123456Z', 'us')
    for unit, expected in (('Y', '2032'), ('M', '2032-07'),
                           ('W', '2032-07-18'), ('D', '2032-07-18')):
        assert_equal(np.datetime_as_string(value, unit=unit, casting='unsafe'),
                     expected)
    for unit, expected in (
            ('h', '2032-07-18T12Z'),
            ('m', '2032-07-18T12:23Z'),
            ('s', '2032-07-18T12:23:34Z'),
            ('ms', '2032-07-18T12:23:34.123Z'),
            ('us', '2032-07-18T12:23:34.123456Z'),
            ('ns', '2032-07-18T12:23:34.123456000Z'),
            ('ps', '2032-07-18T12:23:34.123456000000Z'),
            ('fs', '2032-07-18T12:23:34.123456000000000Z'),
            ('as', '2032-07-18T12:23:34.123456000000000000Z')):
        assert_equal(np.datetime_as_string(value, unit=unit), expected)
    # unit='auto' trims trailing zero components, but never splits the
    # hour from the minute, nor the date itself
    for text, expected in (
            ('2032-07-18T12:23:34.123456Z', '2032-07-18T12:23:34.123456Z'),
            ('2032-07-18T12:23:34.12Z', '2032-07-18T12:23:34.120Z'),
            ('2032-07-18T12:23:34Z', '2032-07-18T12:23:34Z'),
            ('2032-07-18T12:23:00Z', '2032-07-18T12:23Z'),
            ('2032-07-18T12:00:00Z', '2032-07-18T12:00Z'),
            ('2032-07-18T00:00:00Z', '2032-07-18'),
            ('2032-07-01T00:00:00Z', '2032-07-01'),
            ('2032-01-01T00:00:00Z', '2032-01-01')):
        assert_equal(
            np.datetime_as_string(np.datetime64(text, 'us'), unit='auto'),
            expected)
@dec.skipif(not _has_pytz, "The pytz module is not available.")
def test_datetime_as_string_timezone(self):
    """Check np.datetime_as_string with UTC, local, and pytz timezones."""
    # timezone='UTC' keeps the value; 'local' generally shifts it
    mar = np.datetime64('2010-03-15T06:30Z', 'm')
    assert_equal(np.datetime_as_string(mar, timezone='UTC'),
                 '2010-03-15T06:30Z')
    assert_(np.datetime_as_string(mar, timezone='local') !=
            '2010-03-15T06:30Z')
    feb = np.datetime64('2010-02-15T06:30Z', 'm')
    # Named US zones: March is DST, February is standard time
    for value, zone, expected in (
            (mar, 'US/Central', '2010-03-15T01:30-0500'),
            (mar, 'US/Eastern', '2010-03-15T02:30-0400'),
            (mar, 'US/Pacific', '2010-03-14T23:30-0700'),
            (feb, 'US/Central', '2010-02-15T00:30-0600'),
            (feb, 'US/Eastern', '2010-02-15T01:30-0500'),
            (feb, 'US/Pacific', '2010-02-14T22:30-0800')):
        assert_equal(np.datetime_as_string(value, timezone=tz(zone)),
                     expected)
    # Converting down to a date with a timezone attached is disabled by
    # default and requires casting='unsafe'
    assert_raises(TypeError, np.datetime_as_string, mar, unit='D',
                  timezone=tz('US/Pacific'))
    assert_equal(np.datetime_as_string(mar, unit='D',
                                       timezone=tz('US/Pacific'),
                                       casting='unsafe'),
                 '2010-03-14')
    assert_equal(np.datetime_as_string(feb, unit='D',
                                       timezone=tz('US/Central'),
                                       casting='unsafe'),
                 '2010-02-15')
def test_datetime_arange(self):
    """Check np.arange over datetime64 values."""
    # Two datetimes provided as strings
    result = np.arange('2010-01-05', '2010-01-10', dtype='M8[D]')
    assert_equal(result.dtype, np.dtype('M8[D]'))
    assert_equal(result,
                 np.array(['2010-01-05', '2010-01-06', '2010-01-07',
                           '2010-01-08', '2010-01-09'], dtype='M8[D]'))
    # A negative step counts downward
    result = np.arange('1950-02-10', '1950-02-06', -1, dtype='M8[D]')
    assert_equal(result.dtype, np.dtype('M8[D]'))
    assert_equal(result,
                 np.array(['1950-02-10', '1950-02-09', '1950-02-08',
                           '1950-02-07'], dtype='M8[D]'))
    # Generic 'M8' detects the unit (months) from the inputs
    result = np.arange('1969-05', '1970-05', 2, dtype='M8')
    assert_equal(result.dtype, np.dtype('M8[M]'))
    assert_equal(result,
                 np.datetime64('1969-05') + np.arange(12, step=2))
    # datetime start with an integer or timedelta stop produces the
    # range [start, start + stop)
    result = np.arange('1969', 18, 3, dtype='M8')
    assert_equal(result.dtype, np.dtype('M8[Y]'))
    assert_equal(result,
                 np.datetime64('1969') + np.arange(18, step=3))
    result = np.arange('1969-12-19', 22, np.timedelta64(2), dtype='M8')
    assert_equal(result.dtype, np.dtype('M8[D]'))
    assert_equal(result,
                 np.datetime64('1969-12-19') + np.arange(22, step=2))
    # A step of 0 is rejected
    assert_raises(ValueError, np.arange, np.datetime64('today'),
                  np.datetime64('today') + 3, 0)
    # Promotion across nonlinear unit boundaries is rejected
    assert_raises(TypeError, np.arange, np.datetime64('2011-03-01', 'D'),
                  np.timedelta64(5, 'M'))
    assert_raises(TypeError, np.arange,
                  np.datetime64('2012-02-03T14Z', 's'),
                  np.timedelta64(5, 'Y'))
def test_datetime_arange_no_dtype(self):
d = np.array('2010-01-04', dtype="M8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_timedelta_arange(self):
a = np.arange(3, 10, dtype='m8')
assert_equal(a.dtype, np.dtype('m8'))
assert_equal(a, np.timedelta64(0) + np.arange(3, 10))
a = np.arange(np.timedelta64(3, 's'), 10, 2, dtype='m8')
assert_equal(a.dtype, np.dtype('m8[s]'))
assert_equal(a, np.timedelta64(0, 's') + np.arange(3, 10, 2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.timedelta64(0),
np.timedelta64(5), 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.timedelta64(0, 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange, np.timedelta64(0, 'Y'),
np.timedelta64(5, 'D'))
def test_timedelta_arange_no_dtype(self):
d = np.array(5, dtype="m8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_datetime_maximum_reduce(self):
a = np.array(['2010-01-02', '1999-03-14', '1833-03'], dtype='M8[D]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('M8[D]'))
assert_equal(np.maximum.reduce(a),
np.datetime64('2010-01-02'))
a = np.array([1, 4, 0, 7, 2], dtype='m8[s]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('m8[s]'))
assert_equal(np.maximum.reduce(a),
np.timedelta64(7, 's'))
def test_datetime_busday_offset(self):
# First Monday in June
assert_equal(
np.busday_offset('2011-06', 0, roll='forward', weekmask='Mon'),
np.datetime64('2011-06-06'))
# Last Monday in June
assert_equal(
np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
np.datetime64('2011-06-27'))
assert_equal(
np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
np.datetime64('2011-06-27'))
# Default M-F business days, different roll modes
assert_equal(np.busday_offset('2010-08', 0, roll='backward'),
np.datetime64('2010-07-30'))
assert_equal(np.busday_offset('2010-08', 0, roll='preceding'),
np.datetime64('2010-07-30'))
assert_equal(np.busday_offset('2010-08', 0, roll='modifiedpreceding'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='modifiedfollowing'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='forward'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='following'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-10-30', 0, roll='following'),
np.datetime64('2010-11-01'))
assert_equal(
np.busday_offset('2010-10-30', 0, roll='modifiedfollowing'),
np.datetime64('2010-10-29'))
assert_equal(
np.busday_offset('2010-10-30', 0, roll='modifiedpreceding'),
np.datetime64('2010-10-29'))
# roll='raise' by default
assert_raises(ValueError, np.busday_offset, '2011-06-04', 0)
# Bigger offset values
assert_equal(np.busday_offset('2006-02-01', 25),
np.datetime64('2006-03-08'))
assert_equal(np.busday_offset('2006-03-08', -25),
np.datetime64('2006-02-01'))
assert_equal(np.busday_offset('2007-02-25', 11, weekmask='SatSun'),
np.datetime64('2007-04-07'))
assert_equal(np.busday_offset('2007-04-07', -11, weekmask='SatSun'),
np.datetime64('2007-02-25'))
def test_datetime_busdaycalendar(self):
    # Check that it removes NaT, duplicates, and weekends
    # and sorts the result.  ('2011-03-06' is a Sunday, dropped below.)
    bdd = np.busdaycalendar(
        holidays=['NaT', '2011-01-17', '2011-03-06', 'NaT',
                  '2011-12-26', '2011-05-30', '2011-01-17'])
    assert_equal(bdd.holidays,
                 np.array(['2011-01-17', '2011-05-30', '2011-12-26'], dtype='M8'))
    # Default M-F weekmask
    assert_equal(bdd.weekmask, np.array([1, 1, 1, 1, 1, 0, 0], dtype='?'))
    # Check string weekmask with varying whitespace.
    bdd = np.busdaycalendar(weekmask="Sun TueWed Thu\tFri")
    assert_equal(bdd.weekmask, np.array([0, 1, 1, 1, 1, 0, 1], dtype='?'))
    # Check length-7 0/1 string
    bdd = np.busdaycalendar(weekmask="0011001")
    assert_equal(bdd.weekmask, np.array([0, 0, 1, 1, 0, 0, 1], dtype='?'))
    # Abbreviated-name weekmask need not mention every day
    bdd = np.busdaycalendar(weekmask="Mon Tue")
    assert_equal(bdd.weekmask, np.array([1, 1, 0, 0, 0, 0, 0], dtype='?'))
    # All-zeros weekmask should raise
    assert_raises(ValueError, np.busdaycalendar, weekmask=[0, 0, 0, 0, 0, 0, 0])
    # weekday names must be correct case
    assert_raises(ValueError, np.busdaycalendar, weekmask="satsun")
    # An empty weekmask should raise
    assert_raises(ValueError, np.busdaycalendar, weekmask="")
    # Invalid weekday name codes should raise
    assert_raises(ValueError, np.busdaycalendar, weekmask="Mon Tue We")
    assert_raises(ValueError, np.busdaycalendar, weekmask="Max")
    assert_raises(ValueError, np.busdaycalendar, weekmask="Monday Tue")
def test_datetime_busday_holidays_offset(self):
# With exactly one holiday
assert_equal(
np.busday_offset('2011-11-10', 1, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-04', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-10', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-18'))
assert_equal(
np.busday_offset('2011-11-14', -1, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-18', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-14', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-04'))
# With the holiday appearing twice
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-10'))
# With a NaT holiday
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', 'NaT']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['NaT', '2011-11-11']),
np.datetime64('2011-11-10'))
# With another holiday after
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
# With another holiday before
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-10'))
# With another holiday before and after
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
# A bigger forward jump across more than one week/holiday
holidays=['2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21',
'2011-12-26', '2012-01-02']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
assert_equal(
np.busday_offset('2011-10-03', 4, holidays=holidays),
np.busday_offset('2011-10-03', 4))
assert_equal(
np.busday_offset('2011-10-03', 5, holidays=holidays),
np.busday_offset('2011-10-03', 5 + 1))
assert_equal(
np.busday_offset('2011-10-03', 27, holidays=holidays),
np.busday_offset('2011-10-03', 27 + 1))
assert_equal(
np.busday_offset('2011-10-03', 28, holidays=holidays),
np.busday_offset('2011-10-03', 28 + 2))
assert_equal(
np.busday_offset('2011-10-03', 35, holidays=holidays),
np.busday_offset('2011-10-03', 35 + 2))
assert_equal(
np.busday_offset('2011-10-03', 36, holidays=holidays),
np.busday_offset('2011-10-03', 36 + 3))
assert_equal(
np.busday_offset('2011-10-03', 56, holidays=holidays),
np.busday_offset('2011-10-03', 56 + 3))
assert_equal(
np.busday_offset('2011-10-03', 57, holidays=holidays),
np.busday_offset('2011-10-03', 57 + 4))
assert_equal(
np.busday_offset('2011-10-03', 60, holidays=holidays),
np.busday_offset('2011-10-03', 60 + 4))
assert_equal(
np.busday_offset('2011-10-03', 61, holidays=holidays),
np.busday_offset('2011-10-03', 61 + 5))
assert_equal(
np.busday_offset('2011-10-03', 61, busdaycal=bdd),
np.busday_offset('2011-10-03', 61 + 5))
# A bigger backward jump across more than one week/holiday
assert_equal(
np.busday_offset('2012-01-03', -1, holidays=holidays),
np.busday_offset('2012-01-03', -1 - 1))
assert_equal(
np.busday_offset('2012-01-03', -4, holidays=holidays),
np.busday_offset('2012-01-03', -4 - 1))
assert_equal(
np.busday_offset('2012-01-03', -5, holidays=holidays),
np.busday_offset('2012-01-03', -5 - 2))
assert_equal(
np.busday_offset('2012-01-03', -25, holidays=holidays),
np.busday_offset('2012-01-03', -25 - 2))
assert_equal(
np.busday_offset('2012-01-03', -26, holidays=holidays),
np.busday_offset('2012-01-03', -26 - 3))
assert_equal(
np.busday_offset('2012-01-03', -33, holidays=holidays),
np.busday_offset('2012-01-03', -33 - 3))
assert_equal(
np.busday_offset('2012-01-03', -34, holidays=holidays),
np.busday_offset('2012-01-03', -34 - 4))
assert_equal(
np.busday_offset('2012-01-03', -56, holidays=holidays),
np.busday_offset('2012-01-03', -56 - 4))
assert_equal(
np.busday_offset('2012-01-03', -57, holidays=holidays),
np.busday_offset('2012-01-03', -57 - 5))
assert_equal(
np.busday_offset('2012-01-03', -57, busdaycal=bdd),
np.busday_offset('2012-01-03', -57 - 5))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
holidays=holidays, busdaycal=bdd)
# Roll with the holidays
assert_equal(
np.busday_offset('2011-12-25', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='backward', holidays=holidays),
np.datetime64('2011-12-23'))
assert_equal(
np.busday_offset('2012-02-27', 0,
roll='modifiedfollowing',
holidays=['2012-02-27', '2012-02-26', '2012-02-28',
'2012-03-01', '2012-02-29']),
np.datetime64('2012-02-24'))
assert_equal(
np.busday_offset('2012-03-06', 0,
roll='modifiedpreceding',
holidays=['2012-03-02', '2012-03-03', '2012-03-01',
'2012-03-05', '2012-03-07', '2012-03-06']),
np.datetime64('2012-03-08'))
def test_datetime_busday_holidays_count(self):
holidays=['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Validate against busday_offset broadcast against
# a range of offsets
dates = np.busday_offset('2011-01-01', np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count('2011-01-01', dates, busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count(dates, '2011-01-01', busdaycal=bdd),
-np.arange(366))
dates = np.busday_offset('2011-12-31', -np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count(dates, '2011-12-31', busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count('2011-12-31', dates, busdaycal=bdd),
-np.arange(366))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
holidays=holidays, busdaycal=bdd)
# Number of Mondays in March 2011
assert_equal(np.busday_count('2011-03', '2011-04', weekmask='Mon'), 4)
# Returns negative value when reversed
assert_equal(np.busday_count('2011-04', '2011-03', weekmask='Mon'), -4)
def test_datetime_is_busday(self):
holidays=['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10',
'NaT']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Weekend/weekday tests
assert_equal(np.is_busday('2011-01-01'), False)
assert_equal(np.is_busday('2011-01-02'), False)
assert_equal(np.is_busday('2011-01-03'), True)
# All the holidays are not business days
assert_equal(np.is_busday(holidays, busdaycal=bdd),
np.zeros(len(holidays), dtype='?'))
def test_datetime_y2038(self):
    """Parsing must be exact on either side of the 32-bit time_t limit."""
    # UTC timestamps straddling the Y2038 boundary
    boundary = np.datetime64('2038-01-19T03:14:07Z')
    assert_equal(boundary.view(np.int64), 2**31 - 1)
    boundary = np.datetime64('2038-01-19T03:14:08Z')
    assert_equal(boundary.view(np.int64), 2**31)
    # The same boundary expressed with an explicit timezone offset
    boundary = np.datetime64('2038-01-19T04:14:07+0100')
    assert_equal(boundary.view(np.int64), 2**31 - 1)
    boundary = np.datetime64('2038-01-19T04:14:08+0100')
    assert_equal(boundary.view(np.int64), 2**31)
    # A post-Y2038 date parsed in the local timezone; drop the
    # trailing "+hhmm"/"-hhmm" offset before comparing
    local = np.datetime64('2038-01-20T13:21:14')
    assert_equal(str(local)[:-5], '2038-01-20T13:21:14')
class TestDateTimeData(TestCase):
    """Tests for np.datetime_data dtype introspection."""

    def test_basic(self):
        # A bare date string is detected as day ('D') resolution
        arr = np.array(['1980-03-23'], dtype=np.datetime64)
        assert_equal(np.datetime_data(arr.dtype), ('D', 1))
if __name__ == "__main__":
    # Run this test module's suite directly from the command line.
    run_module_suite()
| mit |
googleapis/googleapis-gen | google/cloud/bigquery/reservation/v1/bigquery-reservation-v1-py/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py | 1 | 40551 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.bigquery_reservation_v1.types import reservation
from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
from google.protobuf import empty_pb2 # type: ignore
from .base import ReservationServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import ReservationServiceGrpcTransport
class ReservationServiceGrpcAsyncIOTransport(ReservationServiceTransport):
"""gRPC AsyncIO backend transport for ReservationService.
This API allows users to manage their flat-rate BigQuery
reservations.
A reservation provides computational resource guarantees, in the
form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
users. A slot is a unit of computational power in BigQuery, and
serves as the basic unit of parallelism. In a scan of a
multi-partitioned table, a single slot operates on a single
partition of the table. A reservation resource exists as a child
resource of the admin project and location, e.g.:
``projects/myproject/locations/US/reservations/reservationName``.
A capacity commitment is a way to purchase compute capacity for
BigQuery jobs (in the form of slots) with some committed period of
usage. A capacity commitment resource exists as a child resource of
the admin project and location, e.g.:
``projects/myproject/locations/US/capacityCommitments/id``.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
                   host: str = 'bigqueryreservation.googleapis.com',
                   credentials: Optional[ga_credentials.Credentials] = None,
                   credentials_file: Optional[str] = None,
                   scopes: Optional[Sequence[str]] = None,
                   quota_project_id: Optional[str] = None,
                   **kwargs) -> aio.Channel:
    """Create and return a gRPC AsyncIO channel object.

    Args:
        host (Optional[str]): The host for the channel to use.
        credentials (Optional[~.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify this application to the service. If
            none are specified, the client will attempt to ascertain
            the credentials from the environment.
        credentials_file (Optional[str]): A file with credentials that can
            be loaded with :func:`google.auth.load_credentials_from_file`.
            This argument is ignored if ``channel`` is provided.
        scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
            service. These are only used when credentials are not specified and
            are passed to :func:`google.auth.default`.
        quota_project_id (Optional[str]): An optional project to use for billing
            and quota.
        kwargs (Optional[dict]): Keyword arguments, which are passed to the
            channel creation.
    Returns:
        aio.Channel: A gRPC AsyncIO channel object.
    """
    # Delegate entirely to the async gRPC helpers; the class-level
    # AUTH_SCOPES / DEFAULT_HOST act as fallbacks when the caller
    # supplies no explicit scopes or host.
    return grpc_helpers_async.create_channel(
        host,
        credentials=credentials,
        credentials_file=credentials_file,
        quota_project_id=quota_project_id,
        default_scopes=cls.AUTH_SCOPES,
        scopes=scopes,
        default_host=cls.DEFAULT_HOST,
        **kwargs
    )
def __init__(self, *,
             host: str = 'bigqueryreservation.googleapis.com',
             credentials: Optional[ga_credentials.Credentials] = None,
             credentials_file: Optional[str] = None,
             scopes: Optional[Sequence[str]] = None,
             channel: Optional[aio.Channel] = None,
             api_mtls_endpoint: Optional[str] = None,
             client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
             ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
             client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
             quota_project_id: Optional[str] = None,
             client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
             always_use_jwt_access: Optional[bool] = False,
             ) -> None:
    """Instantiate the transport.

    Args:
        host (Optional[str]):
             The hostname to connect to.
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify the application to the service; if none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
            This argument is ignored if ``channel`` is provided.
        credentials_file (Optional[str]): A file with credentials that can
            be loaded with :func:`google.auth.load_credentials_from_file`.
            This argument is ignored if ``channel`` is provided.
        scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
            service. These are only used when credentials are not specified and
            are passed to :func:`google.auth.default`.
        channel (Optional[aio.Channel]): A ``Channel`` instance through
            which to make calls.
        api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
            If provided, it overrides the ``host`` argument and tries to create
            a mutual TLS channel with client SSL credentials from
            ``client_cert_source`` or application default SSL credentials.
        client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
            Deprecated. A callback to provide client SSL certificate bytes and
            private key bytes, both in PEM format. It is ignored if
            ``api_mtls_endpoint`` is None.
        ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
            for grpc channel. It is ignored if ``channel`` is provided.
        client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
            A callback to provide client certificate bytes and private key bytes,
            both in PEM format. It is used to configure mutual TLS channel. It is
            ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
        quota_project_id (Optional[str]): An optional project to use for billing
            and quota.
        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.
        always_use_jwt_access (Optional[bool]): Whether self signed JWT should
            be used for service account credentials.

    Raises:
        google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
            creation failed for any reason.
        google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
            and ``credentials_file`` are passed.
    """
    self._grpc_channel = None
    self._ssl_channel_credentials = ssl_channel_credentials
    # Per-method stub cache, populated lazily by the stub properties.
    self._stubs: Dict[str, Callable] = {}

    if api_mtls_endpoint:
        warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
    if client_cert_source:
        warnings.warn("client_cert_source is deprecated", DeprecationWarning)

    if channel:
        # Ignore credentials if a channel was passed.
        credentials = False
        # If a channel was explicitly provided, set it.
        self._grpc_channel = channel
        self._ssl_channel_credentials = None
    else:
        # No channel supplied: work out which SSL credentials to use.
        if api_mtls_endpoint:
            # Deprecated mTLS path: the endpoint overrides ``host``.
            host = api_mtls_endpoint

            # Create SSL credentials with client_cert_source or application
            # default SSL credentials.
            if client_cert_source:
                cert, key = client_cert_source()
                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
            else:
                self._ssl_channel_credentials = SslCredentials().ssl_credentials
        else:
            # Modern mTLS path; explicit ssl_channel_credentials wins.
            if client_cert_source_for_mtls and not ssl_channel_credentials:
                cert, key = client_cert_source_for_mtls()
                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )

    # The base transport sets the host, credentials and scopes
    super().__init__(
        host=host,
        credentials=credentials,
        credentials_file=credentials_file,
        scopes=scopes,
        quota_project_id=quota_project_id,
        client_info=client_info,
        always_use_jwt_access=always_use_jwt_access,
    )

    if not self._grpc_channel:
        # Create the channel only when none was provided by the caller.
        self._grpc_channel = type(self).create_channel(
            self._host,
            credentials=self._credentials,
            credentials_file=credentials_file,
            scopes=self._scopes,
            ssl_credentials=self._ssl_channel_credentials,
            quota_project_id=quota_project_id,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Wrap messages. This must be done after self._grpc_channel exists
    self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
    """Return the channel used to connect to this service.

    The channel is established during ``__init__``; this accessor
    simply hands back that cached instance on every call.
    """
    return self._grpc_channel
@property
def create_reservation(self) -> Callable[
        [gcbr_reservation.CreateReservationRequest],
        Awaitable[gcbr_reservation.Reservation]]:
    r"""Return a callable for the create reservation method over gRPC.

    Creates a new reservation resource.

    Returns:
        Callable[[~.CreateReservationRequest],
            Awaitable[~.Reservation]]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Build the stub lazily and cache it; gRPC handles serialization
    # and deserialization, so only the per-message hooks are supplied.
    stub = self._stubs.get('create_reservation')
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservation',
            request_serializer=gcbr_reservation.CreateReservationRequest.serialize,
            response_deserializer=gcbr_reservation.Reservation.deserialize,
        )
        self._stubs['create_reservation'] = stub
    return stub
@property
def list_reservations(self) -> Callable[
        [reservation.ListReservationsRequest],
        Awaitable[reservation.ListReservationsResponse]]:
    r"""Return a callable for the list reservations method over gRPC.

    Lists all the reservations for the project in the
    specified location.

    Returns:
        Callable[[~.ListReservationsRequest],
            Awaitable[~.ListReservationsResponse]]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Build the stub lazily and cache it; gRPC handles serialization
    # and deserialization, so only the per-message hooks are supplied.
    stub = self._stubs.get('list_reservations')
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/ListReservations',
            request_serializer=reservation.ListReservationsRequest.serialize,
            response_deserializer=reservation.ListReservationsResponse.deserialize,
        )
        self._stubs['list_reservations'] = stub
    return stub
@property
def get_reservation(self) -> Callable[
        [reservation.GetReservationRequest],
        Awaitable[reservation.Reservation]]:
    r"""Return a callable for the get reservation method over gRPC.

    Returns information about the reservation.

    Returns:
        Callable[[~.GetReservationRequest],
            Awaitable[~.Reservation]]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Build the stub lazily and cache it; gRPC handles serialization
    # and deserialization, so only the per-message hooks are supplied.
    stub = self._stubs.get('get_reservation')
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/GetReservation',
            request_serializer=reservation.GetReservationRequest.serialize,
            response_deserializer=reservation.Reservation.deserialize,
        )
        self._stubs['get_reservation'] = stub
    return stub
@property
def delete_reservation(self) -> Callable[
        [reservation.DeleteReservationRequest],
        Awaitable[empty_pb2.Empty]]:
    r"""Return a callable for the delete reservation method over gRPC.

    Deletes a reservation. Returns
    ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has
    assignments.

    Returns:
        Callable[[~.DeleteReservationRequest],
                Awaitable[~.Empty]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['delete_reservation']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservation',
            request_serializer=reservation.DeleteReservationRequest.serialize,
            response_deserializer=empty_pb2.Empty.FromString,
        )
        self._stubs['delete_reservation'] = stub
        return stub
@property
def update_reservation(self) -> Callable[
        [gcbr_reservation.UpdateReservationRequest],
        Awaitable[gcbr_reservation.Reservation]]:
    r"""Return a callable for the update reservation method over gRPC.

    Updates an existing reservation resource.

    Returns:
        Callable[[~.UpdateReservationRequest],
                Awaitable[~.Reservation]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['update_reservation']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateReservation',
            request_serializer=gcbr_reservation.UpdateReservationRequest.serialize,
            response_deserializer=gcbr_reservation.Reservation.deserialize,
        )
        self._stubs['update_reservation'] = stub
        return stub
@property
def create_capacity_commitment(self) -> Callable[
        [reservation.CreateCapacityCommitmentRequest],
        Awaitable[reservation.CapacityCommitment]]:
    r"""Return a callable for the create capacity commitment method over gRPC.

    Creates a new capacity commitment resource.

    Returns:
        Callable[[~.CreateCapacityCommitmentRequest],
                Awaitable[~.CapacityCommitment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['create_capacity_commitment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/CreateCapacityCommitment',
            request_serializer=reservation.CreateCapacityCommitmentRequest.serialize,
            response_deserializer=reservation.CapacityCommitment.deserialize,
        )
        self._stubs['create_capacity_commitment'] = stub
        return stub
@property
def list_capacity_commitments(self) -> Callable[
        [reservation.ListCapacityCommitmentsRequest],
        Awaitable[reservation.ListCapacityCommitmentsResponse]]:
    r"""Return a callable for the list capacity commitments method over gRPC.

    Lists all the capacity commitments for the admin
    project.

    Returns:
        Callable[[~.ListCapacityCommitmentsRequest],
                Awaitable[~.ListCapacityCommitmentsResponse]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['list_capacity_commitments']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/ListCapacityCommitments',
            request_serializer=reservation.ListCapacityCommitmentsRequest.serialize,
            response_deserializer=reservation.ListCapacityCommitmentsResponse.deserialize,
        )
        self._stubs['list_capacity_commitments'] = stub
        return stub
@property
def get_capacity_commitment(self) -> Callable[
        [reservation.GetCapacityCommitmentRequest],
        Awaitable[reservation.CapacityCommitment]]:
    r"""Return a callable for the get capacity commitment method over gRPC.

    Returns information about the capacity commitment.

    Returns:
        Callable[[~.GetCapacityCommitmentRequest],
                Awaitable[~.CapacityCommitment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['get_capacity_commitment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/GetCapacityCommitment',
            request_serializer=reservation.GetCapacityCommitmentRequest.serialize,
            response_deserializer=reservation.CapacityCommitment.deserialize,
        )
        self._stubs['get_capacity_commitment'] = stub
        return stub
@property
def delete_capacity_commitment(self) -> Callable[
        [reservation.DeleteCapacityCommitmentRequest],
        Awaitable[empty_pb2.Empty]]:
    r"""Return a callable for the delete capacity commitment method over gRPC.

    Deletes a capacity commitment. Attempting to delete capacity
    commitment before its commitment_end_time will fail with the
    error code ``google.rpc.Code.FAILED_PRECONDITION``.

    Returns:
        Callable[[~.DeleteCapacityCommitmentRequest],
                Awaitable[~.Empty]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['delete_capacity_commitment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteCapacityCommitment',
            request_serializer=reservation.DeleteCapacityCommitmentRequest.serialize,
            response_deserializer=empty_pb2.Empty.FromString,
        )
        self._stubs['delete_capacity_commitment'] = stub
        return stub
@property
def update_capacity_commitment(self) -> Callable[
        [reservation.UpdateCapacityCommitmentRequest],
        Awaitable[reservation.CapacityCommitment]]:
    r"""Return a callable for the update capacity commitment method over gRPC.

    Updates an existing capacity commitment.

    Only ``plan`` and ``renewal_plan`` fields can be updated.
    Plan can only be changed to a plan of a longer commitment
    period. Attempting to change to a plan with shorter commitment
    period will fail with the error code
    ``google.rpc.Code.FAILED_PRECONDITION``.

    Returns:
        Callable[[~.UpdateCapacityCommitmentRequest],
                Awaitable[~.CapacityCommitment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['update_capacity_commitment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateCapacityCommitment',
            request_serializer=reservation.UpdateCapacityCommitmentRequest.serialize,
            response_deserializer=reservation.CapacityCommitment.deserialize,
        )
        self._stubs['update_capacity_commitment'] = stub
        return stub
@property
def split_capacity_commitment(self) -> Callable[
        [reservation.SplitCapacityCommitmentRequest],
        Awaitable[reservation.SplitCapacityCommitmentResponse]]:
    r"""Return a callable for the split capacity commitment method over gRPC.

    Splits capacity commitment to two commitments of the same plan
    and ``commitment_end_time``.

    A common use case is to enable downgrading commitments.
    For example, in order to downgrade from 10000 slots to 8000, you
    might split a 10000 capacity commitment into commitments of 2000
    and 8000. Then, you would change the plan of the first one to
    ``FLEX`` and then delete it.

    Returns:
        Callable[[~.SplitCapacityCommitmentRequest],
                Awaitable[~.SplitCapacityCommitmentResponse]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['split_capacity_commitment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/SplitCapacityCommitment',
            request_serializer=reservation.SplitCapacityCommitmentRequest.serialize,
            response_deserializer=reservation.SplitCapacityCommitmentResponse.deserialize,
        )
        self._stubs['split_capacity_commitment'] = stub
        return stub
@property
def merge_capacity_commitments(self) -> Callable[
        [reservation.MergeCapacityCommitmentsRequest],
        Awaitable[reservation.CapacityCommitment]]:
    r"""Return a callable for the merge capacity commitments method over gRPC.

    Merges capacity commitments of the same plan into a single
    commitment.

    The resulting capacity commitment has the greater
    commitment_end_time out of the to-be-merged capacity
    commitments.

    Attempting to merge capacity commitments of different plan will
    fail with the error code
    ``google.rpc.Code.FAILED_PRECONDITION``.

    Returns:
        Callable[[~.MergeCapacityCommitmentsRequest],
                Awaitable[~.CapacityCommitment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['merge_capacity_commitments']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/MergeCapacityCommitments',
            request_serializer=reservation.MergeCapacityCommitmentsRequest.serialize,
            response_deserializer=reservation.CapacityCommitment.deserialize,
        )
        self._stubs['merge_capacity_commitments'] = stub
        return stub
@property
def create_assignment(self) -> Callable[
        [reservation.CreateAssignmentRequest],
        Awaitable[reservation.Assignment]]:
    r"""Return a callable for the create assignment method over gRPC.

    Creates an assignment object which allows the given project to
    submit jobs of a certain type using slots from the specified
    reservation.

    Currently a resource (project, folder, organization) can only
    have one assignment per each (job_type, location) combination,
    and that reservation will be used for all jobs of the matching
    type.

    Different assignments can be created on different levels of the
    projects, folders or organization hierarchy. During query
    execution, the assignment is looked up at the project, folder
    and organization levels in that order. The first assignment
    found is applied to the query.

    When creating assignments, it does not matter if other
    assignments exist at higher levels. Example:

    -  The organization ``organizationA`` contains two projects,
       ``project1`` and ``project2``.
    -  Assignments for all three entities (``organizationA``,
       ``project1``, and ``project2``) could all be created and
       mapped to the same or different reservations.

    Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not
    have 'bigquery.admin' permissions on the project using the
    reservation and the project that owns this reservation.

    Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of
    the assignment does not match location of the reservation.

    Returns:
        Callable[[~.CreateAssignmentRequest],
                Awaitable[~.Assignment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['create_assignment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/CreateAssignment',
            request_serializer=reservation.CreateAssignmentRequest.serialize,
            response_deserializer=reservation.Assignment.deserialize,
        )
        self._stubs['create_assignment'] = stub
        return stub
@property
def list_assignments(self) -> Callable[
        [reservation.ListAssignmentsRequest],
        Awaitable[reservation.ListAssignmentsResponse]]:
    r"""Return a callable for the list assignments method over gRPC.

    Lists assignments. Only explicitly created assignments will be
    returned. Example:

    -  Organization ``organizationA`` contains two projects,
       ``project1`` and ``project2``.
    -  Reservation ``res1`` exists and was created previously.
    -  CreateAssignment was used previously to define the following
       associations between entities and reservations:
       ``<organizationA, res1>`` and ``<project1, res1>``

    In this example, ListAssignments will just return the above two
    assignments for reservation ``res1``, and no expansion/merge
    will happen.

    The wildcard "-" can be used for reservations in the request. In
    that case all assignments belongs to the specified project and
    location will be listed.

    **Note** "-" cannot be used for projects nor locations.

    Returns:
        Callable[[~.ListAssignmentsRequest],
                Awaitable[~.ListAssignmentsResponse]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['list_assignments']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/ListAssignments',
            request_serializer=reservation.ListAssignmentsRequest.serialize,
            response_deserializer=reservation.ListAssignmentsResponse.deserialize,
        )
        self._stubs['list_assignments'] = stub
        return stub
@property
def delete_assignment(self) -> Callable[
        [reservation.DeleteAssignmentRequest],
        Awaitable[empty_pb2.Empty]]:
    r"""Return a callable for the delete assignment method over gRPC.

    Deletes a assignment. No expansion will happen. Example:

    -  Organization ``organizationA`` contains two projects,
       ``project1`` and ``project2``.
    -  Reservation ``res1`` exists and was created previously.
    -  CreateAssignment was used previously to define the following
       associations between entities and reservations:
       ``<organizationA, res1>`` and ``<project1, res1>``

    In this example, deletion of the ``<organizationA, res1>``
    assignment won't affect the other assignment
    ``<project1, res1>``. After said deletion, queries from
    ``project1`` will still use ``res1`` while queries from
    ``project2`` will switch to use on-demand mode.

    Returns:
        Callable[[~.DeleteAssignmentRequest],
                Awaitable[~.Empty]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['delete_assignment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteAssignment',
            request_serializer=reservation.DeleteAssignmentRequest.serialize,
            response_deserializer=empty_pb2.Empty.FromString,
        )
        self._stubs['delete_assignment'] = stub
        return stub
@property
def search_assignments(self) -> Callable[
        [reservation.SearchAssignmentsRequest],
        Awaitable[reservation.SearchAssignmentsResponse]]:
    r"""Return a callable for the search assignments method over gRPC.

    Looks up assignments for a specified resource for a particular
    region. If the request is about a project:

    1. Assignments created on the project will be returned if they
       exist.
    2. Otherwise assignments created on the closest ancestor will be
       returned.
    3. Assignments for different JobTypes will all be returned.

    The same logic applies if the request is about a folder.

    If the request is about an organization, then assignments
    created on the organization will be returned (organization
    doesn't have ancestors).

    Comparing to ListAssignments, there are some behavior
    differences:

    1. permission on the assignee will be verified in this API.
    2. Hierarchy lookup (project->folder->organization) happens in
       this API.
    3. Parent here is ``projects/*/locations/*``, instead of
       ``projects/*/locations/*reservations/*``.

    **Note** "-" cannot be used for projects nor locations.

    Returns:
        Callable[[~.SearchAssignmentsRequest],
                Awaitable[~.SearchAssignmentsResponse]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['search_assignments']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/SearchAssignments',
            request_serializer=reservation.SearchAssignmentsRequest.serialize,
            response_deserializer=reservation.SearchAssignmentsResponse.deserialize,
        )
        self._stubs['search_assignments'] = stub
        return stub
@property
def move_assignment(self) -> Callable[
        [reservation.MoveAssignmentRequest],
        Awaitable[reservation.Assignment]]:
    r"""Return a callable for the move assignment method over gRPC.

    Moves an assignment under a new reservation.

    This differs from removing an existing assignment and
    recreating a new one by providing a transactional change
    that ensures an assignee always has an associated
    reservation.

    Returns:
        Callable[[~.MoveAssignmentRequest],
                Awaitable[~.Assignment]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['move_assignment']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/MoveAssignment',
            request_serializer=reservation.MoveAssignmentRequest.serialize,
            response_deserializer=reservation.Assignment.deserialize,
        )
        self._stubs['move_assignment'] = stub
        return stub
@property
def get_bi_reservation(self) -> Callable[
        [reservation.GetBiReservationRequest],
        Awaitable[reservation.BiReservation]]:
    r"""Return a callable for the get bi reservation method over gRPC.

    Retrieves a BI reservation.

    Returns:
        Callable[[~.GetBiReservationRequest],
                Awaitable[~.BiReservation]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['get_bi_reservation']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/GetBiReservation',
            request_serializer=reservation.GetBiReservationRequest.serialize,
            response_deserializer=reservation.BiReservation.deserialize,
        )
        self._stubs['get_bi_reservation'] = stub
        return stub
@property
def update_bi_reservation(self) -> Callable[
        [reservation.UpdateBiReservationRequest],
        Awaitable[reservation.BiReservation]]:
    r"""Return a callable for the update bi reservation method over gRPC.

    Updates a BI reservation.

    Only fields specified in the ``field_mask`` are updated.

    A singleton BI reservation always exists with default size 0. In
    order to reserve BI capacity it needs to be updated to an amount
    greater than 0. In order to release BI capacity reservation size
    must be set to 0.

    Returns:
        Callable[[~.UpdateBiReservationRequest],
                Awaitable[~.BiReservation]]:
            A function that, when called, will call the underlying RPC
            on the server.
    """
    # Lazily create and memoize the stub on first access. gRPC handles
    # serialization/deserialization; only the hooks are supplied here.
    try:
        return self._stubs['update_bi_reservation']
    except KeyError:
        stub = self.grpc_channel.unary_unary(
            '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateBiReservation',
            request_serializer=reservation.UpdateBiReservationRequest.serialize,
            response_deserializer=reservation.BiReservation.deserialize,
        )
        self._stubs['update_bi_reservation'] = stub
        return stub
# Public API of this module: only the AsyncIO gRPC transport class is exported.
__all__ = (
    'ReservationServiceGrpcAsyncIOTransport',
)
| apache-2.0 |
atbaker/django-example | django-example/wsgi.py | 30 | 1460 | """
WSGI config for project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os

# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = ".settings"
# NOTE: these two environment variables MUST be set before importing
# configurations.wsgi below — the import reads them to pick the settings
# module and the django-configurations class; do not reorder.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config")
os.environ.setdefault("DJANGO_CONFIGURATION", "Production")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
# The import is deliberately placed after the environment setup (see above).
from configurations.wsgi import get_wsgi_application
# Module-level WSGI entry point discovered by WSGI servers.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application) | mit |
moiseslorap/RIT | Computer Science 1/Labs/lab7/implementation/rit_lib.py | 2 | 20621 | """
$Id: rit_lib.py,v 3.2 2015/09/04 13:20:59 jeh Exp $
An alternative to the namedtuple construct in the Python collections module.
This module creates classes with a fixed set of slots
For historical reasons, this library is known as the "quick class" library.
There are however two ways to build a quick class in this framework:
1. Inherit the struct class defined here.
2. Create a class by calling the quickClass function.
The only differences between using a quick class and a normal class
definition are as follows.
1. A default constructor is provided that takes positional or keyword
arguments to initialize the slots declared.
2. The methods __str__, __repr__, __eq__, and __ne__ are predefined.
3. The types of the class's slots (attributes) can optionally be defined
(via the _slots class variable in a class declaration or additional
arguments to the quickClass function) and checked at run time.
The differences between a class created through this package and one created
through collections.namedtuple are as follows.
1. Objects created via this library are not iterable.
2. The attributes in objects created by this library are writable.
NOTE: To disable type checking (for speed), add the following method
after and outside your class's definition. Substitute your class's name
for the name YOURCLASS
YOURCLASS.__setattr__ = object.__setattr__
"""
# Reasons for doing this:
# Less code to define a class and no need for a maker function means it
# is far less error prone.
# Type checking makes debugging easier since execution halts at the
# source of the problem (the assignment violation) versus later on (when
# accessed/used as an unexpected type).
# Built in "to string" representation (__str__ method) also makes
# debugging easier.
# As with manually declaring classes with a __slots__ class variable,
# objects enforce their predefined slot attributes and those attributes
# are still mutable (unlike namedtuples).
REV = "$Revision: 3.2 $"
from inspect import isclass
from sys import stderr
from collections import OrderedDict
##########################################################################
# #
# Abstract Base Class Section #
# #
##########################################################################
import abc # abstract base class library
def makeAbstractClass( className ):
    """ Create and return a new abstract base class named className.

    This is used for the run-time type checking that struct provides.
    For more details on abstract base classes, see ABCMeta in package abc.

    When this function returns, the created abstract class
    has as yet no 'concrete' classes that conform to it.

    Example usage:

        Master = makeAbstractClass( "Master" )
        ... create classes C1, C2, and C3 (via struct/quickClass
        ... or any previously defined types) ...
        Master.addClasses( C1, C2, C3 )

    C1, C2, and C3 are now virtual subclasses of Master, so a quick
    class slot typed as Master accepts instances of any of them.

    className: the name (str) to give the new abstract class.
    Returns: the new abstract class.
    """
    class AbstractClass( metaclass=abc.ABCMeta ):
        @classmethod
        def addClasses( cls, *classes ):
            """ Register each argument class as a 'concrete' class
                conforming to this abstract class (ABCMeta.register).
            """
            # Fix: classmethod's first parameter is conventionally 'cls',
            # not 'self'; behavior is unchanged for all callers.
            for conforming in classes:
                cls.register( conforming )

    # Give the freshly minted class the caller-supplied name so error
    # messages and repr() output are meaningful.
    AbstractClass.__name__ = className
    return AbstractClass
##########################################################################
# #
# Definition of struct class -- does all the setup and checking #
# #
##########################################################################
NoneType = type( None )
class struct( object ):
""" The base class for all classes created using this framework.
Note that the methods contained herein apply to classes inheriting
struct; it is not expected that classes declared as inheriting
struct would define their own methods, much less redefine
these.
That being said, since struct's subclasses will not explicitly
contain their own constructors, programmers of those subclasses must
be familiar with the API for the constructor defined here.
struct expects its subclasses to have one of the following
two class variables defined:
If __slots__ is defined, it is a tuple of names of class slots,
or attributes, represented as strings. These names are assumed
to be exactly the set of slots to be initialized, set, and
otherwise accessed. (Python enforces this.) However, the slots
can have any value; there is no type checking.
Example:
class Student( struct ):
__slots__ = ( "age", "gpa" )
If _slots is defined, it is a tuple of tuples. The inner
tuples are all of length 2. The first value of each such
pair is either a type or a tuple of types. The second
value is the name of the slot, as in the __slots__ case above.
It works just like the first case with __slots__, but now
the values that may be assigned to the slots must be of
(one of) the type(s) associated with it.
Example:
class Student( struct ):
_slots = ( (int,"age"), ((str,float),"gpa") )
The one exception is that, instead of a type one may use the string
name of the class being built. This is the way one refers to the
type one is building for structurally recursive types.
Note that mutually recursive types are not (yet) supported.
The class returned can be constructed using the provided name and
either positional or keyword arguments. See the __init__ method
for struct
"""
# Initially the new class's slots may have some of its types
# specified as strings. These need to be converted to real types.
# The class-level boolean variable _typesScanned
# indicates whether this class's type list has been scanned yet
# for str's. It's done in the instance constructor __init__.
#
_typesScanned = False
def __init__( self, *args, **kwargs ):
    """ Initialize a new instance of a subclass of struct. The
        constructor call argument list should contain a value for
        each attribute of the class, presented either in the same
        order as the _slots declaration (args), or with keyword names
        that match the slot names (kwargs). These two approaches may
        not be mixed; that is either args or kwargs must be empty.

        args: a sequence of values for each slot declared in the subclass
        kwargs: a dictionary of values for each slot declared in
                the subclass. The keys in the dictionary match the
                names of the slots.

        Raises TypeError if struct itself is instantiated, if the
        subclass declares neither _slots nor __slots__ (or both), or
        if the argument list does not cover the slots exactly once.
        Raises AttributeError for a keyword that names no slot.
    """
    # Save the id and name of the subclass being instantiated.
    thisClass = self.__class__
    className = thisClass.__name__
    # struct is abstract-by-convention: only subclasses are instantiable.
    if ( thisClass == struct ):
        raise TypeError( "struct itself may not be instantiated." )
    if not thisClass._typesScanned: # Do this upon FIRST instance creation.
        # One-time, class-level normalization: convert whatever slot
        # declaration style the subclass used into the canonical _slots
        # form. This mutates the CLASS, not the instance.
        # If __slots__ but no _slots, convert using type 'object'.
        # or look for old, deprecated _types variable.
        #
        if "__slots__" in dir( thisClass ):
            # Declaring both styles at once is ambiguous; reject it.
            if "_slots" in dir( thisClass ):
                raise TypeError( "struct subclasses may not have " + \
                                 "a '_slots' attribute declared if\n" + \
                                 "       the standard '__slots__' attribute " + \
                                 "is present." )
            slots = thisClass.__slots__
            # A single bare name is allowed; wrap it into a 1-tuple.
            if not ( isinstance( slots, tuple ) or \
                     isinstance( slots, list ) ):
                slots = ( slots, )
            newSlots = []
            if "_types" in dir( thisClass ):
                # Deprecated parallel-list style: __slots__ plus _types.
                stderr.write( "struct warning: '_types' " + \
                              "variable is deprecated. (class " + \
                              className + ").\n" )
                types = thisClass._types
                if not ( isinstance( types, tuple ) or \
                         isinstance( types, list ) ):
                    types = ( types, )
                if len( types ) != len( slots ):
                    raise TypeError(
                        "No. of slots differs from no. of types" )
                # Pair each declared type with its slot name.
                for i in range( len( slots ) ):
                    newSlots.append( ( types[ i ], slots[ i ] ) )
            else:
                # Untyped __slots__: every slot accepts any object.
                for attrName in thisClass.__slots__:
                    newSlots.append( ( object, attrName ) )
            thisClass._slots = tuple( newSlots )
        if "_slots" not in dir( thisClass ):
            raise TypeError( "struct subclasses must have " + \
                             "either a '_slots' or '__slots__' " + \
                             "attribute declared." )
        # Do error checking and convert the _slots variable
        # to a dictionary mapping each variable name to a set
        # of types.
        # NOTE(review): _normalizeSlotsConstruction is defined elsewhere
        # in this module; after it runs, _slots is assumed to behave as
        # an ordered name->types mapping (it is iterated and copied
        # below) — confirm against its definition.
        #
        _normalizeSlotsConstruction( thisClass )

        # The above code is only exectuted the first time an object is
        # created from the new class.
        thisClass._typesScanned = True
    if len( kwargs ) != 0:
        # Keyword-style initialization.
        # Make a copy of the slot dictionary so that it is easy to
        # check if each slot is given a value exactly once.
        #
        slots = thisClass._slots.copy()
        if len( args ) != 0:
            raise TypeError( "NamedTuples cannot be initialized with " +\
                             "a combination of regular and " +\
                             "keyword arguments" )
        else:
            for key in kwargs:
                if key not in thisClass._slots:
                    raise AttributeError( "'" + className + "' object " +\
                                          "has no attribute named '" +\
                                          key + "'" )
                else:
                    attrValue = kwargs[ key ]
                    # setattr routes through __setattr__, which performs
                    # the run-time type check for this slot.
                    setattr( self, key, attrValue )
                    del slots[ key ]
            # Anything left over never received a value.
            if len( slots ) != 0:
                raise TypeError( "Constructor call for " + className +\
                                 " did not get initialization values " +\
                                 "for " + str( slots.keys() ) )
    else:
        # Positional-style initialization: one value per slot, in
        # declaration order.
        if len( args ) != len( thisClass._slots ):
            raise TypeError( "Constructor call for " + className +\
                             " expected " + \
                             str( len( thisClass._slots ) ) + \
                             " arguments but got " + str( len( args ) ) )
        else:
            i = 0
            for key in thisClass._slots:
                setattr( self, key, args[ i ] )
                i += 1
def __eq__( self, other ):
    """ (DO NOT call this function directly; access it via the '=='
        operator.)

        Deep structural equality: True exactly when other has the same
        type as self and every slot value compares equal, recursing
        through nested structs while guarding against re-comparing
        pairs of objects already visited.

        Precondition: the object must not contain circular references.
        If it does, this method must be redefined in the subclass.
    """
    seen = set()                     # pairs of ids already compared
    return struct._equal( self, other, seen )
@staticmethod
def _equal( first, second, visited ):
    """ Recursive worker behind __eq__. visited accumulates (id, id)
        pairs that have already been compared, so shared substructure
        is not re-examined.
    """
    # Identical object: trivially equal.
    if id( first ) == id( second ):
        return True
    # Differing concrete types are never equal.
    if type( first ) != type( second ):
        return False
    # Non-struct values fall back to ordinary equality.
    if not isinstance( first, struct ):
        return first == second
    visited.add( ( id( first ), id( second ) ) )
    for slotName in second.__class__._slots:    # keys of dictionary
        left = getattr( first, slotName )
        right = getattr( second, slotName )
        pair = ( id( left ), id( right ) )
        if pair in visited:
            continue
        visited.add( pair )
        if not struct._equal( left, right, visited ):
            return False
    return True
def __ne__(self, other):
    """(Do not call directly; invoked via the '!=' operator.)

    Return the negation of equality, i.e. not (self == other).
    """
    equal = (self == other)
    return not equal
def __str__(self):
    """(Do not call directly; invoked via the str builtin.)

    Return a string representation of this object: its class's name
    followed by a listing of the values of all of its slots.

    If the object contains multiple references involving only structs,
    the returned string is degraded (to "...") to avoid potential
    infinite recursion.
    """
    # Fix: the previous version created an unused local 'visited' set and
    # then passed a second, fresh set() to _str_rep; the dead local has
    # been removed.  Behavior is unchanged.
    return self._str_rep(set(), ": ")
def __repr__(self):
    """(Do not call directly; invoked via the repr builtin.)

    Return a string that, if evaluated, would re-create this object.
    Degrades shared struct references to "..." to avoid potential
    infinite recursion.
    """
    seen = set()
    return self._str_rep(seen, "=")
def _str_rep( self, visited, sep ):
    """ Called by both __str__ and __repr__. Only difference is
    the characters separating each slot name from its value
    (": " for str, "=" for repr).

    visited: set of ids of objects already rendered on this path;
    a slot whose value id is in it renders as "..." instead of
    recursing, which guards against shared/recursive structs.
    """
    thisClass = self.__class__
    className = thisClass.__name__
    # Slots render in declaration order (dict preserves insertion order).
    slots = tuple( thisClass._slots.keys() )
    if len( slots ) != 0:
        result = className + "( "
        # Remember the last slot so the separator comma is not appended
        # after the final entry.
        lastSlot = slots[ -1 ]
        for slot in slots:
            obj = getattr( self, slot )
            objID = id( obj )
            if objID not in visited:
                if isinstance( obj, struct ):
                    # Recurse with self marked as visited so a struct that
                    # points back at us degrades to "..." instead of looping.
                    slotStr = obj._str_rep( visited | set([id(self)]), sep )
                else:
                    slotStr = repr( obj )
                result += slot + sep + slotStr
            else:
                result += slot + sep + "..."
            if slot != lastSlot:
                result += ", "
        result += " )"
    else:
        # No slots at all: render as a bare constructor call.
        result = className + "()"
    return result
def __setattr__(self, name, value):
    """This is a private function.  Do NOT directly call it.

    Validates every attribute (slot) assignment: the name must be a
    declared slot and the value must be an instance of one of the
    slot's allowed types.
    """
    thisClass = self.__class__
    slots = thisClass._slots
    if name not in slots:
        raise AttributeError(repr(thisClass.__name__) +
                             " object has no attribute " + repr(name))
    paramTypes = slots[name]
    # Even though paramTypes is a set, each candidate type must be tried
    # with isinstance() so that subclasses are accepted too.
    if any(isinstance(value, paramType) for paramType in paramTypes):
        object.__setattr__(self, name, value)
    else:
        raise TypeError("Type of " + name +
                        " may not be " + type(value).__name__)
def _normalizeSlotsConstruction( cls ):
    """ The form of the _slots variable should be either
        TSet, str
    or
        ( (TSet,str), (TSet,str), ... )
    where TSet = T or ( T, T, ... )
    and
        T = str or type
    This function checks everything and converts the _slots
    variable into a dictionary mapping slot name -> set of allowed
    types (the string form of a type is resolved to cls itself,
    which is how structurally recursive types are declared).
    """
    # Error check: Make sure that _slots is a tuple.
    if type( cls._slots ) != tuple:
        raise TypeError( "_slots attribute must be a tuple" )
    slots = cls._slots
    # Fix: If only one slot, convert single entry to a tuple containing it.
    if len( slots ) == 2 and type( slots[ 1 ] ) == str: #singleton case
        slots = ( slots, )
    # Error check: Make sure that all the structures within are legal.
    for tspec in slots:
        if type( tspec ) != tuple:
            raise TypeError( "Improper _slots attribute" )
        # Each entry must be (type-or-name-or-tuple, slot-name-string).
        if type( tspec[ 1 ] ) != str or ( not isclass( tspec[ 0 ] ) and \
                                          type( tspec[ 0 ] ) != str and \
                                          type( tspec[ 0 ] ) != tuple ):
            raise TypeError( "Improper _slots attribute" )
        if type( tspec[ 0 ] ) == tuple:
            # A tuple of alternatives: each one must be a class or a name.
            for t in tspec[ 0 ]:
                if not isclass( t ) and type( t ) != str:
                    raise TypeError( "Improper type spec in _slots attribute" )
    # Create the dictionary, performing more validity checks along
    # the way.  An OrderedDict preserves declaration order, which the
    # positional constructor and _str_rep rely on.
    #
    slotd = OrderedDict()
    for tspec in slots:
        # Fix the type part of the pair.
        if isclass( tspec[ 0 ] ): # type to tuple containing type
            types = set( ( tspec[ 0 ], ) )
        elif type( tspec[ 0 ] ) == str: # string name of type to type
            # Only the name of the class being defined may appear as a
            # string (self-recursive type); anything else is an error.
            if tspec[ 0 ] != cls.__name__:
                raise TypeError( "The type string name given was '" + \
                                 tspec[ 0 ] + "'. Only '" + \
                                 cls.__name__ + "' is allowed here." )
            types = set( ( cls, ) )
        else: # Must be a tuple of types due to previous checks
            types = set()
            for t in tspec[ 0 ]:
                if isclass( t ):
                    types.add( t )
                elif type( t ) == str:
                    if t != cls.__name__:
                        raise TypeError( "The type string name given was '" + \
                                         t + "'. Only '" + \
                                         cls.__name__ + "' is allowed here." )
                    types.add( cls )
                else:
                    raise TypeError( str( t ) + " is not a type" )
        # Check the variable name part of the pair
        if type( tspec[ 1 ] ) != str:
            raise TypeError( str( tspec[ 1 ] ) + " is not a string" )
        slotd[ tspec[ 1 ] ] = types
    # Put it back in the _slots class variable
    cls._slots = slotd
##########################################################################
# #
# quickClass function -- creates an struct subclass #
# #
##########################################################################
def quickClass(name, *slotDecls):
    """Create and return a new struct subclass with the given name and slots.

    This is an alternative to an explicit class declaration deriving from
    struct.  Each slot declaration is a 2-tuple: the slot's type (or tuple
    of types) first and the slot's name second.  Instead of a type, the
    string name of the class being built may be used, which is how
    structurally recursive types are declared.  Mutually recursive types
    are not (yet) supported.

    The returned class is constructed with either positional or keyword
    arguments; see struct.__init__ for details.
    """
    namespace = {'_slots': slotDecls}
    return type(name, (struct,), namespace)
| mit |
cachedout/libnacl | tests/unit/test_blake.py | 6 | 1476 | # Import nacl libs
import libnacl.blake
# Import python libs
import unittest
class TestBlake(unittest.TestCase):
    '''
    Test BLAKE2b hashing, both keyless (crypto_generichash) and keyed.
    '''
    def test_keyless_blake(self):
        # Unkeyed generic hashing: each digest must differ from its input
        # message, and different messages must produce different digests.
        msg1 = b'Are you suggesting coconuts migrate?'
        msg2 = b'Not at all, they could be carried.'
        chash1 = libnacl.crypto_generichash(msg1)
        chash2 = libnacl.crypto_generichash(msg2)
        self.assertNotEqual(msg1, chash1)
        self.assertNotEqual(msg2, chash2)
        self.assertNotEqual(chash2, chash1)

    def test_key_blake(self):
        # Keyed hashing: the digest must depend on both the message and the
        # key, and be reproducible for the same (message, key) pair.
        msg1 = b'Are you suggesting coconuts migrate?'
        msg2 = b'Not at all, they could be carried.'
        key1 = libnacl.utils.rand_nonce()
        key2 = libnacl.utils.rand_nonce()
        khash1_1 = libnacl.blake.Blake2b(msg1, key1).digest()
        khash1_1_2 = libnacl.blake.Blake2b(msg1, key1).digest()
        khash1_2 = libnacl.blake.Blake2b(msg1, key2).digest()
        khash2_1 = libnacl.blake.blake2b(msg2, key1).digest()
        khash2_2 = libnacl.blake.blake2b(msg2, key2).digest()
        # No digest may equal its input message.
        self.assertNotEqual(msg1, khash1_1)
        self.assertNotEqual(msg1, khash1_2)
        self.assertNotEqual(msg2, khash2_1)
        self.assertNotEqual(msg2, khash2_2)
        # Changing the key or the message must change the digest.
        self.assertNotEqual(khash1_1, khash1_2)
        self.assertNotEqual(khash2_1, khash2_2)
        self.assertNotEqual(khash1_1, khash2_1)
        self.assertNotEqual(khash1_2, khash2_2)
        # Same message and key: digest must be deterministic.
        self.assertEqual(khash1_1, khash1_1_2)
| apache-2.0 |
hpcugent/easybuild-easyblocks | easybuild/easyblocks/e/espresso.py | 4 | 2783 | ##
# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild
#
# Copyright:: Copyright 2012-2019 Uni.Lu/LCSB, NTUA
# Authors:: Josh Berryman <the.real.josh.berryman@gmail.com>, Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste
# License:: MIT/GPL
# $Id$
#
# This work implements a part of the HPCBIOS project and is a component of the policy:
# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-80.html
##
"""
EasyBuild support for building and installing ESPResSo, implemented as an easyblock
@author: Josh Berryman <the.real.josh.berryman@gmail.com>
@author: Fotis Georgatos (Uni.Lu)
@author: Kenneth Hoste (Ghent University)
"""
import os
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.run import run_cmd
class EB_ESPResSo(ConfigureMake):
    """Support for building/installing ESPResSo, parallel version."""

    def __init__(self, *args, **kwargs):
        """Specify to build in install dir."""
        super(EB_ESPResSo, self).__init__(*args, **kwargs)
        # Sources are unpacked and built directly in the installation
        # directory, under the <name>-<version> subdirectory.
        self.build_in_installdir = True
        self.install_subdir = '%s-%s' % (self.name.lower(), self.version)

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters specific to ESPResSo."""
        extra_vars = {
            'runtest': [True, "Run ESPResSo tests.", CUSTOM],
        }
        return ConfigureMake.extra_options(extra_vars)

    def test_step(self):
        """Custom built-in test procedure for ESPResSo, parallel version."""
        if self.cfg['runtest']:
            # Run the bundled Tcl test suite on 2 MPI processes.
            cmd = './runtest.sh -p 2 *.tcl'
            (out, ec) = run_cmd(cmd, simple=False, log_all=False, log_ok=False, path="testsuite")
            if ec:
                # ESPResSo fails many of its tests in version 3.1.1, and the test script itself is buggy
                # so, just provide output in log file, but ignore things if it fails
                self.log.warning("ESPResSo test failed (exit code: %s): %s" % (ec, out))
            else:
                self.log.info("Successful ESPResSo test completed: %s" % out)

    def install_step(self):
        """Build is done in install dir, so no separate install step."""
        pass

    def sanity_check_step(self):
        """Custom sanity check for ESPResSo: binary plus key directories."""
        custom_paths = {
            'files': [os.path.join(self.install_subdir, 'Espresso')],
            'dirs': [os.path.join(self.install_subdir, x) for x in ['samples', 'scripts', 'tools']],
        }
        super(EB_ESPResSo, self).sanity_check_step(custom_paths=custom_paths)

    def make_module_req_guess(self):
        """Customize PATH for ESPResSo."""
        guesses = super(EB_ESPResSo, self).make_module_req_guess()
        # The 'Espresso' executable lives directly in the unpacked source
        # subdirectory, so that directory must go on PATH.
        guesses.update({'PATH': [self.install_subdir]})
        return guesses
| gpl-2.0 |
bprodoehl/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/flakytestreporter.py | 116 | 10213 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import codecs
import logging
import os.path
from webkitpy.common.net.layouttestresults import path_for_layout_test, LayoutTestResults
from webkitpy.common.config import urls
from webkitpy.tool.bot.botinfo import BotInfo
from webkitpy.tool.grammar import plural, pluralize, join_with_separators
_log = logging.getLogger(__name__)
class FlakyTestReporter(object):
    """Files and updates Bugzilla bugs for layout tests that flake on a bot."""

    def __init__(self, tool, bot_name):
        self._tool = tool
        self._bot_name = bot_name
        # FIXME: Use the real port object
        self._bot_info = BotInfo(tool, tool.deprecated_port().name())

    def _author_emails_for_test(self, flaky_test):
        """Return the set of bugzilla emails of committers who recently touched *flaky_test*."""
        test_path = path_for_layout_test(flaky_test)
        commit_infos = self._tool.checkout().recent_commit_infos_for_files([test_path])
        # This ignores authors which are not committers because we don't have their bugzilla_email.
        return set([commit_info.author().bugzilla_email() for commit_info in commit_infos if commit_info.author()])

    def _bugzilla_email(self):
        """Return the bugzilla username this bot authenticates as."""
        # FIXME: This is kinda a funny way to get the bugzilla email,
        # we could also just create a Credentials object directly
        # but some of the Credentials logic is in bugzilla.py too...
        self._tool.bugs.authenticate()
        return self._tool.bugs.username

    # FIXME: This should move into common.config
    _bot_emails = set([
        "commit-queue@webkit.org", # commit-queue
        "eseidel@chromium.org", # old commit-queue
        "webkit.review.bot@gmail.com", # style-queue, sheriff-bot, CrLx/Gtk EWS
        "buildbot@hotmail.com", # Win EWS
        # Mac EWS currently uses eric@webkit.org, but that's not normally a bot
    ])

    def _lookup_bug_for_flaky_test(self, flaky_test):
        """Return the existing bot-filed bug tracking *flaky_test*, or None."""
        bugs = self._tool.bugs.queries.fetch_bugs_matching_search(search_string=flaky_test)
        if not bugs:
            return None
        # Match any bugs which are from known bots or the email this bot is using.
        # Bug fix: _bugzilla_email is a method and must be *called*; previously
        # the bound method object itself was added to the set, so bugs filed by
        # this bot's own account never matched reporter_email().
        allowed_emails = self._bot_emails | set([self._bugzilla_email()])
        bugs = filter(lambda bug: bug.reporter_email() in allowed_emails, bugs)
        if not bugs:
            return None
        if len(bugs) > 1:
            # FIXME: There are probably heuristics we could use for finding
            # the right bug instead of the first, like open vs. closed.
            _log.warn("Found %s %s matching '%s' filed by a bot, using the first." % (pluralize('bug', len(bugs)), [bug.id() for bug in bugs], flaky_test))
        return bugs[0]

    def _view_source_url_for_test(self, test_path):
        """Return the trac view-source URL for a layout test path."""
        return urls.view_source_url("LayoutTests/%s" % test_path)

    def _create_bug_for_flaky_test(self, flaky_test, author_emails, latest_flake_message):
        """File a new Bugzilla bug for *flaky_test*, CC'ing its recent authors.

        Returns the new bug id."""
        format_values = {
            'test': flaky_test,
            'authors': join_with_separators(sorted(author_emails)),
            'flake_message': latest_flake_message,
            'test_url': self._view_source_url_for_test(flaky_test),
            'bot_name': self._bot_name,
        }
        title = "Flaky Test: %(test)s" % format_values
        description = """This is an automatically generated bug from the %(bot_name)s.
%(test)s has been flaky on the %(bot_name)s.
%(test)s was authored by %(authors)s.
%(test_url)s
%(flake_message)s
The bots will update this with information from each new failure.
If you believe this bug to be fixed or invalid, feel free to close. The bots will re-open if the flake re-occurs.
If you would like to track this test fix with another bug, please close this bug as a duplicate. The bots will follow the duplicate chain when making future comments.
""" % format_values

        master_flake_bug = 50856  # MASTER: Flaky tests found by the commit-queue
        # Consistency fix: use the named constant instead of duplicating the
        # bug number as a string literal (same value, "50856", is passed).
        return self._tool.bugs.create_bug(title, description,
            component="Tools / Tests",
            cc=",".join(author_emails),
            blocked=str(master_flake_bug))

    # This is over-engineered, but it makes for pretty bug messages.
    def _optional_author_string(self, author_emails):
        """Return " (author(s): a, b)" for a bug comment, or "" when unknown."""
        if not author_emails:
            return ""
        heading_string = plural('author') if len(author_emails) > 1 else 'author'
        authors_string = join_with_separators(sorted(author_emails))
        return " (%s: %s)" % (heading_string, authors_string)

    def _latest_flake_message(self, flaky_result, patch):
        """Build the one-flake comment text, including bot details."""
        failure_messages = [failure.message() for failure in flaky_result.failures]
        flake_message = "The %s just saw %s flake (%s) while processing attachment %s on bug %s." % (self._bot_name, flaky_result.test_name, ", ".join(failure_messages), patch.id(), patch.bug_id())
        return "%s\n%s" % (flake_message, self._bot_info.summary_text())

    def _results_diff_path_for_test(self, test_path):
        """Return the expected path of the -diffs.txt file for *test_path*."""
        # FIXME: This is a big hack. We should get this path from results.json
        # except that old-run-webkit-tests doesn't produce a results.json
        # so we just guess at the file path.
        (test_path_root, _) = os.path.splitext(test_path)
        return "%s-diffs.txt" % test_path_root

    def _follow_duplicate_chain(self, bug):
        """Walk closed-as-duplicate links until reaching the canonical bug."""
        while bug.is_closed() and bug.duplicate_of():
            bug = self._tool.bugs.fetch_bug(bug.duplicate_of())
        return bug

    def _update_bug_for_flaky_test(self, bug, latest_flake_message):
        """Append the latest flake information to an existing bug."""
        self._tool.bugs.post_comment_to_bug(bug.id(), latest_flake_message)

    # This method is needed because our archive paths include a leading tmp/layout-test-results
    def _find_in_archive(self, path, archive):
        """Return the archive member whose name ends with *path*, or None."""
        for archived_path in archive.namelist():
            # Archives are currently created with full paths.
            if archived_path.endswith(path):
                return archived_path
        return None

    def _attach_failure_diff(self, flake_bug_id, flaky_test, results_archive_zip):
        """Attach the failure diff (or, failing that, the whole archive) to the bug."""
        results_diff_path = self._results_diff_path_for_test(flaky_test)
        # Check to make sure that the path makes sense.
        # Since we're not actually getting this path from the results.html
        # there is a chance it's wrong.
        bot_id = self._tool.status_server.bot_id or "bot"
        archive_path = self._find_in_archive(results_diff_path, results_archive_zip)
        if archive_path:
            results_diff = results_archive_zip.read(archive_path)
            description = "Failure diff from %s" % bot_id
            self._tool.bugs.add_attachment_to_bug(flake_bug_id, results_diff, description, filename="failure.diff")
        else:
            _log.warn("%s does not exist in results archive, uploading entire archive." % results_diff_path)
            description = "Archive of layout-test-results from %s" % bot_id
            # results_archive is a ZipFile object, grab the File object (.fp) to pass to Mechanize for uploading.
            results_archive_file = results_archive_zip.fp
            # Rewind the file object to start (since Mechanize won't do that automatically)
            # See https://bugs.webkit.org/show_bug.cgi?id=54593
            results_archive_file.seek(0)
            self._tool.bugs.add_attachment_to_bug(flake_bug_id, results_archive_file, description, filename="layout-test-results.zip")

    def report_flaky_tests(self, patch, flaky_test_results, results_archive):
        """File or update one bug per flaky test, then summarize on the patch's bug."""
        message = "The %s encountered the following flaky tests while processing attachment %s:\n\n" % (self._bot_name, patch.id())
        for flaky_result in flaky_test_results:
            flaky_test = flaky_result.test_name
            bug = self._lookup_bug_for_flaky_test(flaky_test)
            latest_flake_message = self._latest_flake_message(flaky_result, patch)
            author_emails = self._author_emails_for_test(flaky_test)
            if not bug:
                _log.info("Bug does not already exist for %s, creating." % flaky_test)
                flake_bug_id = self._create_bug_for_flaky_test(flaky_test, author_emails, latest_flake_message)
            else:
                bug = self._follow_duplicate_chain(bug)
                # FIXME: Ideally we'd only make one comment per flake, not two. But that's not possible
                # in all cases (e.g. when reopening), so for now file attachment and comment are separate.
                self._update_bug_for_flaky_test(bug, latest_flake_message)
                flake_bug_id = bug.id()
            self._attach_failure_diff(flake_bug_id, flaky_test, results_archive)
            message += "%s bug %s%s\n" % (flaky_test, flake_bug_id, self._optional_author_string(author_emails))
        message += "The %s is continuing to process your patch." % self._bot_name
        self._tool.bugs.post_comment_to_bug(patch.bug_id(), message)
| bsd-3-clause |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/gettext.py | 90 | 17661 | """Internationalization and localization support.
This module provides internationalization (I18N) and localization (L10N)
support for your Python programs by providing an interface to the GNU gettext
message catalog library.
I18N refers to the operation by which a program is made aware of multiple
languages. L10N refers to the adaptation of your program, once
internationalized, to the local language and cultural habits.
"""
# This module represents the integration of work, contributions, feedback, and
# suggestions from the following people:
#
# Martin von Loewis, who wrote the initial implementation of the underlying
# C-based libintlmodule (later renamed _gettext), along with a skeletal
# gettext.py implementation.
#
# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule,
# which also included a pure-Python implementation to read .mo files if
# intlmodule wasn't available.
#
# James Henstridge, who also wrote a gettext.py module, which has some
# interesting, but currently unsupported experimental features: the notion of
# a Catalog class and instances, and the ability to add to a catalog file via
# a Python API.
#
# Barry Warsaw integrated these modules, wrote the .install() API and code,
# and conformed all C and Python code to Python's coding standards.
#
# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this
# module.
#
# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs.
#
# TODO:
# - Lazy loading of .mo files. Currently the entire catalog is loaded into
# memory, but that's probably bad for large translated programs. Instead,
# the lexical sort of original strings in GNU .mo files should be exploited
# to do binary searches and lazy initializations. Or you might want to use
# the undocumented double-hash algorithm for .mo files with hash tables, but
# you'll need to study the GNU gettext code to do this.
#
# - Support Solaris .mo file formats. Unfortunately, we've been unable to
# find this format documented anywhere.
import locale, copy, io, os, re, struct, sys
from errno import ENOENT
__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
'find', 'translation', 'install', 'textdomain', 'bindtextdomain',
'dgettext', 'dngettext', 'gettext', 'ngettext',
]
_default_localedir = os.path.join(sys.base_prefix, 'share', 'locale')
def c2py(plural):
    """Gets a C expression as used in PO files for plural forms and returns a
    Python lambda function that implements an equivalent expression.
    """
    # Security check, allow only the "n" identifier
    # NOTE(review): the transformed expression is ultimately passed to
    # eval(); this token filter is the only guard, so catalogs from
    # untrusted sources should not be loaded.
    import token, tokenize
    tokens = tokenize.generate_tokens(io.StringIO(plural).readline)
    try:
        danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
    except tokenize.TokenError:
        raise ValueError('plural forms expression error, maybe unbalanced parenthesis')
    else:
        if danger:
            raise ValueError('plural forms expression could be dangerous')

    # Replace some C operators by their Python equivalents
    plural = plural.replace('&&', ' and ')
    plural = plural.replace('||', ' or ')

    # C's "!x" becomes Python's "not x" ("!=" must be left untouched).
    expr = re.compile(r'\!([^=])')
    plural = expr.sub(' not \\1', plural)

    # Regular expression and replacement function used to transform
    # "a?b:c" to "b if a else c".
    expr = re.compile(r'(.*?)\?(.*?):(.*)')
    def repl(x):
        return "(%s if %s else %s)" % (x.group(2), x.group(1),
                                       expr.sub(repl, x.group(3)))

    # Code to transform the plural expression, taking care of parentheses:
    # each '('..')' group is rewritten independently so that ternaries
    # nested in parentheses are converted correctly.
    stack = ['']
    for c in plural:
        if c == '(':
            stack.append('')
        elif c == ')':
            if len(stack) == 1:
                # Actually, we never reach this code, because unbalanced
                # parentheses get caught in the security check at the
                # beginning.
                raise ValueError('unbalanced parenthesis in plural form')
            s = expr.sub(repl, stack.pop())
            stack[-1] += '(%s)' % s
        else:
            stack[-1] += c
    plural = expr.sub(repl, stack.pop())

    return eval('lambda n: int(%s)' % plural)
def _expand_lang(loc):
    """Expand a locale name into its fallback spellings, most specific first.

    After locale.normalize(), the name is split into language, territory
    ('_XX'), codeset ('.enc') and modifier ('@mod') components; every
    combination of the present components is generated, ordered from the
    fullest form down to the bare language.
    """
    loc = locale.normalize(loc)
    CODESET = 1 << 0
    TERRITORY = 1 << 1
    MODIFIER = 1 << 2
    mask = 0
    # Peel off the components in the same order the original format is
    # defined: modifier first, then codeset, then territory.
    loc, sep, tail = loc.partition('@')
    modifier = sep + tail
    if modifier:
        mask |= MODIFIER
    loc, sep, tail = loc.partition('.')
    codeset = sep + tail
    if codeset:
        mask |= CODESET
    loc, sep, tail = loc.partition('_')
    territory = sep + tail
    if territory:
        mask |= TERRITORY
    language = loc
    # Enumerate every subset of the present components...
    expansions = []
    for bits in range(mask + 1):
        if bits & ~mask:  # skip combos using a component that isn't there
            continue
        candidate = language
        if bits & TERRITORY:
            candidate += territory
        if bits & CODESET:
            candidate += codeset
        if bits & MODIFIER:
            candidate += modifier
        expansions.append(candidate)
    # ...and return them most-specific first.
    expansions.reverse()
    return expansions
class NullTranslations:
    """Identity translation class; also the base class for GNUTranslations.

    Messages pass through unchanged unless a fallback translation object
    has been chained on with add_fallback().
    """

    def __init__(self, fp=None):
        self._info = {}
        self._charset = None
        self._output_charset = None
        self._fallback = None
        if fp is not None:
            self._parse(fp)

    def _parse(self, fp):
        # Subclasses override this to read an actual catalog file.
        pass

    def add_fallback(self, fallback):
        # Append to the end of the existing fallback chain.
        if self._fallback:
            self._fallback.add_fallback(fallback)
        else:
            self._fallback = fallback

    def gettext(self, message):
        fb = self._fallback
        return fb.gettext(message) if fb else message

    def lgettext(self, message):
        fb = self._fallback
        return fb.lgettext(message) if fb else message

    def ngettext(self, msgid1, msgid2, n):
        fb = self._fallback
        if fb:
            return fb.ngettext(msgid1, msgid2, n)
        return msgid1 if n == 1 else msgid2

    def lngettext(self, msgid1, msgid2, n):
        fb = self._fallback
        if fb:
            return fb.lngettext(msgid1, msgid2, n)
        return msgid1 if n == 1 else msgid2

    def info(self):
        return self._info

    def charset(self):
        return self._charset

    def output_charset(self):
        return self._output_charset

    def set_output_charset(self, charset):
        self._output_charset = charset

    def install(self, names=None):
        import builtins
        builtins.__dict__['_'] = self.gettext
        if hasattr(names, "__contains__"):
            # Install only the explicitly requested helpers.
            bindings = (
                ("gettext", self.gettext),
                ("ngettext", self.ngettext),
                ("lgettext", self.lgettext),
                ("lngettext", self.lngettext),
            )
            for name, func in bindings:
                if name in names:
                    builtins.__dict__[name] = func
class GNUTranslations(NullTranslations):
    """Translation class reading GNU gettext binary .mo catalog files."""

    # Magic number of .mo files
    LE_MAGIC = 0x950412de
    BE_MAGIC = 0xde120495

    def _parse(self, fp):
        """Override this method to support alternative .mo formats."""
        unpack = struct.unpack
        filename = getattr(fp, 'name', '')
        # Parse the .mo file header, which consists of 5 little endian 32
        # bit words.
        self._catalog = catalog = {}
        self.plural = lambda n: int(n != 1) # germanic plural by default
        buf = fp.read()
        buflen = len(buf)
        # Are we big endian or little endian?
        magic = unpack('<I', buf[:4])[0]
        if magic == self.LE_MAGIC:
            version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
            ii = '<II'
        elif magic == self.BE_MAGIC:
            version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
            ii = '>II'
        else:
            raise OSError(0, 'Bad magic number', filename)
        # Now put all messages from the .mo file buffer into the catalog
        # dictionary.
        for i in range(0, msgcount):
            # Each index entry is (length, offset) into the buffer.
            mlen, moff = unpack(ii, buf[masteridx:masteridx+8])
            mend = moff + mlen
            tlen, toff = unpack(ii, buf[transidx:transidx+8])
            tend = toff + tlen
            if mend < buflen and tend < buflen:
                msg = buf[moff:mend]
                tmsg = buf[toff:tend]
            else:
                raise OSError(0, 'File is corrupt', filename)
            # See if we're looking at GNU .mo conventions for metadata
            if mlen == 0:
                # Catalog description: the empty msgid's translation holds
                # "Key: Value" header lines (charset, plural-forms, ...).
                lastk = k = None
                for b_item in tmsg.split('\n'.encode("ascii")):
                    item = b_item.decode().strip()
                    if not item:
                        continue
                    if ':' in item:
                        k, v = item.split(':', 1)
                        k = k.strip().lower()
                        v = v.strip()
                        self._info[k] = v
                        lastk = k
                    elif lastk:
                        # Continuation line of the previous header.
                        self._info[lastk] += '\n' + item
                    if k == 'content-type':
                        self._charset = v.split('charset=')[1]
                    elif k == 'plural-forms':
                        v = v.split(';')
                        plural = v[1].split('plural=')[1]
                        self.plural = c2py(plural)
            # Note: we unconditionally convert both msgids and msgstrs to
            # Unicode using the character encoding specified in the charset
            # parameter of the Content-Type header.  The gettext documentation
            # strongly encourages msgids to be us-ascii, but some applications
            # require alternative encodings (e.g. Zope's ZCML and ZPT).  For
            # traditional gettext applications, the msgid conversion will
            # cause no problems since us-ascii should always be a subset of
            # the charset encoding.  We may want to fall back to 8-bit msgids
            # if the Unicode conversion fails.
            charset = self._charset or 'ascii'
            if b'\x00' in msg:
                # Plural forms: NUL-separated msgid pair and translations;
                # catalog keys become (msgid1, plural-index) tuples.
                msgid1, msgid2 = msg.split(b'\x00')
                tmsg = tmsg.split(b'\x00')
                msgid1 = str(msgid1, charset)
                for i, x in enumerate(tmsg):
                    catalog[(msgid1, i)] = str(x, charset)
            else:
                catalog[str(msg, charset)] = str(tmsg, charset)
            # advance to next entry in the seek tables
            masteridx += 8
            transidx += 8

    def lgettext(self, message):
        """Like gettext(), but return the translation as encoded bytes
        (output charset if set, else the locale's preferred encoding)."""
        missing = object()
        tmsg = self._catalog.get(message, missing)
        if tmsg is missing:
            if self._fallback:
                return self._fallback.lgettext(message)
            return message
        if self._output_charset:
            return tmsg.encode(self._output_charset)
        return tmsg.encode(locale.getpreferredencoding())

    def lngettext(self, msgid1, msgid2, n):
        """Plural-aware lgettext(): select by the catalog's plural formula."""
        try:
            tmsg = self._catalog[(msgid1, self.plural(n))]
            if self._output_charset:
                return tmsg.encode(self._output_charset)
            return tmsg.encode(locale.getpreferredencoding())
        except KeyError:
            if self._fallback:
                return self._fallback.lngettext(msgid1, msgid2, n)
            if n == 1:
                return msgid1
            else:
                return msgid2

    def gettext(self, message):
        """Return the translation of *message*, consulting the fallback
        chain (and finally returning *message* itself) on a miss."""
        missing = object()
        tmsg = self._catalog.get(message, missing)
        if tmsg is missing:
            if self._fallback:
                return self._fallback.gettext(message)
            return message
        return tmsg

    def ngettext(self, msgid1, msgid2, n):
        """Plural-aware gettext(): the catalog entry is keyed by
        (msgid1, plural-index), where the index comes from the catalog's
        plural formula applied to n."""
        try:
            tmsg = self._catalog[(msgid1, self.plural(n))]
        except KeyError:
            if self._fallback:
                return self._fallback.ngettext(msgid1, msgid2, n)
            if n == 1:
                tmsg = msgid1
            else:
                tmsg = msgid2
        return tmsg
# Locate a .mo file using the gettext strategy
def find(domain, localedir=None, languages=None, all=False):
    """Locate .mo file(s) for *domain* using the GNU gettext search strategy.

    Languages default to the first of LANGUAGE, LC_ALL, LC_MESSAGES, LANG
    found in the environment.  Returns the first matching path (or None),
    or the list of all matches when all is true.
    """
    if localedir is None:
        localedir = _default_localedir
    if languages is None:
        languages = []
        for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
            val = os.environ.get(envar)
            if val:
                languages = val.split(':')
                break
        # 'C' terminates the search: it means "no translation".
        if 'C' not in languages:
            languages.append('C')
    # Normalize and expand the languages, preserving first-seen order
    # while dropping duplicates.
    nelangs = []
    for lang in languages:
        for nelang in _expand_lang(lang):
            if nelang not in nelangs:
                nelangs.append(nelang)
    # Select a language.
    result = [] if all else None
    for lang in nelangs:
        if lang == 'C':
            break
        mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
        if os.path.exists(mofile):
            if all:
                result.append(mofile)
            else:
                return mofile
    return result
# a mapping between absolute .mo file path and Translation object
_translations = {}
def translation(domain, localedir=None, languages=None,
                class_=None, fallback=False, codeset=None):
    """Return a translation object for *domain* (GNUTranslations by default).

    Raises OSError(ENOENT) if no catalog is found, unless *fallback* is
    true, in which case a NullTranslations instance is returned.  When
    several catalogs match, later ones are chained as fallbacks of the
    first.
    """
    if class_ is None:
        class_ = GNUTranslations
    mofiles = find(domain, localedir, languages, all=True)
    if not mofiles:
        if fallback:
            return NullTranslations()
        raise OSError(ENOENT, 'No translation file found for domain', domain)
    # Avoid opening, reading, and parsing the .mo file after it's been done
    # once.
    result = None
    for mofile in mofiles:
        key = (class_, os.path.abspath(mofile))
        t = _translations.get(key)
        if t is None:
            with open(mofile, 'rb') as fp:
                t = _translations.setdefault(key, class_(fp))
        # Copy the translation object to allow setting fallbacks and
        # output charset. All other instance data is shared with the
        # cached object.
        t = copy.copy(t)
        if codeset:
            t.set_output_charset(codeset)
        if result is None:
            result = t
        else:
            result.add_fallback(t)
    return result
def install(domain, localedir=None, codeset=None, names=None):
    """Install _() (and optionally other helpers named in *names*) into
    builtins for *domain*, falling back to identity translation when no
    catalog is found."""
    trans = translation(domain, localedir, fallback=True, codeset=codeset)
    trans.install(names)
# a mapping b/w domains and locale directories
_localedirs = {}
# a mapping b/w domains and codesets
_localecodesets = {}
# current global domain, `messages' used for compatibility w/ GNU gettext
_current_domain = 'messages'
def textdomain(domain=None):
    """Set the current global message domain (when *domain* is given) and
    return it."""
    global _current_domain
    if domain is None:
        return _current_domain
    _current_domain = domain
    return _current_domain
def bindtextdomain(domain, localedir=None):
    """Bind *domain* to *localedir* (when given) and return the directory
    currently bound to it, defaulting to the system locale directory."""
    global _localedirs
    if localedir is not None:
        _localedirs[domain] = localedir
    return _localedirs.get(domain, _default_localedir)
def bind_textdomain_codeset(domain, codeset=None):
    """Bind *domain* to *codeset* (when given) and return the codeset
    currently bound to it, or None if unbound."""
    global _localecodesets
    if codeset is not None:
        _localecodesets[domain] = codeset
    return _localecodesets.get(domain)
def dgettext(domain, message):
    """Translate *message* in *domain*; return *message* unchanged when no
    catalog can be found for the domain."""
    try:
        t = translation(domain, _localedirs.get(domain, None),
                        codeset=_localecodesets.get(domain))
    except OSError:
        return message
    return t.gettext(message)
def ldgettext(domain, message):
    """Like dgettext(), but return the translation as encoded bytes."""
    try:
        t = translation(domain, _localedirs.get(domain, None),
                        codeset=_localecodesets.get(domain))
    except OSError:
        return message
    return t.lgettext(message)
def dngettext(domain, msgid1, msgid2, n):
    """Plural-aware dgettext(); on a missing catalog fall back to the
    germanic rule: msgid1 when n == 1, else msgid2."""
    try:
        t = translation(domain, _localedirs.get(domain, None),
                        codeset=_localecodesets.get(domain))
    except OSError:
        if n == 1:
            return msgid1
        else:
            return msgid2
    return t.ngettext(msgid1, msgid2, n)
def ldngettext(domain, msgid1, msgid2, n):
    """Like dngettext(), but return the translation as encoded bytes."""
    try:
        t = translation(domain, _localedirs.get(domain, None),
                        codeset=_localecodesets.get(domain))
    except OSError:
        if n == 1:
            return msgid1
        else:
            return msgid2
    return t.lngettext(msgid1, msgid2, n)
def gettext(message):
    # Convenience wrapper: translate *message* in the current global domain.
    return dgettext(_current_domain, message)
def lgettext(message):
    # As gettext(), but via lgettext() (encoded-output variant).
    return ldgettext(_current_domain, message)
def ngettext(msgid1, msgid2, n):
    # Plural-aware translation in the current global domain.
    return dngettext(_current_domain, msgid1, msgid2, n)
def lngettext(msgid1, msgid2, n):
    # As ngettext(), but via lngettext() (encoded-output variant).
    return ldngettext(_current_domain, msgid1, msgid2, n)
# dcgettext() has been deemed unnecessary and is not implemented.
# James Henstridge's Catalog constructor from GNOME gettext.  Documented usage
# was:
#
#    import gettext
#    cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR)
#    _ = cat.gettext
#    print _('Hello World')
# The resulting catalog object currently doesn't support access through a
# dictionary API, which was supported (but apparently unused) in GNOME
# gettext.
Catalog = translation
| gpl-2.0 |
simonzhangsm/PivotalHD | pcf-elastic-runtime-1.1.0.0-modified-linux-kernel/original-linux-lts-backport-oneiric-3.0.0/Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Index of the current value; doubles as a flag for space-separating output.
i = 0
while True:
	# NOTE(review): this script targets Python 2, where sys.stdin.read()
	# yields raw bytes; on Python 3, sys.stdin.buffer would be required
	# for struct.unpack below -- confirm intended interpreter.
	buf = sys.stdin.read(4)
	if len(buf) == 0:
		# Clean EOF on a 4-byte boundary: done.
		break
	elif len(buf) != 4:
		# Truncated trailing value: terminate output line and fail.
		sys.stdout.write("\n")
		sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
		sys.exit(1)
	if i > 0:
		sys.stdout.write(" ")
	# Emit "index(hex)=value(decimal)" for each little-endian u32.
	sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
	i += 1
sys.stdout.write("\n")
| gpl-2.0 |
DirkdeDraak/easybuild-easyblocks | easybuild/easyblocks/generic/versionindependendpythonpackage.py | 12 | 1843 | ##
# Copyright 2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing a Python package independent of a Python version as an easyblock.
This easyblock is no longer supported, and is replaced by VersionIndependentPythonPackage (fixes a typo in the class name).
@author: Kenneth Hoste, Jens Timmerman (Ghent University)
"""
from easybuild.easyblocks.generic.versionindependentpythonpackage import VersionIndependentPythonPackage
class VersionIndependendPythonPackage(VersionIndependentPythonPackage):
    """Deprecated alias (typo in the name) for VersionIndependentPythonPackage.

    Kept only so that old easyconfigs fail with a clear message.
    """

    def prepare_step(self):
        """Abort early: this easyblock is no longer supported."""
        self.log.nosupport("Replaced by VersionIndependentPythonPackage easyblock", '2.0')
| gpl-2.0 |
emergebtc/muddery | evennia/evennia/server/webserver.py | 2 | 4147 | """
This implements resources for twisted webservers using the wsgi
interface of django. This alleviates the need of running e.g. an
apache server to serve Evennia's web presence (although you could do
that too if desired).
The actual servers are started inside server.py as part of the Evennia
application.
(Lots of thanks to http://github.com/clemensha/twisted-wsgi-django for
a great example/aid on how to do this.)
"""
import urlparse
from urllib import quote as urlquote
from twisted.web import resource, http
from twisted.internet import reactor
from twisted.application import internet
from twisted.web.proxy import ReverseProxyResource
from twisted.web.server import NOT_DONE_YET
from twisted.web.wsgi import WSGIResource
from django.conf import settings
from django.core.handlers.wsgi import WSGIHandler
# Proxy IPs whose X-Forwarded-For header we trust (see allHeadersReceived).
UPSTREAM_IPS = settings.UPSTREAM_IPS
#
# X-Forwarded-For Handler
#
class HTTPChannelWithXForwardedFor(http.HTTPChannel):
    """HTTP channel that honours X-Forwarded-For from trusted upstreams."""

    def allHeadersReceived(self):
        """
        Check whether this connection comes through a trusted reverse proxy;
        if so, replace the transport's client address with the originating
        client taken from the X-Forwarded-For header.
        """
        http.HTTPChannel.allHeadersReceived(self)
        request = self.requests[-1]
        peer_ip, peer_port = self.transport.client
        forwarded_chain = request.getHeader('X-FORWARDED-FOR')
        if forwarded_chain and peer_ip in UPSTREAM_IPS:
            # The first entry in the comma-separated chain is the client.
            original_client = forwarded_chain.split(', ', 1)[0]
            self.transport.client = (original_client, peer_port)
# Monkey-patch Twisted so every HTTP connection uses the X-Forwarded-For
# aware channel defined above.
http.HTTPFactory.protocol = HTTPChannelWithXForwardedFor
class EvenniaReverseProxyResource(ReverseProxyResource):
    """Reverse-proxy resource whose children proxy to the same backend."""

    def getChild(self, path, request):
        """
        Create and return a proxy resource with the same proxy configuration
        as this one, except that its path also contains the segment given by
        C{path} at the end.
        """
        extended_path = self.path + '/' + urlquote(path, safe="")
        return EvenniaReverseProxyResource(self.host, self.port,
                                           extended_path, self.reactor)

    def render(self, request):
        """
        Render a request by forwarding it to the proxied server; the reply
        arrives asynchronously, so NOT_DONE_YET is returned.
        """
        # RFC 2616 allows omitting the default port, but we always pass the
        # configured port along explicitly.
        request.content.seek(0, 0)
        query = urlparse.urlparse(request.uri)[4]
        rest = self.path + '?' + query if query else self.path
        client_factory = self.proxyClientFactoryClass(
            request.method, rest, request.clientproto,
            request.getAllHeaders(), request.content.read(), request)
        self.reactor.connectTCP(self.host, self.port, client_factory)
        return NOT_DONE_YET
#
# Website server resource
#
class DjangoWebRoot(resource.Resource):
    """
    A web root (/) that Django understands, achieved by tweaking the way
    child resources are resolved.
    """

    def __init__(self, pool):
        """
        Set up the combined Django + Twisted resource.

        pool - thread pool used by the WSGI resource to run requests.
        """
        resource.Resource.__init__(self)
        self.wsgi_resource = WSGIResource(reactor, pool, WSGIHandler())

    def getChild(self, path, request):
        """
        Nudge the URL tree so the WSGI resource acts as the root: push the
        consumed prepath segment back onto postpath and let Django route it.
        """
        first_segment = request.prepath.pop(0)
        request.postpath.insert(0, first_segment)
        return self.wsgi_resource
#
# Threaded Webserver
#
class WSGIWebServer(internet.TCPServer):
    """
    A WSGI webserver service.

    It starts its threadpool only after the TCP service itself has started,
    so the pool registers correctly with the twisted daemon.
    Call with WSGIWebServer(threadpool, port, wsgi_resource).
    """
    def __init__(self, pool, *args, **kwargs):
        "Store the threadpool; remaining arguments go to TCPServer."
        self.pool = pool
        internet.TCPServer.__init__(self, *args, **kwargs)
    def startService(self):
        "Start the TCP service first, then its threadpool."
        internet.TCPServer.startService(self)
        self.pool.start()
    def stopService(self):
        "Stop the TCP service first, then safely stop the threadpool."
        internet.TCPServer.stopService(self)
        self.pool.stop()
| bsd-3-clause |
hpicgs/gloperate | source/tests/googletest/googletest/scripts/upload.py | 2511 | 51024 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import md5
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
try:
import readline
except ImportError:
pass
# The logging verbosity (default 1):
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
verbosity = 1
# Maximum size (in bytes) of a patch or base file that will be uploaded.
MAX_UPLOAD_SIZE = 900 * 1024
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.

  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      # "with" guarantees the handle is closed even if readline() raises;
      # the original explicit open/close pair leaked the handle in that case.
      with open(last_email_file_name, "r") as last_email_file:
        last_email = last_email_file.readline().strip("\n")
      prompt += " [%s]" % last_email
    except IOError:
      # Best effort: an unreadable cache simply means no suggested default.
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      with open(last_email_file_name, "w") as last_email_file:
        last_email_file.write(email)
    except IOError:
      # Failing to persist the address for next time is not fatal.
      pass
  else:
    email = last_email
  return email
def StatusUpdate(msg):
  """Print a status message to stdout.

  The message is only emitted when the module-level 'verbosity' is greater
  than 0 (i.e. not running in quiet mode).

  Args:
    msg: The string to print.
  """
  if verbosity <= 0:
    return
  print(msg)
def ErrorExit(msg):
  """Print an error message to stderr and exit with status 1.

  Args:
    msg: The message to print; a trailing newline is appended.
  """
  # sys.stderr.write replaces the Python-2-only "print >>sys.stderr" chevron
  # syntax; output is byte-identical and the helper now parses on Python 3.
  sys.stderr.write("%s\n" % msg)
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""
  def __init__(self, url, code, msg, headers, args):
    # Body is dropped (None): the parsed key/value response is kept instead.
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    # args: dict parsed from the ClientLogin error response body.
    self.args = args
    # reason: the ClientLogin "Error" code, e.g. "BadAuthentication".
    self.reason = args["Error"]
class AbstractRpcServer(object):
  """Provides a common interface for a simple RPC server."""
  def __init__(self, host, auth_function, host_override=None, extra_headers={},
               save_cookies=False):
    """Creates a new HttpRpcServer.

    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
    """
    # NOTE(review): extra_headers={} is a mutable default argument shared by
    # all instances created without it -- confirm no caller mutates it.
    self.host = host
    self.host_override = host_override
    self.auth_function = auth_function
    self.authenticated = False
    self.extra_headers = extra_headers
    self.save_cookies = save_cookies
    # Subclasses provide the concrete urllib2 opener.
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)
  def _GetOpener(self):
    """Returns an OpenerDirector for making HTTP requests.

    Returns:
      A urllib2.OpenerDirector object.
    """
    raise NotImplementedError()
  def _CreateRequest(self, url, data=None):
    """Creates a new urllib request with Host override and extra headers."""
    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
    req = urllib2.Request(url, data=data)
    if self.host_override:
      req.add_header("Host", self.host_override)
    # iteritems() is Python 2 only (items() in Python 3).
    for key, value in self.extra_headers.iteritems():
      req.add_header(key, value)
    return req
  def _GetAuthToken(self, email, password):
    """Uses ClientLogin to authenticate the user, returning an auth token.

    Args:
      email: The user's email address
      password: The user's password

    Raises:
      ClientLoginError: If there was an error authenticating with ClientLogin.
      HTTPError: If there was some other form of HTTP error.

    Returns:
      The authentication token returned by ClientLogin.
    """
    account_type = "GOOGLE"
    if self.host.endswith(".google.com"):
      # Needed for use inside Google.
      account_type = "HOSTED"
    req = self._CreateRequest(
        url="https://www.google.com/accounts/ClientLogin",
        data=urllib.urlencode({
            "Email": email,
            "Passwd": password,
            "service": "ah",
            "source": "rietveld-codereview-upload",
            "accountType": account_type,
        }),
    )
    try:
      response = self.opener.open(req)
      response_body = response.read()
      # ClientLogin answers with "key=value" lines; parse them into a dict.
      response_dict = dict(x.split("=")
                           for x in response_body.split("\n") if x)
      return response_dict["Auth"]
    except urllib2.HTTPError, e:
      if e.code == 403:
        # 403 carries a structured error body; surface it as ClientLoginError.
        body = e.read()
        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                               e.headers, response_dict)
      else:
        raise
  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.

    Args:
      auth_token: The authentication token returned by ClientLogin.

    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # This is a dummy value to allow us to identify when we're successful.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    req = self._CreateRequest("http://%s/_ah/login?%s" %
                              (self.host, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      # The opener ignores redirects, so a 302 surfaces as an HTTPError.
      response = e
    # A 302 redirect to continue_location signals a successful login.
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True
  def _Authenticate(self):
    """Authenticates the user.

    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.

    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response and directs us to
    authenticate ourselves with ClientLogin.
    """
    # Up to three attempts: bad credentials re-prompt; every other
    # ClientLogin failure reason prints a message and stops retrying.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        if e.reason == "BadAuthentication":
          print >>sys.stderr, "Invalid username or password."
          continue
        if e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.")
          break
        if e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
          break
        if e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
          break
        if e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
          break
        if e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        if e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
          break
        if e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
          break
        raise
      self._GetAuthCookie(auth_token)
      return
  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()
    # The socket timeout is process-wide; save it so it can be restored.
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401:
            # Credentials expired: re-authenticate and retry the request.
            self._Authenticate()
##          elif e.code >= 500 and e.code < 600:
##            # Server Error - try again.
##            continue
          else:
            raise
    finally:
      # Restore the process-wide socket timeout whatever happens.
      socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""
  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Redirects are "ignored" because no HTTPRedirectHandler is installed, so
    a 302 surfaces as an HTTPError (relied on by _GetAuthCookie).

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        # (0600 is a Python-2 octal literal; 0o600 in Python 3).
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file to keep credentials private.
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Command-line interface: options are grouped by concern (logging, review
# server, issue metadata, patch handling).
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to 'codereview.appspot.com'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Branch/tree/revision to diff against (used by DVCS).")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
def GetRpcServer(options):
  """Returns an instance of an AbstractRpcServer.

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  rpc_server_class = HttpRpcServer

  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % options.server)
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)

  host = (options.host or options.server).lower()
  if not (host == "localhost" or host.startswith("localhost:")):
    # Real server: credentials are requested lazily on first use.
    return rpc_server_class(options.server, GetUserCredentials,
                            host_override=options.host,
                            save_cookies=options.save_cookies)

  # dev_appserver: use fake authentication and skip ClientLogin entirely.
  email = options.email
  if email is None:
    email = "test@example.com"
  logging.info("Using debug user %s. Override with --email" % email)
  server = rpc_server_class(
      options.server,
      lambda: (email, "password"),
      host_override=options.host,
      extra_headers={"Cookie":
                     'dev_appserver_login="%s:False"' % email},
      save_cookies=options.save_cookies)
  # Don't try to talk to ClientLogin.
  server.authenticated = True
  return server
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.
  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  for (name, value) in fields:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        value,
    ])
  for (name, filename, value) in files:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
            (name, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        value,
    ])
  parts.append('--' + BOUNDARY + '--')
  parts.append('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(parts)
def GetContentType(filename):
  """Helper to guess the content-type from the filename."""
  guessed, _ = mimetypes.guess_type(filename)
  return guessed if guessed else 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search
# (subprocess does not search PATH without shell=True there).
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True):
  """Executes a command and returns the output from stdout and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).

  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      # Echo each line as it arrives (Python 2 print statement).
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is drained only after wait(); a child producing a
  # very large stderr could block before exiting -- confirm acceptable here.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False):
  """Run *command* and return its stdout; exit via ErrorExit on a non-zero
  status or (unless silent_ok) on empty output."""
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not silent_ok and not output:
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""
  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    self.options = options
  def GenerateDiff(self, args):
    """Return the current diff as a string.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      # Python 2 print statements / raw_input below.
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")
  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.

    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty.  For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.

    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files
  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""
    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads a file to the server."""
      file_too_large = False
      # "type" shadows the builtin here; kept as-is for byte-compatibility.
      if is_base:
        type = "base"
      else:
        type = "current"
      if len(content) > MAX_UPLOAD_SIZE:
        # Oversized payloads are replaced by an empty body plus a flag field.
        print ("Not uploading the %s file for %s because it's too large." %
               (type, filename))
        file_too_large = True
        content = ""
      # md5.new is Python 2 only (hashlib.md5 in Python 3).
      checksum = md5.new(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print "Uploading %s file for %s" % (type, filename)
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                    ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate(" --> %s" % response_body)
        sys.exit(1)
    patches = dict()
    # Side-effecting list comprehension: inverts (id, filename) pairs from
    # patch_list into a filename -> id mapping.
    [patches.setdefault(v, k) for k, v in patch_list]
    for filename in patches.keys():
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      # A "nobase" marker means the server should not expect a base file;
      # the numeric id follows the last underscore.
      if file_id_str.find("nobase") != -1:
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content != None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content != None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)
  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/")
class SubversionVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Subversion."""
def __init__(self, options):
super(SubversionVCS, self).__init__(options)
if self.options.revision:
match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
if not match:
ErrorExit("Invalid Subversion revision %s." % self.options.revision)
self.rev_start = match.group(1)
self.rev_end = match.group(3)
else:
self.rev_start = self.rev_end = None
# Cache output from "svn list -r REVNO dirname".
# Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
self.svnls_cache = {}
# SVN base URL is required to fetch files deleted in an older revision.
# Result is cached to not guess it over and over again in GetBaseFile().
required = self.options.download_base or self.options.revision is not None
self.svn_base = self._GuessBase(required)
def GuessBase(self, required):
"""Wrapper for _GuessBase."""
return self.svn_base
def _GuessBase(self, required):
"""Returns the SVN base URL.
Args:
required: If true, exits if the url can't be guessed, otherwise None is
returned.
"""
info = RunShell(["svn", "info"])
for line in info.splitlines():
words = line.split()
if len(words) == 2 and words[0] == "URL:":
url = words[1]
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
username, netloc = urllib.splituser(netloc)
if username:
logging.info("Removed username from base URL")
if netloc.endswith("svn.python.org"):
if netloc == "svn.python.org":
if path.startswith("/projects/"):
path = path[9:]
elif netloc != "pythondev@svn.python.org":
ErrorExit("Unrecognized Python URL: %s" % url)
base = "http://svn.python.org/view/*checkout*%s/" % path
logging.info("Guessed Python base = %s", base)
elif netloc.endswith("svn.collab.net"):
if path.startswith("/repos/"):
path = path[6:]
base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
logging.info("Guessed CollabNet base = %s", base)
elif netloc.endswith(".googlecode.com"):
path = path + "/"
base = urlparse.urlunparse(("http", netloc, path, params,
query, fragment))
logging.info("Guessed Google Code base = %s", base)
else:
path = path + "/"
base = urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
logging.info("Guessed base = %s", base)
return base
if required:
ErrorExit("Can't find URL in output from svn info")
return None
def GenerateDiff(self, args):
    """Run "svn diff" (optionally against self.options.revision) and return it.

    Exits via ErrorExit when the output contains no recognizable patch
    sections ("Index:" or "Property changes on:" headers).
    """
    command = ["svn", "diff"]
    if self.options.revision:
        command += ["-r", self.options.revision]
    command.extend(args)
    diff_output = RunShell(command)
    section_count = 0
    for diff_line in diff_output.splitlines():
        if (diff_line.startswith("Index:") or
                diff_line.startswith("Property changes on:")):
            section_count += 1
            logging.info(diff_line)
    if not section_count:
        ErrorExit("No valid patches found in output from svn diff")
    return diff_output
def _CollapseKeywords(self, content, keyword_str):
"""Collapses SVN keywords."""
# svn cat translates keywords but svn diff doesn't. As a result of this
# behavior patching.PatchChunks() fails with a chunk mismatch error.
# This part was originally written by the Review Board development team
# who had the same problem (http://reviews.review-board.org/r/276/).
# Mapping of keywords to known aliases
svn_keywords = {
# Standard keywords
'Date': ['Date', 'LastChangedDate'],
'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
'Author': ['Author', 'LastChangedBy'],
'HeadURL': ['HeadURL', 'URL'],
'Id': ['Id'],
# Aliases
'LastChangedDate': ['LastChangedDate', 'Date'],
'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
'LastChangedBy': ['LastChangedBy', 'Author'],
'URL': ['URL', 'HeadURL'],
}
def repl(m):
if m.group(2):
return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
return "$%s$" % m.group(1)
keywords = [keyword
for name in keyword_str.split(" ")
for keyword in svn_keywords.get(name, [])]
return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
def GetUnknownFiles(self):
    """Return the raw "svn status" lines for files unknown to Subversion."""
    status_output = RunShell(["svn", "status", "--ignore-externals"],
                             silent_ok=True)
    # Lines flagged '?' in the first column are unversioned; keep the
    # complete status line, matching the original behavior.
    return [status_line
            for status_line in status_output.split("\n")
            if status_line and status_line[0] == "?"]
def ReadFile(self, filename):
    """Returns the contents of a file.

    The file is opened in binary mode, so raw bytes are returned with no
    newline translation.
    """
    # "with" guarantees the handle is closed even if read() raises, and
    # avoids shadowing the builtin "file" as the previous version did.
    with open(filename, 'rb') as f:
        return f.read()
def GetStatus(self, filename):
"""Returns the status of a file."""
# Without -r/--rev we can trust plain "svn status" on the working copy.
if not self.options.revision:
status = RunShell(["svn", "status", "--ignore-externals", filename])
if not status:
ErrorExit("svn status returned no output for %s" % filename)
status_lines = status.splitlines()
# If file is in a cl, the output will begin with
# "\n--- Changelist 'cl_name':\n". See
# http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
if (len(status_lines) == 3 and
not status_lines[0] and
status_lines[1].startswith("--- Changelist")):
status = status_lines[2]
else:
status = status_lines[0]
# If we have a revision to diff against we need to run "svn list"
# for the old and the new revision and compare the results to get
# the correct status for a file.
else:
dirname, relfilename = os.path.split(filename)
# One "svn list" pair (start rev, end rev) per directory, memoized in
# self.svnls_cache so repeated files in a directory cost one lookup.
if dirname not in self.svnls_cache:
cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
out, returncode = RunShellWithReturnCode(cmd)
if returncode:
ErrorExit("Failed to get status for %s." % filename)
old_files = out.splitlines()
args = ["svn", "list"]
if self.rev_end:
args += ["-r", self.rev_end]
cmd = args + [dirname or "."]
out, returncode = RunShellWithReturnCode(cmd)
if returncode:
ErrorExit("Failed to run command %s" % cmd)
self.svnls_cache[dirname] = (old_files, out.splitlines())
# Presence in old/new listings determines Deleted/Modified/Added.
old_files, new_files = self.svnls_cache[dirname]
# NOTE(review): upstream upload.py pads these synthetic statuses wider
# ("A   ") so GetBaseFile's status[3] check is in range -- confirm the
# padding was not lost in this copy.
if relfilename in old_files and relfilename not in new_files:
status = "D "
elif relfilename in old_files and relfilename in new_files:
status = "M "
else:
status = "A "
return status
def GetBaseFile(self, filename):
"""Returns (base_content, new_content, is_binary, status) for filename.

The status string is an svn-status-style prefix; only its first five
columns are returned to the caller.
"""
status = self.GetStatus(filename)
base_content = None
new_content = None
# If a file is copied its status will be "A +", which signifies
# "addition-with-history". See "svn st" for more information. We need to
# upload the original file or else diff parsing will fail if the file was
# edited.
if status[0] == "A" and status[3] != "+":
# We'll need to upload the new content if we're adding a binary file
# since diff's output won't contain it.
mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
silent_ok=True)
base_content = ""
# Binary means "has a mime-type that is not text/*".
is_binary = mimetype and not mimetype.startswith("text/")
if is_binary and self.IsImage(filename):
new_content = self.ReadFile(filename)
elif (status[0] in ("M", "D", "R") or
(status[0] == "A" and status[3] == "+") or # Copied file.
(status[0] == " " and status[1] == "M")): # Property change.
args = []
if self.options.revision:
# With an explicit revision, address the file by repository URL.
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
# Don't change filename, it's needed later.
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
mimetype, returncode = RunShellWithReturnCode(cmd)
if returncode:
# File does not exist in the requested revision.
# Reset mimetype, it contains an error message.
mimetype = ""
get_base = False
is_binary = mimetype and not mimetype.startswith("text/")
if status[0] == " ":
# Empty base content just to force an upload.
base_content = ""
elif is_binary:
if self.IsImage(filename):
get_base = True
if status[0] == "M":
if not self.rev_end:
new_content = self.ReadFile(filename)
else:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
new_content = RunShell(["svn", "cat", url],
universal_newlines=True, silent_ok=True)
else:
base_content = ""
else:
get_base = True
if get_base:
# Text files get newline normalization; binaries are fetched raw.
if is_binary:
universal_newlines = False
else:
universal_newlines = True
if self.rev_start:
# "svn cat -r REV delete_file.txt" doesn't work. cat requires
# the full URL with "@REV" appended instead of using "-r" option.
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
base_content = RunShell(["svn", "cat", url],
universal_newlines=universal_newlines,
silent_ok=True)
else:
base_content = RunShell(["svn", "cat", filename],
universal_newlines=universal_newlines,
silent_ok=True)
if not is_binary:
# Collapse expanded svn:keywords so the base matches diff output.
args = []
if self.rev_start:
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
else:
url = filename
args += ["-r", "BASE"]
cmd = ["svn"] + args + ["propget", "svn:keywords", url]
keywords, returncode = RunShellWithReturnCode(cmd)
if keywords and not returncode:
base_content = self._CollapseKeywords(base_content, keywords)
else:
StatusUpdate("svn status returned unexpected output: %s" % status)
sys.exit(1)
return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Git."""
def __init__(self, options):
super(GitVCS, self).__init__(options)
# Map of filename -> hash of base file.
self.base_hashes = {}
def GenerateDiff(self, extra_args):
"""Return "git diff" output rewritten into an svn-style diff."""
# This is more complicated than svn's GenerateDiff because we must convert
# the diff output to include an svn-style "Index:" line as well as record
# the hashes of the base files, so we can upload them along with our diff.
if self.options.revision:
extra_args = [self.options.revision] + extra_args
# --full-index makes git emit complete (unabbreviated) blob hashes.
gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
svndiff = []
filecount = 0
filename = None
for line in gitdiff.splitlines():
match = re.match(r"diff --git a/(.*) b/.*$", line)
if match:
filecount += 1
filename = match.group(1)
svndiff.append("Index: %s\n" % filename)
else:
# The "index" line in a git diff looks like this (long hashes elided):
# index 82c0d44..b2cee3f 100755
# We want to save the left hash, as that identifies the base file.
match = re.match(r"index (\w+)\.\.", line)
if match:
self.base_hashes[filename] = match.group(1)
svndiff.append(line + "\n")
if not filecount:
ErrorExit("No valid patches found in output from git diff")
return "".join(svndiff)
def GetUnknownFiles(self):
"""Return files git considers untracked and not ignored."""
status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
silent_ok=True)
return status.splitlines()
def GetBaseFile(self, filename):
"""Return (base_content, new_content, is_binary, status) for filename."""
# NOTE: "hash" shadows the builtin of the same name within this method.
hash = self.base_hashes[filename]
base_content = None
new_content = None
is_binary = False
if hash == "0" * 40: # All-zero hash indicates no base file.
status = "A"
base_content = ""
else:
status = "M"
# "git show <blob-hash>" prints the base file's content.
base_content, returncode = RunShellWithReturnCode(["git", "show", hash])
if returncode:
ErrorExit("Got error status from 'git show %s'" % hash)
return (base_content, new_content, is_binary, status)
class MercurialVCS(VersionControlSystem):
"""Implementation of the VersionControlSystem interface for Mercurial."""
def __init__(self, options, repo_dir):
super(MercurialVCS, self).__init__(options)
# Absolute path to repository (we can be in a subdir)
self.repo_dir = os.path.normpath(repo_dir)
# Compute the subdir
cwd = os.path.normpath(os.getcwd())
assert cwd.startswith(self.repo_dir)
self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
if self.options.revision:
self.base_rev = self.options.revision
else:
# "hg parent -q" prints "rev:node"; keep the node part.
self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()
def _GetRelPath(self, filename):
"""Get relative path of a file according to the current directory,
given its logical path in the repo."""
assert filename.startswith(self.subdir), filename
return filename[len(self.subdir):].lstrip(r"\/")
def GenerateDiff(self, extra_args):
"""Return "hg diff --git" output rewritten into an svn-style diff."""
# If no file specified, restrict to the current subdir
extra_args = extra_args or ["."]
cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
data = RunShell(cmd, silent_ok=True)
svndiff = []
filecount = 0
for line in data.splitlines():
m = re.match("diff --git a/(\S+) b/(\S+)", line)
if m:
# Modify line to make it look like as it comes from svn diff.
# With this modification no changes on the server side are required
# to make upload.py work with Mercurial repos.
# NOTE: for proper handling of moved/copied files, we have to use
# the second filename.
filename = m.group(2)
svndiff.append("Index: %s" % filename)
svndiff.append("=" * 67)
filecount += 1
logging.info(line)
else:
svndiff.append(line)
if not filecount:
ErrorExit("No valid patches found in output from hg diff")
return "\n".join(svndiff) + "\n"
def GetUnknownFiles(self):
"""Return a list of files unknown to the VCS."""
args = []
status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
silent_ok=True)
unknown_files = []
for line in status.splitlines():
st, fn = line.split(" ", 1)
if st == "?":
unknown_files.append(fn)
return unknown_files
def GetBaseFile(self, filename):
"""Return (base_content, new_content, is_binary, status) for filename."""
# "hg status" and "hg cat" both take a path relative to the current subdir
# rather than to the repo root, but "hg diff" has given us the full path
# to the repo root.
base_content = ""
new_content = None
is_binary = False
oldrelpath = relpath = self._GetRelPath(filename)
# "hg status -C" returns two lines for moved/copied files, one otherwise
out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
out = out.splitlines()
# HACK: strip error message about missing file/directory if it isn't in
# the working copy
if out[0].startswith('%s: ' % relpath):
out = out[1:]
if len(out) > 1:
# Moved/copied => considered as modified, use old filename to
# retrieve base contents
oldrelpath = out[1].strip()
status = "M"
else:
status, _ = out[0].split(' ', 1)
if status != "A":
base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
silent_ok=True)
is_binary = "\0" in base_content # Mercurial's heuristic
if status != "R":
# NOTE(review): this handle is never explicitly closed; it relies on
# garbage collection to release the file descriptor.
new_content = open(relpath, "rb").read()
is_binary = is_binary or "\0" in new_content
if is_binary and base_content:
# Fetch again without converting newlines
base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
silent_ok=True, universal_newlines=False)
if not is_binary or not self.IsImage(relpath):
new_content = None
return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
    """Splits a patch into separate pieces for each file.

    Args:
      data: A string containing the output of svn diff.

    Returns:
      A list of 2-tuple (filename, text) where text is the svn diff output
        pertaining to filename.
    """
    pieces = []
    current_name = None
    current_lines = []
    for patch_line in data.splitlines(True):
        started_name = None
        if patch_line.startswith('Index:'):
            _, started_name = patch_line.split(':', 1)
            started_name = started_name.strip()
        elif patch_line.startswith('Property changes on:'):
            _, candidate = patch_line.split(':', 1)
            # Modified files use '/' between directories while property
            # changes on Windows use '\'; normalize so the same file does
            # not show up twice.
            candidate = candidate.strip().replace('\\', '/')
            if candidate != current_name:
                # Property changes without modifications start a new diff.
                started_name = candidate
        if started_name:
            if current_name and current_lines:
                pieces.append((current_name, ''.join(current_lines)))
            current_name = started_name
            current_lines = [patch_line]
            continue
        if current_lines is not None:
            current_lines.append(patch_line)
    if current_name and current_lines:
        pieces.append((current_name, ''.join(current_lines)))
    return pieces
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
"""Uploads a separate patch for each file in the diff output.
Returns a list of [patch_key, filename] for each file.
"""
patches = SplitPatch(data)
rv = []
for patch in patches:
# Per-file size cap: skip oversized patches rather than failing the run.
if len(patch[1]) > MAX_UPLOAD_SIZE:
print ("Not uploading the patch for " + patch[0] +
" because the file is too large.")
continue
form_fields = [("filename", patch[0])]
if not options.download_base:
form_fields.append(("content_upload", "1"))
files = [("data", "data.diff", patch[1])]
ctype, body = EncodeMultipartFormData(form_fields, files)
url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
print "Uploading patch for " + patch[0]
response_body = rpc_server.Send(url, body, content_type=ctype)
# Server replies "OK\n<patch_key>" on success; anything else is fatal.
lines = response_body.splitlines()
if not lines or lines[0] != "OK":
StatusUpdate(" --> %s" % response_body)
sys.exit(1)
rv.append([lines[1], patch[0]])
return rv
def GuessVCS(options):
"""Helper to guess the version control system.
This examines the current directory, guesses which VersionControlSystem
we're using, and returns an instance of the appropriate class. Exit with an
error if we can't figure it out.
Returns:
A VersionControlSystem instance. Exits if the VCS can't be guessed.
"""
# Mercurial has a command to get the base directory of a repository
# Try running it, but don't die if we don't have hg installed.
# NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
try:
out, returncode = RunShellWithReturnCode(["hg", "root"])
if returncode == 0:
return MercurialVCS(options, out.strip())
# Python 2-only syntax: unpacks the OSError args tuple directly.
except OSError, (errno, message):
if errno != 2: # ENOENT -- they don't have hg installed.
raise
# Subversion has a .svn in all working directories.
if os.path.isdir('.svn'):
logging.info("Guessed VCS = Subversion")
return SubversionVCS(options)
# Git has a command to test if you're in a git tree.
# Try running it, but don't die if we don't have git installed.
try:
out, returncode = RunShellWithReturnCode(["git", "rev-parse",
"--is-inside-work-tree"])
if returncode == 0:
return GitVCS(options)
except OSError, (errno, message):
if errno != 2: # ENOENT -- they don't have git installed.
raise
ErrorExit(("Could not guess version control system. "
"Are you in a working copy directory?"))
def RealMain(argv, data=None):
"""The real main function.
Args:
argv: Command line arguments.
data: Diff contents. If None (default) the diff is generated by
the VersionControlSystem implementation returned by GuessVCS().
Returns:
A 2-tuple (issue id, patchset id).
The patchset id is None if the base files are not uploaded by this
script (applies only to SVN checkouts).
"""
logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
"%(lineno)s %(message)s "))
# Force a stable locale so tool output parsing is not locale-dependent.
os.environ['LC_ALL'] = 'C'
options, args = parser.parse_args(argv[1:])
global verbosity
verbosity = options.verbose
if verbosity >= 3:
logging.getLogger().setLevel(logging.DEBUG)
elif verbosity >= 2:
logging.getLogger().setLevel(logging.INFO)
vcs = GuessVCS(options)
if isinstance(vcs, SubversionVCS):
# base field is only allowed for Subversion.
# Note: Fetching base files may become deprecated in future releases.
base = vcs.GuessBase(options.download_base)
else:
base = None
# NOTE(review): this assignment is a no-op -- download_base is already
# truthy when this branch runs.  Presumably it was meant to flip the flag;
# confirm against upstream upload.py before changing.
if not base and options.download_base:
options.download_base = True
logging.info("Enabled upload of base file")
if not options.assume_yes:
vcs.CheckForUnknownFiles()
if data is None:
data = vcs.GenerateDiff(args)
files = vcs.GetBaseFiles(data)
if verbosity >= 1:
print "Upload server:", options.server, "(change with -s/--server)"
if options.issue:
prompt = "Message describing this patch set: "
else:
prompt = "New issue subject: "
message = options.message or raw_input(prompt).strip()
if not message:
ErrorExit("A non-empty message is required")
rpc_server = GetRpcServer(options)
form_fields = [("subject", message)]
if base:
form_fields.append(("base", base))
if options.issue:
form_fields.append(("issue", str(options.issue)))
if options.email:
form_fields.append(("user", options.email))
# NOTE(review): this validation only accepts domains containing exactly
# one dot, so e.g. "user@example.co.uk" is rejected -- verify intended.
if options.reviewers:
for reviewer in options.reviewers.split(','):
if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
ErrorExit("Invalid email address: %s" % reviewer)
form_fields.append(("reviewers", options.reviewers))
if options.cc:
for cc in options.cc.split(','):
if "@" in cc and not cc.split("@")[1].count(".") == 1:
ErrorExit("Invalid email address: %s" % cc)
form_fields.append(("cc", options.cc))
description = options.description
if options.description_file:
if options.description:
ErrorExit("Can't specify description and description_file")
# NOTE: "file" shadows the Python 2 builtin here and in the loop below.
file = open(options.description_file, 'r')
description = file.read()
file.close()
if description:
form_fields.append(("description", description))
# Send a hash of all the base file so the server can determine if a copy
# already exists in an earlier patchset.
base_hashes = ""
for file, info in files.iteritems():
if not info[0] is None:
checksum = md5.new(info[0]).hexdigest()
if base_hashes:
base_hashes += "|"
base_hashes += checksum + ":" + file
form_fields.append(("base_hashes", base_hashes))
# If we're uploading base files, don't send the email before the uploads, so
# that it contains the file status.
if options.send_mail and options.download_base:
form_fields.append(("send_mail", "1"))
if not options.download_base:
form_fields.append(("content_upload", "1"))
if len(data) > MAX_UPLOAD_SIZE:
print "Patch is large, so uploading file patches separately."
uploaded_diff_file = []
form_fields.append(("separate_patches", "1"))
else:
uploaded_diff_file = [("data", "data.diff", data)]
ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
response_body = rpc_server.Send("/upload", body, content_type=ctype)
patchset = None
# When content is uploaded the server replies with "msg\npatchset\npatches".
if not options.download_base or not uploaded_diff_file:
lines = response_body.splitlines()
if len(lines) >= 2:
msg = lines[0]
patchset = lines[1].strip()
patches = [x.split(" ", 1) for x in lines[2:]]
else:
msg = response_body
else:
msg = response_body
StatusUpdate(msg)
if not response_body.startswith("Issue created.") and \
not response_body.startswith("Issue updated."):
sys.exit(0)
# The issue id is the final path component of the reported issue URL.
issue = msg[msg.rfind("/")+1:]
if not uploaded_diff_file:
result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
if not options.download_base:
patches = result
if not options.download_base:
vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
if options.send_mail:
rpc_server.Send("/" + issue + "/mail", payload="")
return issue, patchset
def main():
"""Entry point: run RealMain, exiting cleanly on Ctrl-C."""
try:
RealMain(sys.argv)
except KeyboardInterrupt:
# Bare Python 2 "print" emits a newline so the status message starts
# on a fresh line after the interrupted prompt.
print
StatusUpdate("Interrupted.")
sys.exit(1)
if __name__ == "__main__":
main()
| mit |
rehandalal/therapist | therapist/cli.py | 1 | 13680 | import os
import re
import shutil
import subprocess
import click
import colorama
from therapist import __version__
from therapist.config import Config
from therapist.messages import (
CONFIRM_PRESERVE_LEGACY_HOOK_MSG,
CONFIRM_REPLACE_HOOK_MSG,
CONFIRM_RESTORE_LEGACY_HOOK_MSG,
CONFIRM_UNINSTALL_HOOK_MSG,
COPYING_HOOK_MSG,
COPYING_LEGACY_HOOK_MSG,
CURRENT_HOOK_NOT_THERAPIST_MSG,
DONE_COPYING_HOOK_MSG,
DONE_COPYING_LEGACY_HOOK_MSG,
DONE_INSTALLING_HOOK_MSG,
DONE_REMOVING_LEGACY_HOOK_MSG,
DONE_UNINSTALLING_HOOK_MSG,
EXISTING_HOOK_MSG,
HOOK_ALREADY_INSTALLED_MSG,
INSTALLING_HOOK_MSG,
INSTALL_ABORTED_MSG,
LEGACY_HOOK_EXISTS_MSG,
MISCONFIGURED_MSG,
NOT_GIT_REPO_MSG,
NO_HOOK_INSTALLED_MSG,
NO_THERAPIST_CONFIG_FILE_MSG,
REMOVING_LEGACY_HOOK_MSG,
UNINSTALLING_HOOK_MSG,
UNINSTALL_ABORTED_MSG,
UNSTAGED_CHANGES_MSG,
UPGRADE_HOOK_MSG,
)
from therapist.plugins.loader import list_plugins
from therapist.runner import Runner
from therapist.runner.result import ResultCollection
from therapist.utils.filesystem import current_git_dir, current_root, list_files
from therapist.utils.hook import calculate_hook_hash, read_hook_hash, read_hook_version
from therapist.utils.git import Git
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
HOOK_VERSION = 2
git = Git()
def output(message, **kwargs):
    """Print *message*, expanding ``#{token}`` markers into colorama codes."""

    def _colorize(match):  # pragma: no cover
        # "#{red}" -> colorama.Fore.RED, "#{bright}" -> colorama.Style.BRIGHT;
        # unknown tokens are left untouched.
        token = match.group(0)[2:-1].upper()
        if hasattr(colorama.Fore, token):
            return getattr(colorama.Fore, token)
        elif hasattr(colorama.Style, token):
            return getattr(colorama.Style, token)
        else:
            return match.group(0)

    text, _ = re.subn("#{(.+?)}", _colorize, message)
    # Reset styling on both sides so colors never leak into later output.
    text = colorama.Style.RESET_ALL + text + colorama.Style.RESET_ALL
    print(text, **kwargs)
def report_misconfigured_and_exit(err):
    """Report a Config.Misconfigured error (plus installed plugins when the
    problem is a missing plugin) and exit with status 1."""
    output(MISCONFIGURED_MSG.format(err.message))
    if err.code == Config.Misconfigured.PLUGIN_NOT_INSTALLED:
        output("Installed plugins:")
        for plugin_name in list_plugins():
            output(plugin_name)
    exit(1)
def get_config(disable_git=False):
    """Load the Therapist config for the current root directory.

    Returns a ``(config, extra_kwargs)`` pair, where ``extra_kwargs`` may
    enable git-aware features for the Runner.  Exits when no config file is
    found or the configuration is invalid.
    """
    git_dir = current_git_dir()
    root_dir = current_root()
    if root_dir is None:
        output(NO_THERAPIST_CONFIG_FILE_MSG)
        exit(1)
    try:
        config = Config(root_dir)
    except Config.Misconfigured as err:
        report_misconfigured_and_exit(err)
    extra_kw = {}
    git_root = os.path.dirname(git_dir) if git_dir else None
    # Only enable git-aware features when the repo root is the config root.
    if not disable_git and git_root == root_dir:
        extra_kw["enable_git"] = True
    return config, extra_kw
@click.group(invoke_without_command=True)
@click.option("--version", "-V", is_flag=True, help="Show the version and exit.")
def cli(version):
    """A smart pre-commit hook for git."""
    # With no subcommand, only -V/--version produces any output.
    if version:
        output("v{}".format(__version__))
@cli.command()
@click.option(
"--force",
"-f",
is_flag=True,
help="Force installation of the hook. This will replace any existing hook "
"unless you also use the --preserve-legacy option.",
)
@click.option(
"--fix", is_flag=True, help="The hook will automatically fix problems where possible."
)
@click.option(
"--no-stage-modified-files",
is_flag=True,
help="Disables the staging of files modified by the hook.",
)
@click.option("--no-color", is_flag=True, help="Disables colors and other rich output.")
@click.option("--preserve-legacy", is_flag=True, help="Preserves any existing pre-commit hook.")
def install(**kwargs):
"""Install the pre-commit hook."""
force = kwargs.get("force")
preserve_legacy = kwargs.get("preserve_legacy")
colorama.init(strip=kwargs.get("no_color"))
# Resolve the absolute path of the therapist executable so the generated
# hook can invoke it regardless of the committing shell's PATH.
stdout = subprocess.check_output("which therapist", shell=True)
therapist_bin = stdout.decode("utf-8").split()[0]
git_dir = current_git_dir()
if git_dir is None:
output(NOT_GIT_REPO_MSG)
exit(1)
# Placeholder values substituted into the hook template below.
hook_options = {
"fix": "--fix" if kwargs.get("fix") else "",
"stage_modified_files": ""
if kwargs.get("no_stage_modified_files")
else "--stage-modified-files",
"therapist_bin": therapist_bin,
}
srchook_path = os.path.join(BASE_DIR, "hooks", "pre-commit-template")
with open(srchook_path, "r") as f:
srchook = f.read()
srchook_hash = calculate_hook_hash(srchook_path, hook_options)
dsthook_path = os.path.join(git_dir, "hooks", "pre-commit")
if os.path.isfile(dsthook_path):
# A readable hash means the existing hook was installed by Therapist;
# identical hashes mean nothing to do.
dsthook_hash = read_hook_hash(dsthook_path)
if dsthook_hash:
if dsthook_hash == srchook_hash:
output(HOOK_ALREADY_INSTALLED_MSG)
exit(0)
else:
# Foreign hook: offer to preserve it as pre-commit.legacy.
if not force and not preserve_legacy:
print(EXISTING_HOOK_MSG)
preserve_legacy = click.confirm(CONFIRM_PRESERVE_LEGACY_HOOK_MSG, default=True)
if preserve_legacy:
output(COPYING_HOOK_MSG, end="")
shutil.copy2(dsthook_path, "{}.legacy".format(dsthook_path))
output(DONE_COPYING_HOOK_MSG)
elif not force:
if not click.confirm(CONFIRM_REPLACE_HOOK_MSG, default=False):
output(INSTALL_ABORTED_MSG)
exit(1)
output(INSTALLING_HOOK_MSG, end="")
with open(dsthook_path, "w+") as f:
# Substitute the hash and option placeholders into the template.
srchook = srchook.replace("%hash%", srchook_hash)
for k, v in hook_options.items():
srchook = srchook.replace("%{}%".format(k), v)
f.write(srchook)
os.chmod(dsthook_path, 0o775)
output(DONE_INSTALLING_HOOK_MSG)
@cli.command()
@click.option(
"--force",
"-f",
is_flag=True,
help="Force uninstallation of the Therapist pre-commit hook. This will "
"also remove any legacy hook unless you also use the "
"--restore-legacy option.",
)
@click.option("--no-color", is_flag=True, help="Disables colors and other rich output.")
@click.option("--restore-legacy", is_flag=True, help="Restores any legacy pre-commit hook.")
def uninstall(**kwargs):
"""Uninstall the current pre-commit hook."""
force = kwargs.get("force")
restore_legacy = kwargs.get("restore_legacy")
colorama.init(strip=kwargs.get("no_color"))
git_dir = current_git_dir()
if git_dir is None:
output(NOT_GIT_REPO_MSG)
exit(1)
hook_path = os.path.join(git_dir, "hooks", "pre-commit")
if not os.path.isfile(hook_path):
output(NO_HOOK_INSTALLED_MSG)
exit(0)
# Only hooks carrying a Therapist hash may be uninstalled by this command.
hook_hash = read_hook_hash(hook_path)
if hook_hash:
if not force:
if not click.confirm(CONFIRM_UNINSTALL_HOOK_MSG, default=False):
output(UNINSTALL_ABORTED_MSG)
exit(1)
else:
output(CURRENT_HOOK_NOT_THERAPIST_MSG)
exit(1)
# A .legacy hook saved at install time can be restored or removed.
legacy_hook_path = os.path.join(git_dir, "hooks", "pre-commit.legacy")
if os.path.isfile(legacy_hook_path):
if not force and not restore_legacy:
output(LEGACY_HOOK_EXISTS_MSG)
restore_legacy = click.confirm(CONFIRM_RESTORE_LEGACY_HOOK_MSG, default=True)
if restore_legacy:
output(COPYING_LEGACY_HOOK_MSG, end="")
shutil.copy2(legacy_hook_path, hook_path)
os.remove(legacy_hook_path)
output(DONE_COPYING_LEGACY_HOOK_MSG)
# Restoring the legacy hook replaces ours, so no further removal needed.
exit(0)
else:
if force or click.confirm("Would you like to remove the legacy hook?", default=False):
output(REMOVING_LEGACY_HOOK_MSG, end="")
os.remove(legacy_hook_path)
output(DONE_REMOVING_LEGACY_HOOK_MSG)
output(UNINSTALLING_HOOK_MSG, end="")
os.remove(hook_path)
output(DONE_UNINSTALLING_HOOK_MSG)
@cli.command()
@click.argument("paths", nargs=-1)
@click.option("--action", "-a", default=None, help="A name of a specific action to be run.")
@click.option("--disable-git", is_flag=True, help="Disable git-aware features.")
@click.option("--enable-git", is_flag=True, help="Enable git-aware features.")
@click.option("--fix", is_flag=True, help="Automatically fixes problems where possible.")
@click.option("--include-unstaged", is_flag=True, help="Include unstaged files.")
@click.option(
"--include-unstaged-changes", is_flag=True, help="Include unstaged changes to staged files."
)
@click.option("--include-untracked", is_flag=True, help="Include untracked files.")
@click.option(
"--junit-xml", default=None, help="Create a junit-xml style report file at the given path."
)
@click.option("--no-color", is_flag=True, help="Disables colors and other rich output.")
@click.option("--plugin", "-p", default=None, help="A name of a specific plugin to be run.")
@click.option(
"--stage-modified-files",
is_flag=True,
help="Files that are modified by any actions should be staged.",
)
@click.option("--use-tracked-files", is_flag=True, help="Runs actions against all tracked files.")
@click.option("--quiet", "-q", is_flag=True, help="Suppress all output, unless an error occurs.")
def run(**kwargs):
"""Run the Therapist suite."""
# Options consumed locally are popped; the remainder is forwarded to Runner.
paths = kwargs.pop("paths", ())
action = kwargs.pop("action")
plugin = kwargs.pop("plugin")
junit_xml = kwargs.pop("junit_xml")
use_tracked_files = kwargs.pop("use_tracked_files")
quiet = kwargs.pop("quiet")
disable_git = kwargs.pop("disable_git")
colorama.init(strip=kwargs.pop("no_color"))
git_dir = current_git_dir()
root_dir = current_root()
config, extra_kw = get_config(disable_git=disable_git)
kwargs.update(extra_kw)
# Validate any installed hook is the minimum required version
if git_dir:
hook_path = os.path.join(git_dir, "hooks", "pre-commit")
if os.path.isfile(hook_path):
hook_version = read_hook_version(hook_path)
if hook_version and hook_version < HOOK_VERSION:
output(UPGRADE_HOOK_MSG)
exit(1)
files = []
if paths:
# We want to look at files in their current state if paths are passed through
kwargs["include_unstaged_changes"] = True
# If paths were provided get all the files for each path
for path in paths:
for f in list_files(path):
f = os.path.relpath(f, root_dir)
if not f.startswith(".."): # Don't include files outside the repo root.
files.append(f)
elif use_tracked_files:
# If the use tracked files flag was passed, get a list of all the tracked files
out, err, code = git.ls_files()
files = out.splitlines()
# Filter out any files that have been deleted
out, err, code = git.status(porcelain=True)
for line in out.splitlines():
if line[0] == "D" or line[1] == "D":
files.remove(line[3:])
if kwargs.get("include_untracked"):
out, err, code = git.ls_files(o=True, exclude_standard=True)
files += out.splitlines()
if files or paths:
kwargs["files"] = files
runner = Runner(config.cwd, **kwargs)
results = ResultCollection()
if runner.unstaged_changes and not quiet:
output(UNSTAGED_CHANGES_MSG, end="\n\n")
processes = list(config.actions) + list(config.plugins)
processes.sort(key=lambda x: x.name) # Sort the list of processes for consistent results
# A named plugin or action restricts the suite to that single process;
# unknown names print the available choices and abort.
if plugin:
try:
processes = [config.plugins.get(plugin)]
except config.plugins.DoesNotExist as e:
output("{}\nAvailable plugins:".format(e.message))
for p in config.plugins:
output(p.name)
exit(1)
if action:
try:
processes = [config.actions.get(action)]
except config.actions.DoesNotExist as e:
output("{}\nAvailable actions:".format(e.message))
for a in config.actions:
output(a.name)
exit(1)
for process in processes:
result, message = runner.run_process(process)
results.append(result)
if not quiet:
output(message)
if junit_xml:
with open(junit_xml, "w+") as f:
f.write("{}".format(results.dump_junit()))
if not quiet:
output(results.dump())
output(
"#{{bright}}{}\nCompleted in: {}s".format(
"".ljust(79, "-"), round(results.execution_time, 2)
)
)
# Exit status: 1 for errors, 2 for failures, 0 otherwise.
if results.has_error:
exit(1)
elif results.has_failure:
exit(2)
@cli.command()
@click.argument("shortcut", nargs=1)
@click.argument("paths", nargs=-1)
@click.pass_context
def use(ctx, shortcut, paths):
"""Use a shortcut."""
config, _ = get_config()
try:
use_shortcut = config.shortcuts.get(shortcut)
# Walk the "extends" chain, folding each base into the final shortcut.
# NOTE(review): a cyclic extends chain would loop forever here -- confirm
# Config validation rules that out.
while use_shortcut.extends is not None:
base = config.shortcuts.get(use_shortcut.extends)
use_shortcut = base.extend(use_shortcut)
except config.shortcuts.DoesNotExist as err:
output("{}\nAvailable shortcuts:".format(err.message))
for s in config.shortcuts:
output(s.name)
exit(1)
else:
options = use_shortcut.options
for flag in use_shortcut.flags:
options[flag.replace("-", "_")] = True
# Echo the equivalent "therapist run ..." command line before invoking it.
options_string = ""
for k, v in sorted(options.items()):
options_string += " --{}".format(k.replace("_", "-"))
if v is not True:
options_string += " {}".format(v)
output("#{{dim}}$ therapist run{} {}\n".format(options_string, " ".join(paths)))
options["paths"] = paths
ctx.invoke(run, **options)
| mpl-2.0 |
googlearchive/googlecrisismap | logs.py | 4 | 2500 | #!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at: http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distrib-
# uted under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for
# specific language governing permissions and limitations under the License.
"""Storage for application-wide event logs."""
import datetime
import logging
import users
import utils
from google.appengine.ext import db
__author__ = 'romano@google.com (Raquel Romano)'
# Events to log.
# Event is capitalized like an enum class. # pylint: disable=g-bad-name
# Each constant's value is identical to its name, so the string stored in the
# datastore is self-describing.
Event = utils.Struct(
DOMAIN_CREATED='DOMAIN_CREATED',
MAP_CREATED='MAP_CREATED',
MAP_DELETED='MAP_DELETED',
MAP_PUBLISHED='MAP_PUBLISHED',
MAP_UNPUBLISHED='MAP_UNPUBLISHED',
MAP_UNDELETED='MAP_UNDELETED',
MAP_BLOCKED='MAP_BLOCKED',
MAP_UNBLOCKED='MAP_UNBLOCKED',
MAP_WIPED='MAP_WIPED'
)
class EventLog(db.Model):
  """Information about an interesting event.

  One entity is written per event by RecordEvent() below.  All fields other
  than 'event' are optional and depend on the event type.
  """
  # When the event occurred (UTC; set by RecordEvent).
  time = db.DateTimeProperty()
  # ID of the user who triggered the event, if known.
  uid = db.StringProperty()
  # What happened; restricted to the Event values defined above.
  event = db.StringProperty(required=True, choices=list(Event))
  # Name of the domain the event concerns, if any.
  domain_name = db.StringProperty()
  # ID of the affected map, if any.
  map_id = db.StringProperty()
  # Datastore key (as a string) of the affected map version, if any.
  map_version_key = db.StringProperty()
  # Datastore key (as a string) of the affected catalog entry, if any.
  catalog_entry_key = db.StringProperty()
  # Review flags; exact semantics are defined by the callers that set them.
  # NOTE(review): presumably map-review outcomes — confirm against callers.
  acceptable_purpose = db.BooleanProperty(default=False)
  acceptable_org = db.BooleanProperty(default=False)
  # Free-form organization name, if supplied by the caller.
  org_name = db.StringProperty()
def RecordEvent(event, domain_name=None, map_id=None, map_version_key=None,
                catalog_entry_key=None, acceptable_purpose=None,
                acceptable_org=None, org_name=None, uid=None):
  """Stores an event log entry.

  Args:
    event: An Event constant identifying what happened.
    domain_name: Optional name of the domain the event concerns.
    map_id: Optional ID of the affected map.
    map_version_key: Optional datastore key string of the affected map version.
    catalog_entry_key: Optional datastore key string of the catalog entry.
    acceptable_purpose: Optional review flag (semantics set by callers).
    acceptable_org: Optional review flag (semantics set by callers).
    org_name: Optional organization name.
    uid: Optional user ID; defaults to the currently signed-in user, if any.
  """
  if not uid:
    user = users.GetCurrent()
    # Fall back to None when there is no signed-in user or the user's ID is
    # falsy (same result as the old "user and user.id or None" idiom).
    uid = user.id if user and user.id else None
  try:
    EventLog(time=datetime.datetime.utcnow(),
             uid=uid,
             event=event,
             domain_name=domain_name,
             map_id=map_id,
             map_version_key=map_version_key,
             catalog_entry_key=catalog_entry_key,
             acceptable_purpose=acceptable_purpose,
             acceptable_org=acceptable_org,
             org_name=org_name).put()
  # Event logging is best-effort: never let a logging failure break the
  # caller.  ("except Exception, e" was Python-2-only syntax; "as" works on
  # Python 2.6+ and Python 3.)
  except Exception as e:  # pylint: disable=broad-except
    logging.exception(e)
| apache-2.0 |
victortxa/fatiando | fatiando/inversion/misfit.py | 6 | 9134 | r"""
Defines base classes to represent a data-misfit functions (l2-norm, etc)
These classes can be used to implement parameter estimation problems
(inversions). They automate most of the boiler plate required and provide
direct access to ready-made optimization routines and regularization.
For now, only implements an l2-norm data misfit:
* :class:`~fatiando.inversion.misfit.Misfit`: an l2-norm data-misfit function
See the documentation for :mod:`fatiando.inversion` for examples of using
``Misfit``.
----
"""
from __future__ import division, absolute_import
import copy
from abc import abstractmethod
import numpy as np
import scipy.sparse
from ..utils import safe_dot
from .base import (OptimizerMixin, OperatorMixin, CachedMethod,
CachedMethodPermanent)
class Misfit(OptimizerMixin, OperatorMixin):
    r"""
    An l2-norm data-misfit function.
    This is a kind of objective function that measures the misfit between
    observed data :math:`\bar{d}^o` and data predicted by a set of model
    parameters :math:`\bar{d} = \bar{f}(\bar{p})`.
    The l2-norm data-misfit is defined as:
    .. math::
        \phi (\bar{p}) = \bar{r}^T \bar{r}
    where :math:`\bar{r} = \bar{d}^o - \bar{d}` is the residual vector and
    :math:`N` is the number of data.
    When subclassing this class, you must implement the method:
    * ``predicted(self, p)``: calculates the predicted data
      :math:`\bar{d}` for a given parameter vector ``p``
    If you want to use any gradient-based solver (you probably do), you'll need
    to implement the method:
    * ``jacobian(self, p)``: calculates the Jacobian matrix of
      :math:`\bar{f}(\bar{p})` evaluated at ``p``
    If :math:`\bar{f}` is linear, then the Jacobian will be cached in memory so
    that it is only calculated once when using the class multiple times. So
    solving the same problem with different methods or using an iterative
    method doesn't have the penalty of recalculating the Jacobian.
    .. warning::
        When subclassing, be careful not to set the following attributes:
        ``data``, ``nparams``, ``islinear``, ``nparams``, ``ndata``, and
        (most importantly) ``regul_param`` and ``_regularizing_parameter``.
        This could mess with internal behavior and break things in unexpected
        ways.
    Parameters:
    * data : 1d-array
        The observed data vector :math:`\bar{d}^o`
    * nparams : int
        The number of parameters in parameter vector :math:`\bar{p}`
    * islinear : True or False
        Whether :math:`\bar{f}` is linear or not.
    * cache : True
        Whether or not to cache the output of some methods to avoid recomputing
        matrices and vectors when passed the same input parameter vector.
    """

    def __init__(self, data, nparams, islinear, cache=True):
        self.p_ = None
        self.nparams = nparams
        self.islinear = islinear
        self.data = data
        self.ndata = self.data.size
        self.weights = None
        if cache:
            # Wrap the expensive methods in caches so repeated calls with the
            # same parameter vector are free.  For linear problems the
            # Jacobian (and hence the Gauss-approximation Hessian) does not
            # depend on p, so those caches are permanent.
            self.predicted = CachedMethod(self, 'predicted')
            if islinear:
                self.jacobian = CachedMethodPermanent(self, 'jacobian')
                self.hessian = CachedMethodPermanent(self, 'hessian')
            else:
                self.jacobian = CachedMethod(self, 'jacobian')

    def copy(self, deep=False):
        """
        Make a copy of me together with all the cached methods.

        Parameters:

        * deep : True or False
            If True, use ``copy.deepcopy``; otherwise a shallow copy.

        Returns:

        * copy : Misfit
            The copied object, with its cache wrappers re-bound to it.
        """
        if deep:
            obj = copy.deepcopy(self)
        else:
            obj = copy.copy(self)
        for name in ['predicted', 'jacobian', 'hessian']:
            meth = getattr(obj, name)
            is_cached = (isinstance(meth, CachedMethod) or
                         isinstance(meth, CachedMethodPermanent))
            if is_cached:
                # Copy each cache wrapper and re-point it at the new object;
                # otherwise the copy's caches would still reference self.
                setattr(obj, name, copy.copy(meth))
                getattr(obj, name).instance = obj
        return obj

    def set_weights(self, weights):
        r"""
        Set the data weights.
        Using weights for the data, the least-squares data-misfit function
        becomes:
        .. math::
            \phi = \bar{r}^T \bar{\bar{W}}\bar{r}
        Parameters:
        * weights : 1d-array or 2d-array or None
            Weights for the data vector.
            If None, will remove any weights that have been set before.
            If it is a 2d-array, it will be interpreted as the weight matrix
            :math:`\bar{\bar{W}}`.
            If it is a 1d-array, it will be interpreted as the diagonal of the
            weight matrix (all off-diagonal elements will default to zero).
            The weight matrix can be a sparse array from ``scipy.sparse``.
        """
        self.weights = weights
        if weights is not None:
            assert len(weights.shape) <= 2, \
                "Invalid weights array with shape {}. ".format(weights.shape) \
                + "Weights array should be 1d or 2d"
            if len(weights.shape) == 1:
                # A 1d array is the diagonal of W; store it as a sparse
                # diagonal matrix so hessian/gradient can use it uniformly.
                self.weights = scipy.sparse.diags(weights, 0)
        # Weights change the Hessian
        self.hessian.hard_reset()
        return self

    def residuals(self, p=None):
        """
        Calculate the residuals vector (observed - predicted data).
        Parameters:
        * p : 1d-array or None
            The parameter vector used to calculate the residuals. If None, will
            use the current estimate stored in ``estimate_``.
        Returns:
        * residuals : 1d-array or list of 1d-arrays
            The residual vector. If this is the sum of 1 or more Misfit
            instances, will return the residual vector from each of the summed
            misfits in the order of the sum.
        """
        res = self.data - self.predicted(p)
        return res

    @abstractmethod
    def predicted(self, p=None):
        """
        Calculate the predicted data for a given parameter vector.
        Parameters:
        * p : 1d-array or None
            The parameter vector used to calculate the predicted data. If None,
            will use the current estimate stored in ``estimate_``.
        Returns:
        * predicted : 1d-array or list of 1d-arrays
            The predicted data. If this is the sum of 1 or more Misfit
            instances, will return the predicted data from each of the summed
            misfits in the order of the sum.
        """
        pass

    def value(self, p):
        r"""
        Calculate the value of the misfit for a given parameter vector.
        The value is given by:
        .. math::
            \phi(\bar{p}) = \bar{r}^T\bar{\bar{W}}\bar{r}
        where :math:`\bar{r}` is the residual vector and :math:`bar{\bar{W}}`
        are optional data weights.
        Parameters:
        * p : 1d-array or None
            The parameter vector.
        Returns:
        * value : float
            The value of the misfit function.
        """
        residuals = self.data - self.predicted(p)
        if self.weights is None:
            val = np.linalg.norm(residuals)**2
        else:
            val = np.sum(self.weights*(residuals**2))
        # regul_param comes from OperatorMixin; it scales this term when the
        # misfit is combined (summed/multiplied) with regularization.
        return val*self.regul_param

    def hessian(self, p):
        r"""
        The Hessian of the misfit function with respect to the parameters.
        Calculated using the Gauss approximation:
        .. math::
            \bar{\bar{H}} \approx 2\bar{\bar{J}}^T\bar{\bar{J}}
        where :math:`\bar{\bar{J}}` is the Jacobian matrix.
        For linear problems, the Hessian matrix is cached in memory, so calling
        this method again will not trigger a re-calculation.
        Parameters:
        * p : 1d-array
            The parameter vector where the Hessian is evaluated
        Returns:
        * hessian : 2d-array
            The Hessian matrix
        """
        jacobian = self.jacobian(p)
        if self.weights is None:
            hessian = safe_dot(jacobian.T, jacobian)
        else:
            hessian = safe_dot(jacobian.T, self.weights*jacobian)
        hessian *= 2*self.regul_param
        return hessian

    def gradient(self, p):
        r"""
        The gradient vector of the misfit function.
        .. math::
            \bar{g} = -2\bar{\bar{J}}^T\bar{r}
        where :math:`\bar{\bar{J}}` is the Jacobian matrix and :math:`\bar{r}`
        is the residual vector.
        Parameters:
        * p : 1d-array
            The parameter vector where the gradient is evaluated
        Returns:
        * gradient : 1d-array
            The gradient vector.
        """
        jacobian = self.jacobian(p)
        # NOTE(review): p=None appears to assume predicted(None) == 0, so the
        # residual reduces to the observed data — confirm against the
        # OptimizerMixin solvers that call gradient(None).
        if p is None:
            tmp = self.data
        else:
            tmp = self.data - self.predicted(p)
        if self.weights is None:
            grad = safe_dot(jacobian.T, tmp)
        else:
            grad = safe_dot(jacobian.T, self.weights*tmp)
        # Check if the gradient isn't a one column matrix
        if len(grad.shape) > 1:
            # Need to convert it to a 1d array so that hell won't break loose
            grad = np.array(grad).ravel()
        grad *= -2*self.regul_param
        return grad
| bsd-3-clause |
chennan47/osf.io | addons/github/api.py | 16 | 5404 | import urllib
import itertools
import github3
import cachecontrol
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError
from addons.github import settings as github_settings
from addons.github.exceptions import NotFoundError
# Initialize caches: https_cache adds HTTP response caching (via cachecontrol)
# for GitHub API calls; default_adapter is a plain adapter mounted on the user
# endpoint so it bypasses the cache (see GitHubClient.__init__).
https_cache = cachecontrol.CacheControlAdapter()
default_adapter = HTTPAdapter()
class GitHubClient(object):
    """Thin wrapper around the github3 library for the GitHub addon."""

    def __init__(self, external_account=None, access_token=None):
        """Build a client from an external account or a raw OAuth token.

        :param external_account: Optional object with an ``oauth_key``
            attribute; takes precedence over ``access_token``
        :param str access_token: Optional GitHub OAuth token
        """
        self.access_token = getattr(external_account, 'oauth_key', None) or access_token
        if self.access_token:
            self.gh3 = github3.login(token=self.access_token)
            self.gh3.set_client_id(
                github_settings.CLIENT_ID, github_settings.CLIENT_SECRET
            )
        else:
            # No credentials: anonymous (rate-limited) client.
            self.gh3 = github3.GitHub()
        # Caching library: mount the plain adapter on the user endpoint so it
        # is always fetched fresh; everything else over HTTPS is cached.
        if github_settings.CACHE:
            self.gh3._session.mount('https://api.github.com/user', default_adapter)
            self.gh3._session.mount('https://', https_cache)

    def user(self, user=None):
        """Fetch a user or the authenticated user.

        :param user: Optional GitHub user name; will fetch authenticated
            user if omitted
        :return dict: GitHub API response
        """
        return self.gh3.user(user)

    def repo(self, user, repo):
        """Get a single Github repo's info.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return: Dict of repo information
            See http://developer.github.com/v3/repos/#get
        :raises: NotFoundError if the repo is missing or unreachable
        """
        try:
            rv = self.gh3.repository(user, repo)
        except ConnectionError:
            raise NotFoundError
        if rv:
            return rv
        raise NotFoundError

    def repos(self):
        """Iterate over all repos accessible to the authenticated user."""
        return self.gh3.iter_repos(type='all', sort='full_name')

    def user_repos(self, user):
        """Iterate over the given user's repos, sorted by full name."""
        return self.gh3.iter_user_repos(user, type='all', sort='full_name')

    def my_org_repos(self, permissions=None):
        """Iterate over org repos where one of my teams has a permission.

        :param permissions: List of permission names; defaults to ['push']
        """
        permissions = permissions or ['push']
        return itertools.chain.from_iterable(
            team.iter_repos()
            for team in self.gh3.iter_user_teams()
            if team.permission in permissions
        )

    def create_repo(self, repo, **kwargs):
        """Create a repository; kwargs are passed through to github3."""
        return self.gh3.create_repo(repo, **kwargs)

    def branches(self, user, repo, branch=None):
        """List a repo's branches or get a single branch (in a list).

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :param str branch: Branch name if getting a single branch
        :return: List of branch dicts
            http://developer.github.com/v3/repos/#list-branches
        """
        if branch:
            return [self.repo(user, repo).branch(branch)]
        return self.repo(user, repo).iter_branches() or []

    # TODO: Test
    def starball(self, user, repo, archive='tar', ref='master'):
        """Get link for archive download.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :param str archive: Archive format [tar|zip]
        :param str ref: Git reference
        :returns: tuple: Tuple of headers and file location
        """
        # github3 archive method writes file to disk, so build the archive
        # URL by hand and stream the response instead.
        repository = self.repo(user, repo)
        url = repository._build_url(archive + 'ball', ref, base_url=repository._api)
        resp = repository._get(url, allow_redirects=True, stream=True)
        return resp.headers, resp.content

    #########
    # Hooks #
    #########

    def hooks(self, user, repo):
        """List webhooks

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return list: List of commit dicts from GitHub; see
            http://developer.github.com/v3/repos/hooks/#json-http
        """
        return self.repo(user, repo).iter_hooks()

    def add_hook(self, user, repo, name, config, events=None, active=True):
        """Create a webhook.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return dict: Hook info from GitHub: see see
            http://developer.github.com/v3/repos/hooks/#json-http
            Returns None if GitHub rejects the hook (e.g. hook limit reached).
        """
        try:
            hook = self.repo(user, repo).create_hook(name, config, events, active)
        except github3.GitHubError:
            # TODO Handle this case - if '20 hooks' in e.errors[0].get('message'):
            return None
        else:
            return hook

    def delete_hook(self, user, repo, _id):
        """Delete a webhook.

        :param str user: GitHub user name
        :param str repo: GitHub repo name
        :return bool: True if successful, False otherwise
        :raises: NotFoundError if repo or hook cannot be located
        """
        repo = self.repo(user, repo)
        hook = repo.hook(_id)
        if hook is None:
            raise NotFoundError
        return repo.hook(_id).delete()

    ########
    # Auth #
    ########

    def revoke_token(self):
        """Revoke the stored OAuth token, if any."""
        if self.access_token:
            return self.gh3.revoke_authorization(self.access_token)
def ref_to_params(branch=None, sha=None):
    """Build a URL query string for the given git reference.

    :param str branch: Branch name, or None to omit it
    :param str sha: Commit SHA, or None to omit it
    :return str: '?key=value&...' query string (URL-encoded), or '' when
        neither argument is given
    """
    # urlencode lives in urllib on Python 2 but urllib.parse on Python 3;
    # likewise dict.iteritems() is Python-2-only, so use items() instead.
    try:
        from urllib import urlencode  # Python 2
    except ImportError:
        from urllib.parse import urlencode  # Python 3
    params = urlencode({
        key: value
        for key, value in {
            'branch': branch,
            'sha': sha,
        }.items()
        if value
    })
    if params:
        return '?' + params
    return ''
| apache-2.0 |
tomkralidis/QGIS | tests/src/python/test_qgslayoutpicture.py | 30 | 9015 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutPicture.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '(C) 2017 by Nyall Dawson'
__date__ = '23/10/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
import os
import socketserver
import threading
import http.server
from qgis.PyQt.QtCore import QRectF, QDir
from qgis.PyQt.QtTest import QSignalSpy
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsLayoutItemPicture,
QgsLayout,
QgsLayoutItemMap,
QgsRectangle,
QgsCoordinateReferenceSystem,
QgsProject,
QgsReadWriteContext
)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
from qgslayoutchecker import QgsLayoutChecker
from test_qgslayoutitem import LayoutItemTestCase
# Spin up the QGIS application once for the whole test module.
start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestQgsLayoutPicture(unittest.TestCase, LayoutItemTestCase):
    """Tests for QgsLayoutItemPicture: format modes, XML round-trips,
    rendering, and north-arrow rotation syncing with a linked map."""

    @classmethod
    def setUpClass(cls):
        cls.item_class = QgsLayoutItemPicture
        # Bring up a simple HTTP server, for remote picture tests
        os.chdir(unitTestDataPath())
        handler = http.server.SimpleHTTPRequestHandler
        cls.httpd = socketserver.TCPServer(('localhost', 0), handler)
        cls.port = cls.httpd.server_address[1]
        cls.httpd_thread = threading.Thread(target=cls.httpd.serve_forever)
        # Daemon thread so the server never blocks interpreter exit.
        # (Thread.setDaemon() is deprecated since Python 3.10.)
        cls.httpd_thread.daemon = True
        cls.httpd_thread.start()

    def __init__(self, methodName):
        """Run once on class initialization."""
        unittest.TestCase.__init__(self, methodName)
        TEST_DATA_DIR = unitTestDataPath()
        self.pngImage = TEST_DATA_DIR + "/sample_image.png"
        self.svgImage = TEST_DATA_DIR + "/sample_svg.svg"
        # create composition
        self.layout = QgsLayout(QgsProject.instance())
        self.layout.initializeDefaults()
        self.picture = QgsLayoutItemPicture(self.layout)
        self.picture.setPicturePath(self.pngImage)
        self.picture.attemptSetSceneRect(QRectF(70, 70, 100, 100))
        self.picture.setFrameEnabled(True)
        self.layout.addLayoutItem(self.picture)

    def setUp(self):
        self.report = "<h1>Python QgsLayoutItemPicture Tests</h1>\n"

    def tearDown(self):
        # Append this test's render-checker report to the shared HTML report.
        report_file_path = "%s/qgistest.html" % QDir.tempPath()
        with open(report_file_path, 'a') as report_file:
            report_file.write(self.report)

    def testMode(self):
        """Format mode detection, forcing, and change notification."""
        pic = QgsLayoutItemPicture(self.layout)
        # should default to unknown
        # (assertEquals is a deprecated alias removed in Python 3.12)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatUnknown)
        spy = QSignalSpy(pic.changed)
        pic.setMode(QgsLayoutItemPicture.FormatRaster)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatRaster)
        self.assertEqual(len(spy), 1)
        pic.setMode(QgsLayoutItemPicture.FormatRaster)
        self.assertEqual(len(spy), 1)
        pic.setMode(QgsLayoutItemPicture.FormatSVG)
        self.assertEqual(len(spy), 3)  # ideally only 2!
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatSVG)
        # set picture path without explicit format
        pic.setPicturePath(self.pngImage)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatRaster)
        pic.setPicturePath(self.svgImage)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatSVG)
        # forced format
        pic.setPicturePath(self.pngImage, QgsLayoutItemPicture.FormatSVG)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatSVG)
        pic.setPicturePath(self.pngImage, QgsLayoutItemPicture.FormatRaster)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatRaster)
        pic.setPicturePath(self.svgImage, QgsLayoutItemPicture.FormatSVG)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatSVG)
        pic.setPicturePath(self.svgImage, QgsLayoutItemPicture.FormatRaster)
        self.assertEqual(pic.mode(), QgsLayoutItemPicture.FormatRaster)

    def testReadWriteXml(self):
        """Mode should survive an XML write/read round-trip."""
        pr = QgsProject()
        l = QgsLayout(pr)
        pic = QgsLayoutItemPicture(l)
        # mode should be saved/restored
        pic.setMode(QgsLayoutItemPicture.FormatRaster)
        # save original item to xml
        doc = QDomDocument("testdoc")
        elem = doc.createElement("test")
        self.assertTrue(pic.writeXml(elem, doc, QgsReadWriteContext()))
        pic2 = QgsLayoutItemPicture(l)
        self.assertTrue(pic2.readXml(elem.firstChildElement(), doc, QgsReadWriteContext()))
        self.assertEqual(pic2.mode(), QgsLayoutItemPicture.FormatRaster)
        pic.setMode(QgsLayoutItemPicture.FormatSVG)
        elem = doc.createElement("test2")
        self.assertTrue(pic.writeXml(elem, doc, QgsReadWriteContext()))
        pic3 = QgsLayoutItemPicture(l)
        self.assertTrue(pic3.readXml(elem.firstChildElement(), doc, QgsReadWriteContext()))
        self.assertEqual(pic3.mode(), QgsLayoutItemPicture.FormatSVG)

    def testResizeZoom(self):
        """Test picture resize zoom mode."""
        self.picture.setResizeMode(QgsLayoutItemPicture.Zoom)
        checker = QgsLayoutChecker('composerpicture_resize_zoom', self.layout)
        checker.setControlPathPrefix("composer_picture")
        testResult, message = checker.testLayout()
        self.report += checker.report()
        assert testResult, message

    def testRemoteImage(self):
        """Test fetching remote picture."""
        self.picture.setPicturePath(
            'http://localhost:' + str(TestQgsLayoutPicture.port) + '/qgis_local_server/logo.png')
        checker = QgsLayoutChecker('composerpicture_remote', self.layout)
        checker.setControlPathPrefix("composer_picture")
        testResult, message = checker.testLayout()
        self.report += checker.report()
        # Restore the local picture before asserting so later tests are
        # unaffected even if this one fails.
        self.picture.setPicturePath(self.pngImage)
        assert testResult, message

    def testNorthArrowWithMapItemRotation(self):
        """Test picture rotation when map item is also rotated"""
        layout = QgsLayout(QgsProject.instance())
        map = QgsLayoutItemMap(layout)
        map.setExtent(QgsRectangle(0, -256, 256, 0))
        layout.addLayoutItem(map)
        picture = QgsLayoutItemPicture(layout)
        layout.addLayoutItem(picture)
        picture.setLinkedMap(map)
        self.assertEqual(picture.linkedMap(), map)
        picture.setNorthMode(QgsLayoutItemPicture.GridNorth)
        map.setItemRotation(45)
        self.assertEqual(picture.pictureRotation(), 45)
        map.setMapRotation(-34)
        self.assertEqual(picture.pictureRotation(), 11)
        # add an offset
        picture.setNorthOffset(-10)
        self.assertEqual(picture.pictureRotation(), 1)
        map.setItemRotation(55)
        self.assertEqual(picture.pictureRotation(), 11)

    def testGridNorth(self):
        """Test syncing picture to grid north"""
        layout = QgsLayout(QgsProject.instance())
        map = QgsLayoutItemMap(layout)
        map.setExtent(QgsRectangle(0, -256, 256, 0))
        layout.addLayoutItem(map)
        picture = QgsLayoutItemPicture(layout)
        layout.addLayoutItem(picture)
        picture.setLinkedMap(map)
        self.assertEqual(picture.linkedMap(), map)
        picture.setNorthMode(QgsLayoutItemPicture.GridNorth)
        map.setMapRotation(45)
        self.assertEqual(picture.pictureRotation(), 45)
        # add an offset
        picture.setNorthOffset(-10)
        self.assertEqual(picture.pictureRotation(), 35)

    def testTrueNorth(self):
        """Test syncing picture to true north"""
        layout = QgsLayout(QgsProject.instance())
        map = QgsLayoutItemMap(layout)
        map.attemptSetSceneRect(QRectF(0, 0, 10, 10))
        map.setCrs(QgsCoordinateReferenceSystem.fromEpsgId(3575))
        map.setExtent(QgsRectangle(-2126029.962, -2200807.749, -119078.102, -757031.156))
        layout.addLayoutItem(map)
        picture = QgsLayoutItemPicture(layout)
        layout.addLayoutItem(picture)
        picture.setLinkedMap(map)
        self.assertEqual(picture.linkedMap(), map)
        picture.setNorthMode(QgsLayoutItemPicture.TrueNorth)
        self.assertAlmostEqual(picture.pictureRotation(), 37.20, 1)
        # shift map
        map.setExtent(QgsRectangle(2120672.293, -3056394.691, 2481640.226, -2796718.780))
        self.assertAlmostEqual(picture.pictureRotation(), -38.18, 1)
        # rotate map
        map.setMapRotation(45)
        self.assertAlmostEqual(picture.pictureRotation(), -38.18 + 45, 1)
        # add an offset
        picture.setNorthOffset(-10)
        self.assertAlmostEqual(picture.pictureRotation(), -38.18 + 35, 1)
# Allow running this test module directly (outside the QGIS test harness).
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.